Repository: dotnet/sign
Branch: main
Commit: 2a6e1f414321
Files: 629
Total size: 2.1 MB
Directory structure:
gitextract_xxtpi398/
├── .azuredevops/
│ └── dependabot.yml
├── .config/
│ └── 1espt/
│ ├── PipelineAutobaseliningConfig.yml
│ └── README.md
├── .editorconfig
├── .gitattributes
├── .github/
│ ├── CODEOWNERS
│ ├── ISSUE_TEMPLATE/
│ │ ├── bug_report.md
│ │ └── feature_request.md
│ └── workflows/
│ └── stale.yml
├── .gitignore
├── .vsts-ci.yml
├── .vsts-pr.yml
├── CODE-OF-CONDUCT.md
├── Directory.Build.props
├── Directory.Build.targets
├── Directory.Packages.props
├── LICENSE.txt
├── NuGet.Config
├── README.md
├── SECURITY.md
├── SdkTools.props
├── THIRD-PARTY-NOTICES.txt
├── docs/
│ ├── artifact-signing-integration.md
│ ├── azdo-build-and-sign.yml
│ ├── comparisons.md
│ ├── file-globbing.md
│ ├── gh-build-and-sign.yml
│ └── signing-tool-spec.md
├── eng/
│ ├── PoliCheckExclusions.xml
│ ├── Signing.props
│ ├── Version.Details.xml
│ ├── Versions.props
│ └── common/
│ ├── BuildConfiguration/
│ │ └── build-configuration.json
│ ├── CIBuild.cmd
│ ├── PSScriptAnalyzerSettings.psd1
│ ├── README.md
│ ├── SetupNugetSources.ps1
│ ├── SetupNugetSources.sh
│ ├── build.cmd
│ ├── build.ps1
│ ├── build.sh
│ ├── cibuild.sh
│ ├── core-templates/
│ │ ├── job/
│ │ │ ├── job.yml
│ │ │ ├── onelocbuild.yml
│ │ │ ├── publish-build-assets.yml
│ │ │ ├── source-build.yml
│ │ │ └── source-index-stage1.yml
│ │ ├── jobs/
│ │ │ ├── codeql-build.yml
│ │ │ ├── jobs.yml
│ │ │ └── source-build.yml
│ │ ├── post-build/
│ │ │ ├── common-variables.yml
│ │ │ ├── post-build.yml
│ │ │ └── setup-maestro-vars.yml
│ │ ├── steps/
│ │ │ ├── cleanup-microbuild.yml
│ │ │ ├── enable-internal-runtimes.yml
│ │ │ ├── enable-internal-sources.yml
│ │ │ ├── generate-sbom.yml
│ │ │ ├── get-delegation-sas.yml
│ │ │ ├── get-federated-access-token.yml
│ │ │ ├── install-microbuild.yml
│ │ │ ├── publish-build-artifacts.yml
│ │ │ ├── publish-logs.yml
│ │ │ ├── publish-pipeline-artifacts.yml
│ │ │ ├── retain-build.yml
│ │ │ ├── send-to-helix.yml
│ │ │ ├── source-build.yml
│ │ │ └── source-index-stage1-publish.yml
│ │ └── variables/
│ │ └── pool-providers.yml
│ ├── cross/
│ │ ├── arm/
│ │ │ └── tizen/
│ │ │ └── tizen.patch
│ │ ├── arm64/
│ │ │ └── tizen/
│ │ │ └── tizen.patch
│ │ ├── armel/
│ │ │ └── tizen/
│ │ │ └── tizen.patch
│ │ ├── build-android-rootfs.sh
│ │ ├── build-rootfs.sh
│ │ ├── install-debs.py
│ │ ├── riscv64/
│ │ │ └── tizen/
│ │ │ └── tizen.patch
│ │ ├── tizen-build-rootfs.sh
│ │ ├── tizen-fetch.sh
│ │ └── toolchain.cmake
│ ├── darc-init.ps1
│ ├── darc-init.sh
│ ├── dotnet-install.cmd
│ ├── dotnet-install.ps1
│ ├── dotnet-install.sh
│ ├── dotnet.cmd
│ ├── dotnet.ps1
│ ├── dotnet.sh
│ ├── enable-cross-org-publishing.ps1
│ ├── generate-locproject.ps1
│ ├── helixpublish.proj
│ ├── init-tools-native.cmd
│ ├── init-tools-native.ps1
│ ├── init-tools-native.sh
│ ├── internal/
│ │ ├── Directory.Build.props
│ │ ├── NuGet.config
│ │ └── Tools.csproj
│ ├── internal-feed-operations.ps1
│ ├── internal-feed-operations.sh
│ ├── loc/
│ │ └── P22DotNetHtmlLocalization.lss
│ ├── msbuild.ps1
│ ├── msbuild.sh
│ ├── native/
│ │ ├── CommonLibrary.psm1
│ │ ├── common-library.sh
│ │ ├── init-compiler.sh
│ │ ├── init-distro-rid.sh
│ │ ├── init-os-and-arch.sh
│ │ ├── install-cmake-test.sh
│ │ ├── install-cmake.sh
│ │ ├── install-dependencies.sh
│ │ └── install-tool.ps1
│ ├── pipeline-logging-functions.ps1
│ ├── pipeline-logging-functions.sh
│ ├── post-build/
│ │ ├── check-channel-consistency.ps1
│ │ ├── nuget-validation.ps1
│ │ ├── nuget-verification.ps1
│ │ ├── publish-using-darc.ps1
│ │ ├── redact-logs.ps1
│ │ ├── sourcelink-validation.ps1
│ │ └── symbols-validation.ps1
│ ├── retain-build.ps1
│ ├── sdk-task.ps1
│ ├── sdk-task.sh
│ ├── sdl/
│ │ ├── NuGet.config
│ │ ├── configure-sdl-tool.ps1
│ │ ├── execute-all-sdl-tools.ps1
│ │ ├── extract-artifact-archives.ps1
│ │ ├── extract-artifact-packages.ps1
│ │ ├── init-sdl.ps1
│ │ ├── packages.config
│ │ ├── run-sdl.ps1
│ │ ├── sdl.ps1
│ │ └── trim-assets-version.ps1
│ ├── template-guidance.md
│ ├── templates/
│ │ ├── job/
│ │ │ ├── job.yml
│ │ │ ├── onelocbuild.yml
│ │ │ ├── publish-build-assets.yml
│ │ │ ├── source-build.yml
│ │ │ └── source-index-stage1.yml
│ │ ├── jobs/
│ │ │ ├── codeql-build.yml
│ │ │ ├── jobs.yml
│ │ │ └── source-build.yml
│ │ ├── post-build/
│ │ │ ├── common-variables.yml
│ │ │ ├── post-build.yml
│ │ │ └── setup-maestro-vars.yml
│ │ ├── steps/
│ │ │ ├── enable-internal-runtimes.yml
│ │ │ ├── enable-internal-sources.yml
│ │ │ ├── generate-sbom.yml
│ │ │ ├── get-delegation-sas.yml
│ │ │ ├── get-federated-access-token.yml
│ │ │ ├── publish-build-artifacts.yml
│ │ │ ├── publish-logs.yml
│ │ │ ├── publish-pipeline-artifacts.yml
│ │ │ ├── retain-build.yml
│ │ │ ├── send-to-helix.yml
│ │ │ ├── source-build.yml
│ │ │ ├── source-index-stage1-publish.yml
│ │ │ └── vmr-sync.yml
│ │ ├── variables/
│ │ │ └── pool-providers.yml
│ │ └── vmr-build-pr.yml
│ ├── templates-official/
│ │ ├── job/
│ │ │ ├── job.yml
│ │ │ ├── onelocbuild.yml
│ │ │ ├── publish-build-assets.yml
│ │ │ ├── source-build.yml
│ │ │ └── source-index-stage1.yml
│ │ ├── jobs/
│ │ │ ├── codeql-build.yml
│ │ │ ├── jobs.yml
│ │ │ └── source-build.yml
│ │ ├── post-build/
│ │ │ ├── common-variables.yml
│ │ │ ├── post-build.yml
│ │ │ └── setup-maestro-vars.yml
│ │ ├── steps/
│ │ │ ├── enable-internal-runtimes.yml
│ │ │ ├── enable-internal-sources.yml
│ │ │ ├── generate-sbom.yml
│ │ │ ├── get-delegation-sas.yml
│ │ │ ├── get-federated-access-token.yml
│ │ │ ├── publish-build-artifacts.yml
│ │ │ ├── publish-logs.yml
│ │ │ ├── publish-pipeline-artifacts.yml
│ │ │ ├── retain-build.yml
│ │ │ ├── send-to-helix.yml
│ │ │ ├── source-build.yml
│ │ │ └── source-index-stage1-publish.yml
│ │ └── variables/
│ │ ├── pool-providers.yml
│ │ └── sdl-variables.yml
│ ├── tools.ps1
│ ├── tools.sh
│ ├── vmr-sync.ps1
│ └── vmr-sync.sh
├── es-metadata.yml
├── global.json
├── scripts/
│ ├── UpdateWintrust.ps1
│ └── VerifyNuGetPackage.ps1
├── sign.sln
├── src/
│ ├── Sign.Cli/
│ │ ├── ArtifactSigningCommand.cs
│ │ ├── ArtifactSigningResources.Designer.cs
│ │ ├── ArtifactSigningResources.resx
│ │ ├── AzureCredentialOptions.cs
│ │ ├── AzureCredentialType.cs
│ │ ├── AzureKeyVaultCommand.cs
│ │ ├── AzureKeyVaultResources.Designer.cs
│ │ ├── AzureKeyVaultResources.resx
│ │ ├── CertificateStoreCommand.cs
│ │ ├── CertificateStoreResources.Designer.cs
│ │ ├── CertificateStoreResources.resx
│ │ ├── CodeCommand.cs
│ │ ├── Helpers/
│ │ │ └── HashAlgorithmParser.cs
│ │ ├── Kernel32.cs
│ │ ├── PACKAGE.md
│ │ ├── Program.cs
│ │ ├── Properties/
│ │ │ └── launchSettings.json
│ │ ├── Resources.Designer.cs
│ │ ├── Resources.resx
│ │ ├── Sign.Cli.csproj
│ │ ├── SignCommand.cs
│ │ ├── StandardStreamWriterExtensions.cs
│ │ ├── TemporaryConsoleEncoding.cs
│ │ ├── TrustedSigningCommand.cs
│ │ ├── TrustedSigningResources.Designer.cs
│ │ ├── TrustedSigningResources.resx
│ │ ├── appsettings.json
│ │ └── xlf/
│ │ ├── ArtifactSigningResources.cs.xlf
│ │ ├── ArtifactSigningResources.de.xlf
│ │ ├── ArtifactSigningResources.es.xlf
│ │ ├── ArtifactSigningResources.fr.xlf
│ │ ├── ArtifactSigningResources.it.xlf
│ │ ├── ArtifactSigningResources.ja.xlf
│ │ ├── ArtifactSigningResources.ko.xlf
│ │ ├── ArtifactSigningResources.pl.xlf
│ │ ├── ArtifactSigningResources.pt-BR.xlf
│ │ ├── ArtifactSigningResources.ru.xlf
│ │ ├── ArtifactSigningResources.tr.xlf
│ │ ├── ArtifactSigningResources.zh-Hans.xlf
│ │ ├── ArtifactSigningResources.zh-Hant.xlf
│ │ ├── AzureKeyVaultResources.cs.xlf
│ │ ├── AzureKeyVaultResources.de.xlf
│ │ ├── AzureKeyVaultResources.es.xlf
│ │ ├── AzureKeyVaultResources.fr.xlf
│ │ ├── AzureKeyVaultResources.it.xlf
│ │ ├── AzureKeyVaultResources.ja.xlf
│ │ ├── AzureKeyVaultResources.ko.xlf
│ │ ├── AzureKeyVaultResources.pl.xlf
│ │ ├── AzureKeyVaultResources.pt-BR.xlf
│ │ ├── AzureKeyVaultResources.ru.xlf
│ │ ├── AzureKeyVaultResources.tr.xlf
│ │ ├── AzureKeyVaultResources.zh-Hans.xlf
│ │ ├── AzureKeyVaultResources.zh-Hant.xlf
│ │ ├── CertManagerResources.cs.xlf
│ │ ├── CertManagerResources.de.xlf
│ │ ├── CertManagerResources.es.xlf
│ │ ├── CertManagerResources.fr.xlf
│ │ ├── CertManagerResources.it.xlf
│ │ ├── CertManagerResources.ja.xlf
│ │ ├── CertManagerResources.ko.xlf
│ │ ├── CertManagerResources.pl.xlf
│ │ ├── CertManagerResources.pt-BR.xlf
│ │ ├── CertManagerResources.ru.xlf
│ │ ├── CertManagerResources.tr.xlf
│ │ ├── CertManagerResources.zh-Hans.xlf
│ │ ├── CertManagerResources.zh-Hant.xlf
│ │ ├── CertificateStoreResources.cs.xlf
│ │ ├── CertificateStoreResources.de.xlf
│ │ ├── CertificateStoreResources.es.xlf
│ │ ├── CertificateStoreResources.fr.xlf
│ │ ├── CertificateStoreResources.it.xlf
│ │ ├── CertificateStoreResources.ja.xlf
│ │ ├── CertificateStoreResources.ko.xlf
│ │ ├── CertificateStoreResources.pl.xlf
│ │ ├── CertificateStoreResources.pt-BR.xlf
│ │ ├── CertificateStoreResources.ru.xlf
│ │ ├── CertificateStoreResources.tr.xlf
│ │ ├── CertificateStoreResources.zh-Hans.xlf
│ │ ├── CertificateStoreResources.zh-Hant.xlf
│ │ ├── Resources.cs.xlf
│ │ ├── Resources.de.xlf
│ │ ├── Resources.es.xlf
│ │ ├── Resources.fr.xlf
│ │ ├── Resources.it.xlf
│ │ ├── Resources.ja.xlf
│ │ ├── Resources.ko.xlf
│ │ ├── Resources.pl.xlf
│ │ ├── Resources.pt-BR.xlf
│ │ ├── Resources.ru.xlf
│ │ ├── Resources.tr.xlf
│ │ ├── Resources.zh-Hans.xlf
│ │ ├── Resources.zh-Hant.xlf
│ │ ├── TrustedSigningResources.cs.xlf
│ │ ├── TrustedSigningResources.de.xlf
│ │ ├── TrustedSigningResources.es.xlf
│ │ ├── TrustedSigningResources.fr.xlf
│ │ ├── TrustedSigningResources.it.xlf
│ │ ├── TrustedSigningResources.ja.xlf
│ │ ├── TrustedSigningResources.ko.xlf
│ │ ├── TrustedSigningResources.pl.xlf
│ │ ├── TrustedSigningResources.pt-BR.xlf
│ │ ├── TrustedSigningResources.ru.xlf
│ │ ├── TrustedSigningResources.tr.xlf
│ │ ├── TrustedSigningResources.zh-Hans.xlf
│ │ └── TrustedSigningResources.zh-Hant.xlf
│ ├── Sign.Core/
│ │ ├── AppInitializer.cs
│ │ ├── Certificates/
│ │ │ ├── CertificateVerifier.cs
│ │ │ └── ICertificateVerifier.cs
│ │ ├── Containers/
│ │ │ ├── AppxBundleContainer.cs
│ │ │ ├── AppxContainer.cs
│ │ │ ├── Container.cs
│ │ │ ├── ContainerProvider.cs
│ │ │ ├── IContainer.cs
│ │ │ ├── IContainerProvider.cs
│ │ │ ├── NuGetContainer.cs
│ │ │ └── ZipContainer.cs
│ │ ├── DataFormatSigners/
│ │ │ ├── AggregatingSigner.cs
│ │ │ ├── AppInstallerServiceSigner.cs
│ │ │ ├── AzureSignToolSigner.cs
│ │ │ ├── ClickOnceSigner.cs
│ │ │ ├── DefaultSigner.cs
│ │ │ ├── DistinguishedNameParser.cs
│ │ │ ├── DynamicsBusinessCentralAppFileType.cs
│ │ │ ├── IAggregatingDataFormatSigner.cs
│ │ │ ├── IAzureSignToolDataFormatSigner.cs
│ │ │ ├── IDataFormatSigner.cs
│ │ │ ├── IDefaultDataFormatSigner.cs
│ │ │ ├── IManifestSigner.cs
│ │ │ ├── ISignableFileType.cs
│ │ │ ├── ManifestSigner.cs
│ │ │ ├── NuGetSigner.cs
│ │ │ ├── RSAPKCS1SHA256SignatureDescription.cs
│ │ │ ├── RSAPKCS1SignatureDescription.cs
│ │ │ ├── RetryingSigner.cs
│ │ │ ├── SignOptions.cs
│ │ │ ├── SignableFileTypeByExtension.cs
│ │ │ └── VsixSigner.cs
│ │ ├── ExitCode.cs
│ │ ├── FileList/
│ │ │ ├── FileListReader.cs
│ │ │ ├── FileMatcher.cs
│ │ │ ├── Globber.cs
│ │ │ ├── IFileListReader.cs
│ │ │ ├── IFileMatcher.cs
│ │ │ ├── IMatcherFactory.cs
│ │ │ └── MatcherFactory.cs
│ │ ├── FileSystem/
│ │ │ ├── AppRootDirectoryLocator.cs
│ │ │ ├── DirectoryService.cs
│ │ │ ├── FileInfoComparer.cs
│ │ │ ├── FileMetadataService.cs
│ │ │ ├── IAppRootDirectoryLocator.cs
│ │ │ ├── IDirectoryService.cs
│ │ │ ├── IFileMetadataService.cs
│ │ │ ├── ITemporaryDirectory.cs
│ │ │ └── TemporaryDirectory.cs
│ │ ├── GlobalSuppressions.cs
│ │ ├── ICertificateProvider.cs
│ │ ├── IServiceProviderFactory.cs
│ │ ├── ISignatureAlgorithmProvider.cs
│ │ ├── ISignatureProvider.cs
│ │ ├── ISigner.cs
│ │ ├── Native/
│ │ │ ├── Kernel32.cs
│ │ │ ├── Ntdsapi.cs
│ │ │ └── mansign2.cs
│ │ ├── Resources.Designer.cs
│ │ ├── Resources.resx
│ │ ├── ServiceProvider.cs
│ │ ├── ServiceProviderFactory.cs
│ │ ├── Sign.Core.csproj
│ │ ├── Signer.cs
│ │ ├── SigningException.cs
│ │ ├── Tools/
│ │ │ ├── CliTool.cs
│ │ │ ├── ICliTool.cs
│ │ │ ├── IMageCli.cs
│ │ │ ├── IMakeAppxCli.cs
│ │ │ ├── INuGetSignTool.cs
│ │ │ ├── ITool.cs
│ │ │ ├── IToolConfigurationProvider.cs
│ │ │ ├── IVsixSignTool.cs
│ │ │ ├── MageCli.cs
│ │ │ ├── MakeAppxCli.cs
│ │ │ ├── NuGet/
│ │ │ │ ├── NuGetLogger.cs
│ │ │ │ ├── NuGetPackageSigner.cs
│ │ │ │ └── NuGetSignatureProvider.cs
│ │ │ ├── NuGetSignTool.cs
│ │ │ ├── Tool.cs
│ │ │ ├── ToolConfigurationProvider.cs
│ │ │ ├── VsixSignTool/
│ │ │ │ ├── HashAlgorithmInfo.cs
│ │ │ │ ├── HexHelpers.cs
│ │ │ │ ├── ISignatureBuilderPreset.cs
│ │ │ │ ├── ISigningContext.cs
│ │ │ │ ├── Interop/
│ │ │ │ │ ├── Crypt32.cs
│ │ │ │ │ └── CryptMemorySafeHandle.cs
│ │ │ │ ├── KnownOids.cs
│ │ │ │ ├── OpcContentTypes.cs
│ │ │ │ ├── OpcKnownMimeTypes.cs
│ │ │ │ ├── OpcKnownUris.cs
│ │ │ │ ├── OpcPackage.cs
│ │ │ │ ├── OpcPackageFileMode.cs
│ │ │ │ ├── OpcPackageSignatureBuilder.cs
│ │ │ │ ├── OpcPackageTimestampBuilder.cs
│ │ │ │ ├── OpcPart.cs
│ │ │ │ ├── OpcPartDigest.cs
│ │ │ │ ├── OpcPartDigestProcessor.cs
│ │ │ │ ├── OpcRelationships.cs
│ │ │ │ ├── OpcSignature.cs
│ │ │ │ ├── OpcSignatureManifest.cs
│ │ │ │ ├── SignConfigurationSet.cs
│ │ │ │ ├── SignatureAlgorithmTranslator.cs
│ │ │ │ ├── SigningAlgorithm.cs
│ │ │ │ ├── SigningContext.cs
│ │ │ │ ├── Timestamp/
│ │ │ │ │ ├── TimestampBuilder.cs
│ │ │ │ │ ├── TimestampBuilder.netcoreapp.cs
│ │ │ │ │ ├── TimestampNonce.cs
│ │ │ │ │ └── TimestampResult.cs
│ │ │ │ ├── UriHelpers.cs
│ │ │ │ ├── VSIXSignatureBuilderPreset.cs
│ │ │ │ └── XmlSignatureBuilder.cs
│ │ │ └── VsixSignTool.cs
│ │ └── xlf/
│ │ ├── Resources.cs.xlf
│ │ ├── Resources.de.xlf
│ │ ├── Resources.es.xlf
│ │ ├── Resources.fr.xlf
│ │ ├── Resources.it.xlf
│ │ ├── Resources.ja.xlf
│ │ ├── Resources.ko.xlf
│ │ ├── Resources.pl.xlf
│ │ ├── Resources.pt-BR.xlf
│ │ ├── Resources.ru.xlf
│ │ ├── Resources.tr.xlf
│ │ ├── Resources.zh-Hans.xlf
│ │ └── Resources.zh-Hant.xlf
│ ├── Sign.SignatureProviders.ArtifactSigning/
│ │ ├── ArtifactSigningService.cs
│ │ ├── ArtifactSigningServiceProvider.cs
│ │ ├── RSAArtifactSigning.cs
│ │ ├── Resources.Designer.cs
│ │ ├── Resources.resx
│ │ ├── Sign.SignatureProviders.ArtifactSigning.csproj
│ │ └── xlf/
│ │ ├── Resources.cs.xlf
│ │ ├── Resources.de.xlf
│ │ ├── Resources.es.xlf
│ │ ├── Resources.fr.xlf
│ │ ├── Resources.it.xlf
│ │ ├── Resources.ja.xlf
│ │ ├── Resources.ko.xlf
│ │ ├── Resources.pl.xlf
│ │ ├── Resources.pt-BR.xlf
│ │ ├── Resources.ru.xlf
│ │ ├── Resources.tr.xlf
│ │ ├── Resources.zh-Hans.xlf
│ │ └── Resources.zh-Hant.xlf
│ ├── Sign.SignatureProviders.CertificateStore/
│ │ ├── CertificateStoreService.cs
│ │ ├── CertificateStoreServiceProvider.cs
│ │ ├── Resources.Designer.cs
│ │ ├── Resources.resx
│ │ ├── Sign.SignatureProviders.CertificateStore.csproj
│ │ └── xlf/
│ │ ├── Resources.cs.xlf
│ │ ├── Resources.de.xlf
│ │ ├── Resources.es.xlf
│ │ ├── Resources.fr.xlf
│ │ ├── Resources.it.xlf
│ │ ├── Resources.ja.xlf
│ │ ├── Resources.ko.xlf
│ │ ├── Resources.pl.xlf
│ │ ├── Resources.pt-BR.xlf
│ │ ├── Resources.ru.xlf
│ │ ├── Resources.tr.xlf
│ │ ├── Resources.zh-Hans.xlf
│ │ └── Resources.zh-Hant.xlf
│ └── Sign.SignatureProviders.KeyVault/
│ ├── KeyVaultService.cs
│ ├── KeyVaultServiceProvider.cs
│ ├── RSAKeyVaultWrapper.cs
│ ├── Resources.Designer.cs
│ ├── Resources.resx
│ ├── Sign.SignatureProviders.KeyVault.csproj
│ └── xlf/
│ ├── Resources.cs.xlf
│ ├── Resources.de.xlf
│ ├── Resources.es.xlf
│ ├── Resources.fr.xlf
│ ├── Resources.it.xlf
│ ├── Resources.ja.xlf
│ ├── Resources.ko.xlf
│ ├── Resources.pl.xlf
│ ├── Resources.pt-BR.xlf
│ ├── Resources.ru.xlf
│ ├── Resources.tr.xlf
│ ├── Resources.zh-Hans.xlf
│ └── Resources.zh-Hant.xlf
├── test/
│ ├── Sign.Cli.Test/
│ │ ├── ArtifactSigningCommandTests.cs
│ │ ├── AzureCredentialOptionsTests.cs
│ │ ├── AzureKeyVaultCommandTests.cs
│ │ ├── CertificateStoreCommandTests.cs
│ │ ├── CodeCommandTests.cs
│ │ ├── Options/
│ │ │ ├── ApplicationNameOptionTests.cs
│ │ │ ├── BaseDirectoryOptionTests.cs
│ │ │ ├── DescriptionOptionTests.cs
│ │ │ ├── DescriptionUrlOptionTests.cs
│ │ │ ├── DirectoryInfoOptionTests.cs
│ │ │ ├── FileDigestOptionTests.cs
│ │ │ ├── HashAlgorithmNameOptionTests.cs
│ │ │ ├── Int32OptionTests.cs
│ │ │ ├── MaxConcurrencyOptionTests.cs
│ │ │ ├── OptionTests.cs
│ │ │ ├── OutputOptionTests.cs
│ │ │ ├── PublisherNameOptionTests.cs
│ │ │ ├── TimestampDigestOptionTests.cs
│ │ │ ├── TimestampUrlOptionTests.cs
│ │ │ ├── UriOptionTests.cs
│ │ │ └── VerbosityOptionTests.cs
│ │ ├── Sign.Cli.Test.csproj
│ │ ├── SignCommandTests.Globbing.cs
│ │ ├── SignCommandTests.cs
│ │ ├── TemporaryConsoleEncodingTests.cs
│ │ ├── TestInfrastructure/
│ │ │ ├── SignerSpy.cs
│ │ │ └── TestServiceProviderFactory.cs
│ │ ├── TrustedSigningCommandTests.cs
│ │ └── Usings.cs
│ ├── Sign.Core.Test/
│ │ ├── AssemblyInitializer.cs
│ │ ├── Certificates/
│ │ │ └── CertificateVerifierTests.cs
│ │ ├── Containers/
│ │ │ ├── AppxBundleContainerTests.cs
│ │ │ ├── AppxContainerTests.cs
│ │ │ ├── ContainerProviderTests.cs
│ │ │ ├── NuGetContainerTests.cs
│ │ │ └── ZipContainerTests.cs
│ │ ├── DataFormatSigners/
│ │ │ ├── AggregatingSignerTests.Containers.cs
│ │ │ ├── AggregatingSignerTests.PortableExecutableFiles.cs
│ │ │ ├── AggregatingSignerTests.cs
│ │ │ ├── AppInstallerServiceSignerTests.cs
│ │ │ ├── AzureSignToolSignerTests.cs
│ │ │ ├── ClickOnceSignerTests.cs
│ │ │ ├── DefaultSignerTests.cs
│ │ │ ├── DistinguishedNameParserTests.cs
│ │ │ ├── DynamicsBusinessCentralAppFileTypeTests.cs
│ │ │ ├── NuGetSignerTests.cs
│ │ │ ├── PowerShell/
│ │ │ │ ├── PowerShellFileReader.cs
│ │ │ │ ├── TextPowerShellFileReader.cs
│ │ │ │ └── XmlPowerShellFileReader.cs
│ │ │ ├── RSAPKCS1SHA256SignatureDescriptionTests.cs
│ │ │ ├── SignableFileTypeByExtensionTests.cs
│ │ │ └── VsixSignerTests.cs
│ │ ├── FileList/
│ │ │ ├── FileListReaderTests.cs
│ │ │ ├── FileMatcherTests.cs
│ │ │ └── MatcherFactoryTests.cs
│ │ ├── FileSystem/
│ │ │ ├── AppRootDirectoryLocatorTests.cs
│ │ │ ├── DirectoryServiceTests.cs
│ │ │ ├── FileInfoComparerTests.cs
│ │ │ ├── FileMetadataServiceTests.cs
│ │ │ └── TemporaryDirectoryTests.cs
│ │ ├── Native/
│ │ │ └── SignedCmiManifest2Tests.cs
│ │ ├── ServiceProviderFactoryTests.cs
│ │ ├── ServiceProviderTests.cs
│ │ ├── Sign.Core.Test.csproj
│ │ ├── SignerTests.cs
│ │ ├── TestAssets/
│ │ │ ├── App1_1.0.0.0_x64.msixbundle
│ │ │ ├── EmptyExtension.app
│ │ │ ├── PowerShell/
│ │ │ │ ├── cmdlet-definition.cdxml
│ │ │ │ ├── data.psd1
│ │ │ │ ├── formatting.ps1xml
│ │ │ │ ├── module.psm1
│ │ │ │ └── script.ps1
│ │ │ ├── VSIXSamples/
│ │ │ │ ├── OpenVsixSignToolTest-Signed.vsix
│ │ │ │ └── OpenVsixSignToolTest.vsix
│ │ │ └── VsixPackage.vsix
│ │ ├── TestInfrastructure/
│ │ │ ├── AggregatingSignerSpy.cs
│ │ │ ├── AggregatingSignerTest.cs
│ │ │ ├── AuthenticodeSignatureReader.cs
│ │ │ ├── CertificateStoreServiceStub.cs
│ │ │ ├── ContainerProviderStub.cs
│ │ │ ├── ContainerSpy.cs
│ │ │ ├── DirectoryServiceStub.cs
│ │ │ ├── FileMetadataServiceStub.cs
│ │ │ ├── KeyVaultServiceStub.cs
│ │ │ ├── Server/
│ │ │ │ ├── AiaResponder.cs
│ │ │ │ ├── AlgorithmIdentifier.cs
│ │ │ │ ├── AttributeUtility.cs
│ │ │ │ ├── CertificateAuthority.cs
│ │ │ │ ├── CertificateUtilities.cs
│ │ │ │ ├── CertificatesFixture.cs
│ │ │ │ ├── CommitmentTypeIndication.cs
│ │ │ │ ├── CommitmentTypeQualifier.cs
│ │ │ │ ├── CrlResponder.cs
│ │ │ │ ├── EssCertId.cs
│ │ │ │ ├── EssCertIdV2.cs
│ │ │ │ ├── GeneralName.cs
│ │ │ │ ├── HashAlgorithmNameExtensions.cs
│ │ │ │ ├── HttpResponder.cs
│ │ │ │ ├── IHttpResponder.cs
│ │ │ │ ├── ITestServer.cs
│ │ │ │ ├── IssuerSerial.cs
│ │ │ │ ├── OcspResponder.cs
│ │ │ │ ├── OidExtensions.cs
│ │ │ │ ├── Oids.cs
│ │ │ │ ├── PfxFilesFixture.cs
│ │ │ │ ├── PolicyInformation.cs
│ │ │ │ ├── PolicyQualifierInfo.cs
│ │ │ │ ├── SigningCertificateV2.cs
│ │ │ │ ├── SigningTestsCollection.cs
│ │ │ │ ├── TestServer.cs
│ │ │ │ ├── TestServerFixture.cs
│ │ │ │ ├── TestUtility.cs
│ │ │ │ └── TimestampService.cs
│ │ │ ├── SignerSpy.cs
│ │ │ └── TemporaryEnvironmentPathOverride.cs
│ │ ├── Tools/
│ │ │ ├── ToolConfigurationProviderTests.cs
│ │ │ └── VSIXSignTool/
│ │ │ ├── CertificateSigningContextTests.cs
│ │ │ ├── Crypt32Tests.cs
│ │ │ ├── HexHelperTests.cs
│ │ │ ├── OpcPackageSigningTests.cs
│ │ │ ├── OpcPackageTests.cs
│ │ │ └── UriHelpersTests.cs
│ │ └── Usings.cs
│ ├── Sign.SignatureProviders.ArtifactSigning.Test/
│ │ ├── RSATrustedSigningTests.cs
│ │ ├── Sign.SignatureProviders.ArtifactSigning.Test.csproj
│ │ ├── TrustedSigningServiceProviderTests.cs
│ │ ├── TrustedSigningServiceTests.cs
│ │ └── Usings.cs
│ ├── Sign.SignatureProviders.CertificateStore.Test/
│ │ ├── CertificateStoreServiceProviderTests.cs
│ │ ├── CertificateStoreServiceTests.cs
│ │ ├── Sign.SignatureProviders.CertificateStore.Test.csproj
│ │ └── Usings.cs
│ ├── Sign.SignatureProviders.KeyVault.Test/
│ │ ├── KeyVaultServiceProviderTests.cs
│ │ ├── KeyVaultServiceTests.cs
│ │ ├── RSAKeyVaultWrapperTests.cs
│ │ ├── Sign.SignatureProviders.KeyVault.Test.csproj
│ │ └── Usings.cs
│ └── Sign.TestInfrastructure/
│ ├── Constants.cs
│ ├── EphemeralTrust.cs
│ ├── RequiresElevationTheoryAttribute.cs
│ ├── ResidualTestCertificatesFoundInRootStoreException.cs
│ ├── SelfIssuedCertificateCreator.cs
│ ├── Sign.TestInfrastructure.csproj
│ ├── TemporaryFile.cs
│ ├── TestAssets.cs
│ ├── TestFileCreator.cs
│ ├── TestLogEntry.cs
│ ├── TestLogger.cs
│ └── TrustedCertificateFixture.cs
└── triage-policy.md
================================================
FILE CONTENTS
================================================
================================================
FILE: .azuredevops/dependabot.yml
================================================
version: 2
# Disabling dependabot on Azure DevOps as this is a mirrored repo. Updates should go through GitHub.
enable-campaigned-updates: false
enable-security-updates: false
================================================
FILE: .config/1espt/PipelineAutobaseliningConfig.yml
================================================
## DO NOT MODIFY THIS FILE MANUALLY. This is part of auto-baselining from 1ES Pipeline Templates. Go to [https://aka.ms/1espt-autobaselining] for more details.
pipelines:
1190:
retail:
source:
credscan:
lastModifiedDate: 2024-03-28
eslint:
lastModifiedDate: 2024-03-28
psscriptanalyzer:
lastModifiedDate: 2024-03-28
armory:
lastModifiedDate: 2024-03-28
binary:
credscan:
lastModifiedDate: 2024-03-28
binskim:
lastModifiedDate: 2025-03-19
spotbugs:
lastModifiedDate: 2024-03-28
================================================
FILE: .config/1espt/README.md
================================================
Do not merge changes to PipelineAutobaseliningConfig.yml in the internal Azure DevOps repository, as it would break commit mirroring from the public GitHub repository. Instead, merge the changes into the public GitHub repository.
See https://dev.azure.com/dnceng/internal/_wiki/wikis/DNCEng%20Services%20Wiki/1214/1ES-Pipeline-Template-Migration-FAQ?anchor=should-i-accept-these-automated-prs-into-my-repo-that-is-mirrored-from-github-to-fix-cg/security-issues%3F for guidance.
================================================
FILE: .editorconfig
================================================
root = true
[*]
insert_final_newline = true
indent_style = space
indent_size = 4
trim_trailing_whitespace = true
[*.{csproj,md,props,targets,yml}]
indent_size = 2
[*.cs]
# IDE0063: Use simple 'using' statement
csharp_prefer_simple_using_statement = false
# CA2254: Template should be a static expression
# See https://github.com/dotnet/roslyn-analyzers/issues/5626
dotnet_diagnostic.CA2254.severity = none
# CA2255: The ModuleInitializer attribute should not be used in libraries
dotnet_diagnostic.CA2255.severity = none
# IDE0073: File header
dotnet_diagnostic.IDE0073.severity = warning
file_header_template = Licensed to the .NET Foundation under one or more agreements.\nThe .NET Foundation licenses this file to you under the MIT license.\nSee the LICENSE.txt file in the project root for more information.
================================================
FILE: .gitattributes
================================================
###############################################################################
# Set default behavior to automatically normalize line endings.
###############################################################################
* text=auto
###############################################################################
# Set default behavior for command prompt diff.
#
# This is needed for earlier builds of msysgit that do not have it on by
# default for csharp files.
# Note: This is only used by command line
###############################################################################
#*.cs diff=csharp
###############################################################################
# Set the merge driver for project and solution files
#
# Merging from the command prompt will add diff markers to the files if there
# are conflicts (Merging from VS is not affected by the settings below, in VS
# the diff markers are never inserted). Diff markers may cause the following
# file extensions to fail to load in VS. An alternative would be to treat
# these files as binary and thus will always conflict and require user
# intervention with every merge. To do so, just uncomment the entries below
###############################################################################
#*.sln merge=binary
#*.csproj merge=binary
#*.vbproj merge=binary
#*.vcxproj merge=binary
#*.vcproj merge=binary
#*.dbproj merge=binary
#*.fsproj merge=binary
#*.lsproj merge=binary
#*.wixproj merge=binary
#*.modelproj merge=binary
#*.sqlproj merge=binary
#*.wwaproj merge=binary
###############################################################################
# behavior for image files
#
# image files are treated as binary by default.
###############################################################################
#*.jpg binary
#*.png binary
#*.gif binary
###############################################################################
# diff behavior for common document formats
#
# Convert binary document formats to text before diffing them. This feature
# is only available from the command line. Turn it on by uncommenting the
# entries below.
###############################################################################
#*.doc diff=astextplain
#*.DOC diff=astextplain
#*.docx diff=astextplain
#*.DOCX diff=astextplain
#*.dot diff=astextplain
#*.DOT diff=astextplain
#*.pdf diff=astextplain
#*.PDF diff=astextplain
#*.rtf diff=astextplain
#*.RTF diff=astextplain
================================================
FILE: .github/CODEOWNERS
================================================
# These owners will be the default owners for everything in
# the repo. Unless a later match takes precedence, they will be
# requested for review when someone opens a pull request.
# For more on how to customize the CODEOWNERS file - https://help.github.com/en/articles/about-code-owners
* @dotnet/sign-maintainers
================================================
FILE: .github/ISSUE_TEMPLATE/bug_report.md
================================================
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is.
**Repro steps**
**Expected behavior**
A clear and concise description of what you expected to happen.
**Actual behavior**
A clear and concise description of what actually happened.
**Additional context**
- Include the output of `sign --version`.
- Include the output of `dotnet --info`.
- Add any other context about the problem here.
================================================
FILE: .github/ISSUE_TEMPLATE/feature_request.md
================================================
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Include the output of `sign --version`.
Add any other context about the problem here.
================================================
FILE: .github/workflows/stale.yml
================================================
name: 'Close stale issues'
permissions:
issues: write
on:
schedule:
- cron: '30 1 * * *'
jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v9
with:
stale-issue-message: 'This issue is stale because it has been open 10 days with no activity after asking for more info. Comment or this will be closed in 4 days.'
close-issue-message: 'This issue was closed because it has been stalled for 14 days with no activity. This can be reopened if additional information is provided.'
days-before-issue-stale: 10
days-before-issue-close: 4
days-before-pr-stale: -1
days-before-pr-close: -1
any-of-labels: "needs-more-info"
================================================
FILE: .gitignore
================================================
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
# Tools directory
.dotnet/
.packages/
.tools/
/[Tt]ools/
# User-specific files
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
[Xx]64/
[Xx]86/
[Bb]uild/
bld/
[Bb]in/
[Oo]bj/
# Visual Studio 2015 cache/options directory
.vs/
.vscode/
.store/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUNIT
*.VisualState.xml
TestResult.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# DNX
project.lock.json
*.lock.json
artifacts/
*_i.c
*_p.c
*_i.h
*.ilk
*.meta
*.obj
*.pch
*.pdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*.log
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# JustCode is a .NET coding add-in
.JustCode
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# TODO: Un-comment the next line if you do not want to checkin
# your web deploy settings because they may include unencrypted
# passwords
#*.pubxml
*.publishproj
# NuGet Packages
*.nupkg
# The packages folder can be ignored because of Package Restore
**/packages/*
# except build/, which is used as an MSBuild target.
!**/packages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/packages/repositories.config
# NuGet v3's project.json files produce more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Microsoft Azure ApplicationInsights config file
ApplicationInsights.config
# Windows Store app package directory
AppPackages/
BundleArtifacts/
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!*.[Cc]ache/
# Others
ClientBin/
[Ss]tyle[Cc]op.*
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.pfx
*.publishsettings
node_modules/
orleans.codegen.cs
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
# SQL Server files
*.mdf
*.ldf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# LightSwitch generated files
GeneratedArtifacts/
ModelManifest.xml
# Paket dependency manager
.paket/paket.exe
# FAKE - F# Make
.fake/
/src/SignClient/Properties/launchSettings.json
/src/SignService/Properties/launchSettings.json
/src/SignService/App_Data/
/src/SignService/tools/SDK/
!**/KeyVaultSignToolWrapper/x86/
!**/KeyVaultSignToolWrapper/x64/
/src/SignService/Properties/PublishProfiles
/src/InstallUtility/Properties/launchSettings.json
/arm/ArmDeploy/azuredeploy.parameters.json
================================================
FILE: .vsts-ci.yml
================================================
# Pipeline: https://dnceng.visualstudio.com/internal/_build?definitionId=1190
variables:
- name: _TeamName
value: DotNetCore
- name: Build.Repository.Clean
value: true
- name: Codeql.Enabled
value: true
- name: Codeql.TSAEnabled
value: true
- group: DotNet-Sign-SDLValidation-Params
- template: /eng/common/templates-official/variables/pool-providers.yml
trigger:
batch: true
branches:
include:
- main
paths:
exclude:
- "*.md"
pr:
autoCancel: false
branches:
include:
- '*'
resources:
repositories:
- repository: 1esPipelines
type: git
name: 1ESPipelineTemplates/1ESPipelineTemplates
ref: refs/tags/release
extends:
template: v1/1ES.Official.PipelineTemplate.yml@1esPipelines
parameters:
settings:
networkIsolationPolicy: Permissive,CFSClean,CFSClean2
sdl:
sourceAnalysisPool:
name: $(DncEngInternalBuildPool)
image: 1es-windows-2022
os: windows
customBuildTags:
- ES365AIMigrationTooling
stages:
- stage: Build_Windows
displayName: Build Windows
jobs:
- ${{ if and(eq(variables['System.TeamProject'], 'internal'), notin(variables['Build.Reason'], 'PullRequest'), eq(variables['Build.SourceBranch'], 'refs/heads/main')) }}:
- template: /eng/common/templates-official/job/onelocbuild.yml@self
parameters:
LclSource: lclFilesfromPackage
LclPackageId: 'LCL-JUNO-PROD-SIGNCLI'
MirrorRepo: sign
- template: /eng/common/templates-official/jobs/jobs.yml@self
parameters:
enableMicrobuild: true
enablePublishBuildArtifacts: true
enablePublishBuildAssets: true
enablePublishUsingPipelines: true
enableTelemetry: true
jobs:
- job: Windows
pool: # See https://helix.dot.net/ for VM names.
name: NetCore1ESPool-Internal
demands: ImageOverride -equals windows.vs2022.amd64
variables:
# Only enable publishing in official builds.
- ${{ if and(eq(variables['System.TeamProject'], 'internal'), notin(variables['Build.Reason'], 'PullRequest')) }}:
# Publish-Build-Assets provides: MaestroAccessToken, BotAccount-dotnet-maestro-bot-PAT
- group: Publish-Build-Assets
- name: _SignType
value: real
- name: _OfficialBuildArgs
value: /p:DotNetPublishUsingPipelines=true
/p:DotNetSignType=$(_SignType)
/p:OfficialBuildId=$(BUILD.BUILDNUMBER)
/p:TeamName=$(_TeamName)
- ${{ else }}:
- name: _SignType
value: test
- name: _OfficialBuildArgs
value: ''
strategy:
matrix:
Release:
_BuildConfig: Release
steps:
- task: CodeQL3000Init@0
displayName: Initialize CodeQL
condition: and(succeeded(), eq(variables['Codeql.Enabled'], 'true'))
- script: eng\common\CIBuild.cmd
-configuration $(_BuildConfig)
-prepareMachine
$(_OfficialBuildArgs)
name: Build
displayName: Build and run tests
condition: succeeded()
- task: CodeQL3000Finalize@0
displayName: Finalize CodeQL
condition: and(succeeded(), eq(variables['Codeql.Enabled'], 'true'))
# Guardian requires npm.
- task: NodeTool@0
inputs:
versionSpec: '18.x'
# Validates compiler/linker settings and other security-related binary characteristics.
# https://github.com/Microsoft/binskim
# YAML reference: https://eng.ms/docs/security-compliance-identity-and-management-scim/security/azure-security/cloudai-security-fundamentals-engineering/security-integration/guardian-wiki/sdl-azdo-extension/binskim-build-task#v4
- task: BinSkim@4
displayName: Run BinSkim
inputs:
InputType: Basic
Function: analyze
TargetPattern: binskimPattern
AnalyzeTargetBinskim: $(Build.SourcesDirectory)\artifacts\bin\Sign.Cli\$(_BuildConfig)\net8.0\publish\*.dll
AnalyzeSymPath: 'SRV*https://symweb'
condition: succeededOrFailed()
- task: PublishTestResults@2
displayName: 'Publish Unit Test Results'
inputs:
testResultsFormat: xUnit
testResultsFiles: '$(Build.SourcesDirectory)/artifacts/TestResults/**/*.xml'
mergeTestResults: true
searchFolder: $(System.DefaultWorkingDirectory)
testRunTitle: sign unit tests - $(Agent.JobName)
condition: succeededOrFailed()
- task: ComponentGovernanceComponentDetection@0
displayName: Component Governance scan
inputs:
ignoreDirectories: '$(Build.SourcesDirectory)/.packages,$(Build.SourcesDirectory)/artifacts/obj/Sign.Cli'
- template: /eng/common/templates-official/post-build/post-build.yml@self
parameters:
publishingInfraVersion: 3
enableSymbolValidation: true
enableSourceLinkValidation: true
validateDependsOn:
- Build_Windows
publishDependsOn:
- Validate
# This is to enable SDL runs part of Post-Build Validation Stage
SDLValidationParameters:
enable: true
params: ' -SourceToolsList @("policheck","credscan")
-TsaInstanceURL $(_TsaInstanceURL)
-TsaProjectName $(_TsaProjectName)
-TsaNotificationEmail $(_TsaNotificationEmail)
-TsaCodebaseAdmin $(_TsaCodebaseAdmin)
-TsaBugAreaPath $(_TsaBugAreaPath)
-TsaIterationPath $(_TsaIterationPath)
-TsaRepositoryName dotnet-sign
-TsaCodebaseName dotnet-sign
-TsaOnboard $True
-TsaPublish $True
-PoliCheckAdditionalRunConfigParams @("UserExclusionPath < $(Build.SourcesDirectory)/eng/PoliCheckExclusions.xml")'
================================================
FILE: .vsts-pr.yml
================================================
# Pipeline: https://dev.azure.com/dnceng-public/public/_build?definitionId=231
variables:
- name: _TeamName
value: DotNetCore
- name: Build.Repository.Clean
value: true
trigger:
batch: true
branches:
include:
- main
paths:
exclude:
- "*.md"
stages:
- stage: Build_Windows
displayName: Build Windows
jobs:
- template: /eng/common/templates/jobs/jobs.yml
parameters:
enableMicrobuild: true
jobs:
- job: Windows
pool: # See https://helix.dot.net/ for VM names.
name: NetCore-Public
demands: ImageOverride -equals windows.vs2022.amd64.open
variables:
- name: _SignType
value: test
strategy:
matrix:
Release:
_BuildConfig: Release
steps:
- script: eng\common\CIBuild.cmd
-configuration $(_BuildConfig)
-prepareMachine
name: Build
displayName: Build and run tests
condition: succeeded()
- task: PublishTestResults@2
displayName: 'Publish test results'
inputs:
testResultsFormat: xUnit
testResultsFiles: '$(Build.SourcesDirectory)/artifacts/TestResults/**/*.xml'
mergeTestResults: true
searchFolder: $(System.DefaultWorkingDirectory)
testRunTitle: sign unit tests - $(Agent.JobName)
condition: succeededOrFailed()
- task: PublishBuildArtifacts@1
displayName: 'Publish log files on failure'
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
ArtifactName: 'Logs'
publishLocation: 'Container'
condition: failed()
================================================
FILE: CODE-OF-CONDUCT.md
================================================
# Code of Conduct
This project has adopted the code of conduct defined by the Contributor Covenant
to clarify expected behavior in our community.
For more information, see the [.NET Foundation Code of Conduct](https://dotnetfoundation.org/code-of-conduct).
================================================
FILE: Directory.Build.props
================================================
true$(CopyrightNetFoundation)trueembeddedtruetruestrictenablefalseLatestCS8002enableMITwin-x64net8.0Truetruefalse$(MSBuildThisFileDirectory)
================================================
FILE: Directory.Build.targets
================================================
================================================
FILE: Directory.Packages.props
================================================
truetrue
================================================
FILE: LICENSE.txt
================================================
The MIT License (MIT)
Copyright (c) .NET Foundation and Contributors
All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
================================================
FILE: NuGet.Config
================================================
================================================
FILE: README.md
================================================
# Sign CLI
[](https://www.dotnetfoundation.org/)
This project aims to make it easier to integrate secure code signing into a CI pipeline by using cloud-based hardware security module (HSM)-protected keys. This project is part of the [.NET Foundation](https://www.dotnetfoundation.org/) and operates under their [code of conduct](https://www.dotnetfoundation.org/code-of-conduct). It is licensed under [MIT](https://opensource.org/licenses/MIT) (an OSI approved license).
You can find the latest version of Sign CLI on [NuGet.org](https://www.nuget.org/packages/sign).
## Prerequisites
- An up-to-date x64-based version of Windows currently in [mainstream support](https://learn.microsoft.com/lifecycle/products/)
- [.NET 8 SDK or later](https://dotnet.microsoft.com/download)
- [Microsoft Visual C++ 14 runtime](https://aka.ms/vs/17/release/vc_redist.x64.exe)
## Install
To install Sign CLI in the current directory, open a command prompt and execute:
```
dotnet tool install --tool-path . --prerelease sign
```
To run Sign CLI, execute `sign` from the same directory.
## Design
Given an initial file path or glob pattern, this tool recursively searches directories and containers to find signable files and containers. For each signable artifact, the tool uses an implementation of [`System.Security.Cryptography.RSA`](https://learn.microsoft.com/dotnet/api/system.security.cryptography.rsa?view=net-8.0) that delegates the signing operation to Azure Key Vault. The tool computes a digest (or hash) of the to-be-signed content and submits the digest --- not the original content --- to Azure Key Vault for digest signing. The returned raw signature value is then incorporated in whatever signature format is appropriate for the file type. Signable content is not sent to Azure Key Vault.
While the current version is limited to RSA and Azure Key Vault, it is desirable to support ECDSA and other cloud providers in the future.
## Supported File Types
- `.msi`, `.msp`, `.msm`, `.cab`, `.dll`, `.exe`, `.appx`, `.appxbundle`, `.msix`, `.msixbundle`, `.sys`, `.vxd`, `.ps1`, `.psm1`, and any portable executable (PE) file (via [AzureSignTool](https://github.com/vcsjones/AzureSignTool))
- `.vsix`
- ClickOnce `.application` and `.vsto` (via `Mage`). Notes below.
- `.nupkg`
## ClickOnce
There are a couple of possibilities for signing ClickOnce packages.
Generally you will want to sign an entire package and all its contents i.e. the deployment manifest (`.application` or `.vsto`),
application manifest (`.exe.manifest` or `.dll.manifest`) and the underlying `.exe` and `.dll` files themselves.
To do this, ensure that the entire contents of the package are available (i.e. the whole `publish` folder from your build) and pass
the deployment manifest as the file to sign - the rest of the files will be detected and signed in the proper order automatically.
You can also re-sign just the deployment manifest in case you want to e.g. change the Deployment URL but leave the rest of the contents the
same. To do this, pass the deployment manifest as the file to sign as in the case above, but just don't have the rest of the files
present on-disk alongside it. This tool will detect that they're missing and just update the signature on the deployment manifest.
Note that this is strictly for re-signing an already-signed deployment manifest - you cannot have a signed deployment manifest that
points to an un-signed application manifest. You must also take care to sign all manifests with the same certificate otherwise the application
will not install.
You should also use the `filter` parameter with the file list to sign, something like this:
```
**/ProjectAddIn1.*
**/setup.exe
```
## Best Practices
* Create a [ServicePrincipal with minimum permissions](https://learn.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal). Note that you do not need to assign any subscription-level roles to this identity. Only access to Key Vault is required.
* Follow [Best practices for using Azure Key Vault](https://learn.microsoft.com/azure/key-vault/general/best-practices). The Premium SKU is required for code signing certificates to meet key storage requirements.
* If using Azure role-based access control (RBAC), [configure your signing account to have these roles](https://learn.microsoft.com/azure/key-vault/general/rbac-guide?tabs=azure-portal):
- Key Vault Reader
- Key Vault Crypto User
* If using Azure Key Vault access policies, [configure an access policy](https://learn.microsoft.com/azure/key-vault/general/assign-access-policy?tabs=azure-portal) for your signing account to have minimal permissions:
- Key permissions
- Cryptographic Operations
- Sign
- Key Management Operations
- Get _(Note: this is only for the public key not the private key.)_
- Certificate permissions
- Certificate Management Operations
- Get
* Isolate signing operations in a separate leg of your build pipeline.
* Ensure that this CLI and all input and output files are in a directory under your control.
* Execute this CLI as a standard user. Elevation is not required.
* Use [OIDC authentication from your GitHub Action to Azure](https://learn.microsoft.com/azure/developer/github/connect-from-azure?tabs=azure-portal%2Cwindows#use-the-azure-login-action-with-openid-connect).
## Sample Workflows
* [Azure DevOps Pipelines](./docs/azdo-build-and-sign.yml)
* [GitHub Actions](./docs/gh-build-and-sign.yml)
Code signing is a complex process that may involve multiple signing formats and artifact types. Some artifacts are containers that contain other signable file types. For example, NuGet Packages (`.nupkg`) frequently contain `.dll` files. The signing tool will sign all files inside-out, starting with the most nested files and then the outer files, ensuring everything is signed in the correct order.
Signing `.exe`/`.dll` files and other Authenticode file types is only possible on Windows at this time. The recommended solution is to build on one agent and sign on another using jobs or stages where the signing steps run on Windows. Running code signing in a separate stage ensures secrets aren't exposed to the build stage.
### Build Variables
The following information is needed for the signing build:
* `Tenant Id` Azure AD tenant
* `Client Id` / `Application Id` ServicePrincipal identifier
* `Key Vault Url` Url to Key Vault. Must be a Premium SKU for EV code signing certificates and all certificates issued after June 2023
* `Certificate Id` Id of the certificate in Key Vault.
* `Client Secret` for Azure DevOps Pipelines
* `Subscription Id` for GitHub Actions
## Creating a code signing certificate in Azure Key Vault
Code signing certificates must use the `RSA-HSM` key type to ensure the private keys are stored in a FIPS 140-2 compliant manner. While you can import a certificate from a PFX file, if available, the most secure option is to create a new Certificate Signing Request to provide to your certificate authority, and then merge in the public certificate they issue. Detailed steps are available [here](https://learn.microsoft.com/answers/questions/732422/ev-code-signing-with-azure-keyvault-and-azure-pipe).
## Migrating from the legacy code signing service
If you've been using the legacy code signing service, using `SignClient.exe` to upload files for signing, you can use your existing certificate and Key Vault with this new tool. You will need to create a new ServicePrincipal and assign it permissions as described above.
## FAQ
### What signature algorithms are supported?
At this time, only RSA PKCS #1 v1.5 is supported.
ECDSA is not supported. Not only do some signature providers not support ECDSA, [the Microsoft Trusted Root Program does not support ECDSA code signing.](https://learn.microsoft.com/security/trusted-root/program-requirements#b-signature-requirements)
> **Please Note**: Signatures using elliptic curve cryptography (ECC), such as ECDSA, aren't supported in Windows and newer Windows security features. Users utilizing these algorithms and certificates will face various errors and potential security risks. The Microsoft Trusted Root Program recommends that ECC/ECDSA certificates shouldn't be issued to subscribers due to this known incompatibility and risk.
## Useful Links
* [Issue Triage Policy](triage-policy.md)
================================================
FILE: SECURITY.md
================================================
## Security
Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/).
Microsoft serves as the primary maintainer of this repository. If you believe you have found a security vulnerability that meets [Microsoft's definition of a security vulnerability](https://aka.ms/opensource/security/definition), please report it to us as described below.
## Reporting Security Issues
**Please do not report security vulnerabilities through public GitHub issues.**
Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://aka.ms/opensource/security/create-report).
If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://aka.ms/opensource/security/pgpkey).
You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://aka.ms/opensource/security/msrc).
Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue:
* Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.)
* Full paths of source file(s) related to the manifestation of the issue
* The location of the affected source code (tag/branch/commit or direct URL)
* Any special configuration required to reproduce the issue
* Step-by-step instructions to reproduce the issue
* Proof-of-concept or exploit code (if possible)
* Impact of the issue, including how an attacker might exploit the issue
This information will help us triage your report more quickly.
If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://aka.ms/opensource/security/bounty) page for more details about our active programs.
## Preferred Languages
We prefer all communications to be in English.
## Policy
Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://aka.ms/opensource/security/cvd).
================================================
FILE: SdkTools.props
================================================
$(MSBuildProgramFiles32)\Microsoft SDKs\Windows\v10.0A\bin\NETFX 4.8 Tools%WINDIR%\System32\WindowsPowerShell\v1.0\powershell.exe$(RepositoryRootDirectory)\scripts\UpdateWintrust.ps1truetools\$(TargetFramework)\any\tools\SDK\x64falsetruetools\$(TargetFramework)\any\tools\SDK\x64falsetruetools\$(TargetFramework)\any\tools\SDK\x86false
================================================
FILE: THIRD-PARTY-NOTICES.txt
================================================
.NET Core uses third-party libraries or other resources that may be
distributed under licenses different than the .NET Core software.
Attributions and license notices for test cases originally authored by
third parties can be found in the respective test directories.
In the event that we accidentally failed to list a required notice, please
bring it to our attention. Post an issue or email us:
dotnet@microsoft.com
The attached notices are provided for information only.
License notice for .NET Reference Source
-------------------------------
The MIT License (MIT)
Copyright (c) Microsoft Corporation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Available at https://github.com/microsoft/referencesource/blob/master/LICENSE.txt
License notice for Azure SDK for .NET
-------------------------------
The MIT License (MIT)
Copyright (c) 2015 Microsoft
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Available at https://github.com/Azure/azure-sdk-for-net/blob/main/LICENSE.txt
License notice for FiddlerCert
-------------------------------
The MIT License (MIT)
Copyright (c) 2015 Kevin Jones
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Available at https://github.com/vcsjones/FiddlerCert/blob/main/license.txt
License notice for Wyam
-------------------------------
The MIT License (MIT)
Copyright (c) 2014 Dave Glick
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Available at https://github.com/Wyamio/Wyam/blob/develop/LICENSE
License notice for OpenOpcSignTool
-------------------------------
MIT License
Copyright (c) 2017 Kevin Jones
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Available at https://github.com/vcsjones/OpenOpcSignTool/blob/main/LICENSE
License notice for NuGetKeyVaultSignTool
-------------------------------
The MIT License (MIT)
Copyright (c) Claire Novotny
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
Available at https://github.com/novotnyllc/NuGetKeyVaultSignTool/blob/main/LICENSE
================================================
FILE: docs/artifact-signing-integration.md
================================================
# Artifact Signing integration for Sign CLI
This document explains how to use the Sign CLI with an Artifact Signing account to perform code signing using the Artifact Signing provider. See `docs/signing-tool-spec.md` for higher-level background of this tool and the implementation at `src/Sign.SignatureProviders.TrustedSigning` for details.
## Overview
The Sign CLI includes an `artifact-signing` provider that invokes the Artifact Signing service to obtain certificates and perform remote sign operations. The CLI uses the Azure SDK (`Azure.Identity`) for authentication.
Key concepts for this provider:
- Endpoint: the service URL for the Artifact Signing account.
- Account name: the account within the Artifact Signing service.
- Certificate profile: the certificate profile configured in the account that will be used to sign.
For more information, see the Artifact Signing [setup documentation](https://learn.microsoft.com/azure/artifact-signing/quickstart).
## Prerequisites
- An Azure subscription and an Artifact Signing account with at least one active certificate profile.
- An identity (user, service principal, or managed identity) that has the `Artifact Signing Certificate Profile Signer` permission to perform signing.
## How the CLI authenticates
Sign CLI uses Azure.Identity's credential chain by default (DefaultAzureCredential). This means the CLI will try an authentication flow automatically (Azure CLI login, environment variables for a service principal, managed identity, etc.). You may also explicitly choose a credential type with `--azure-credential-type`.
## CLI options for Artifact Signing
The Artifact Signing subcommand is `sign code artifact-signing` and it requires the following options (short forms shown):
- `--artifact-signing-endpoint`, `-ase` : the Artifact Signing service endpoint (URL).
- `--artifact-signing-account`, `-asa` : the account name in the Artifact Signing service.
- `--artifact-signing-certificate-profile`, `-ascp` : the certificate profile name to use for signing.
The Azure authentication options are available on the same command and include `--azure-credential-type` (`-act`) and managed identity options such as `--managed-identity-client-id` (`-mici`). By default, the CLI uses DefaultAzureCredential.
## Examples
Replace placeholders with your values.
Example — sign a file using your current Azure CLI login (DefaultAzureCredential):
```powershell
# Ensure you're signed into Azure CLI
az login
# Sign a file using Artifact Signing
sign code artifact-signing `
-ase https:// `
-asa `
-ascp `
C:\path\to\artifact.dll
```
Example — service principal (PowerShell session variables; prefer secrets or pipeline variables in CI):
```powershell
$env:AZURE_CLIENT_ID = 'your-client-id'
$env:AZURE_TENANT_ID = 'your-tenant-id'
sign code artifact-signing `
-ase https:// `
-asa `
-ascp `
C:\path\to\artifact.dll
```
Example — managed identity (useful for Azure-hosted agents):
```powershell
# Use managed identity by selecting the credential type explicitly and, if needed, the client id
sign code artifact-signing `
-ase https:// `
-asa `
-ascp `
-act managed-identity `
-mici `
C:\path\to\artifact.dll
```
Notes:
- If you omit `-act`, the CLI uses DefaultAzureCredential, which already supports Azure CLI, environment variables for service principals, managed identities, and workload identity flows.
- The endpoint URL and exact account/profile names are provided by your Artifact Signing onboarding or Azure portal.
## CI/CD integration tips
- Prefer federated identity (OIDC) or managed identities for CI agents to avoid long-lived secrets. Sign CLI supports workload and managed identity credential flows.
- Store any required values (endpoint, account, certificate profile) as pipeline secrets or protected variables.
## Troubleshooting
- Authentication errors: verify the authentication method (Azure CLI login, environment variables, or managed identity) and that the identity has permission to the Artifact Signing account.
- Permission errors: ensure your principal has the necessary rights on the Artifact Signing account and certificate profile. If unsure, contact your Azure admin or the team that provisioned the Artifact Signing account.
- Endpoint/profile not found: confirm the exact endpoint URL, account name, and certificate profile name from your Artifact Signing account metadata or onboarding docs.
- See the [Artifact Signing FAQ](https://learn.microsoft.com/azure/artifact-signing/faq) for more information.
## Where to look in this repository
- Implementation of the provider: `src/Sign.SignatureProviders.ArtifactSigning` (see `ArtifactSigningService.cs`, `RSAArtifactSigning.cs` and `ArtifactSigningServiceProvider.cs`).
- CLI wiring: `src/Sign.Cli/ArtifactSigningCommand.cs` (shows required flags and how Azure credentials are constructed).
================================================
FILE: docs/azdo-build-and-sign.yml
================================================
trigger:
- main
- rel/*
pr:
- main
- rel/*
stages:
- stage: Build
jobs:
- job: Build
pool:
vmImage: ubuntu-latest
variables:
BuildConfiguration: Release
steps:
# Build steps
- task: UseDotNet@2
displayName: 'Use .NET SDK 6.x'
inputs:
version: 6.x
- task: DotNetCoreCLI@2
inputs:
command: pack
packagesToPack: src/AClassLibrary/AClassLibrary.csproj
configuration: $(BuildConfiguration)
packDirectory: $(Build.ArtifactStagingDirectory)/Packages
verbosityPack: Minimal
displayName: Build Package
# Publish the artifacts to sign and the file list, if any, as artifacts for the signing stage
- publish: $(Build.ArtifactStagingDirectory)/Packages
displayName: Publish Build Artifacts
artifact: BuildPackages
- publish: config
displayName: Publish signing file list
artifact: config
- stage: CodeSign
dependsOn: Build
condition: and(succeeded('Build'), not(eq(variables['build.reason'], 'PullRequest'))) # Skip this stage for pull request builds; it runs only for pushes (e.g., to main or rel/* branches)
jobs:
- job: CodeSign
displayName: Code Signing
pool:
vmImage: windows-latest # Code signing must run on a Windows agent for Authenticode signing (dll/exe)
variables:
- group: Sign Client Credentials # This is a variable group with secrets in it
steps:
# Retrieve unsigned artifacts and file list
- download: current
artifact: config
displayName: Download signing file list
- download: current
artifact: BuildPackages
displayName: Download build artifacts
- task: UseDotNet@2
displayName: 'Use .NET SDK 6.x'
inputs:
version: 6.x
# Install the code signing tool
- task: DotNetCoreCLI@2
inputs:
command: custom
custom: tool
arguments: install --tool-path . sign --version 0.9.0-beta.23127.3
displayName: Install SignTool tool
# Run the signing command
- pwsh: |
.\sign code azure-key-vault `
"**/*.nupkg" `
--base-directory "$(Pipeline.Workspace)\BuildPackages" `
--file-list "$(Pipeline.Workspace)\config\filelist.txt" `
--publisher-name "Contoso" `
--description "One Sign CLI demo" `
--description-url "https://github.com/dotnet/sign" `
--azure-key-vault-tenant-id "$(SignTenantId)" `
--azure-key-vault-client-id "$(SignClientId)" `
--azure-key-vault-client-secret '$(SignClientSecret)' `
--azure-key-vault-certificate "$(SignKeyVaultCertificate)" `
--azure-key-vault-url "$(SignKeyVaultUrl)"
displayName: Sign packages
# Publish the signed packages
- publish: $(Pipeline.Workspace)/BuildPackages
displayName: Publish Signed Packages
artifact: SignedPackages
================================================
FILE: docs/comparisons.md
================================================
# Signing Comparisons
## NuGet
The following tables summarize differences between the NuGet, dotnet, and Sign CLIs.
### Features
Feature | NuGet CLI | dotnet CLI | Sign CLI
-- | -- | -- | --
Use signing certificate from the file system | ✔️ | ✔️ | ❌
Use signing certificate from a local store | ✔️ | ✔️ | ❌
Use signing certificate from Azure Key Vault | ❌ | ❌ | ✔️
Identify signing certificate by fingerprint | ✔️ | ✔️ | ❌
Identify signing certificate by subject name | ✔️ | ✔️ | ❌
Identify signing certificate by name (user-defined) | ❌ | ❌ | ✔️
Can skip timestamping | ✔️ | ✔️ | ❌
Opt-in required to overwrite already signed package | ✔️ | ✔️ | ❌
Can sign files (e.g.: *.dll) inside package | ❌ | ❌ | ✔️
Can verify signed package | ✔️ | ✔️ | ❌
### Platform support
Platform | NuGet CLI | dotnet CLI | Sign CLI
-- | -- | -- | --
Windows x86 | ✔️ | ✔️ | ❌
Windows x64 | ✔️ | ✔️ | ✔️
Windows ARM64 | ❌ | ✔️ | ❌
Linux | ❌ | ✔️* | ❌
macOS | ❌ | ✔️* | ❌
\* NuGet signs packages not files within a package (e.g.: DLL's). On every platform where signing is supported, it is possible to sign a package that contains signable files which are unsigned. Because Authenticode signing is only available on Windows, signing a NuGet package on Linux or macOS can more easily result in a signed package with unsigned files inside. See https://github.com/NuGet/Home/issues/12362.
### Requirements
Requirement | NuGet CLI | dotnet CLI | Sign CLI
-- | -- | -- | --
.NET Framework | ✔️ (>= 4.7.2) | ❌ | ❌
.NET SDK | ❌ | ✔️ (>= 5 on Windows, >= 7 on Linux, N/A on macOS) | ❌
.NET Runtime | ❌ | ❌ | ✔️ (>= 6)
## References
* [sign command (NuGet CLI)](https://learn.microsoft.com/en-us/nuget/reference/cli-reference/cli-ref-sign)
* [dotnet nuget sign](https://learn.microsoft.com/en-us/dotnet/core/tools/dotnet-nuget-sign)
================================================
FILE: docs/file-globbing.md
================================================
# File List Filtering and Globbing
The `code` signing command supports the `--file-list` or `-fl` option. This option specifies a file that contains paths of files to sign or to exclude from signing.
When using the file list option you must use a path relative to the working directory (or base directory, if used). You can change the base directory using `--base-directory` or `-b`.
Example:
`sign.exe code certificate-store -cf test.pfx -fl F:\Sign\file_sign_list.txt *`
## File List Format
You can provide a list of string patterns (one pattern per line) which describe files to include or exclude, or literal file paths. Filtering uses globbing, and supports advanced features such as brace expansion and negation.
The following is supported:
* Standard globbing: `*`, `?`, `**` wildcards.
* Brace expansion: `{a,b}` expands to both `a` and `b`.
- Nested braces also work: `a{b,c{d,e}f}g` expands to `abg`, `acdfg`, `acefg`
* Numeric ranges: `{1..3}` expands to `1`, `2`, `3`.
* Negation: Patterns starting with `!` exclude files matching that pattern.
* Escaping: Use `\{`, `\}`, or `\!` to treat these characters literally.
## Pattern Examples
| Pattern | Description | Matches Example(s) |
|------------------------|------------------------------------------|------------------------------|
|`File.appx` | Include `File.appx` | `File.appx` |
|`!Installer.msix` | Exclude `Installer.msix` | excludes `Installer.msix` |
|`*.txt` | All `.txt` files in the current directory | `file.txt`, `notes.txt` |
|`**/*.cs` | All `.cs` files in all subdirectories | `src/Program.cs` |
|`docs/{README,HELP}.md` | `docs/README.md` and `docs/HELP.md` | `docs/README.md`, `docs/HELP.md` |
|`images/*.{png,jpg}` | All `.png` and `.jpg` files in images | `images/a.png`, `images/b.jpg` |
|`file{1..3}.log` | `file1.log`, `file2.log`, `file3.log` | `file2.log` |
|`!bin/**` | Exclude everything under `bin` directory | excludes `bin/Debug/app.exe` |
|`foo/\{bar\}.txt` | Matches the literal file `foo/{bar}.txt` | `foo/{bar}.txt` |
|`!**/obj/**` | Exclude all files in any `obj` directory | excludes `foo/obj/out.log` |
================================================
FILE: docs/gh-build-and-sign.yml
================================================
name: Build and Sign
on:
push:
branches: [ "main" ]
pull_request:
branches: [ "main" ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
# Build steps
- name: Setup .NET
uses: actions/setup-dotnet@v3
with:
dotnet-version: 6.x
- name: Build Package
run: dotnet pack --configuration Release src/AClassLibrary/AClassLibrary.csproj
# Publish the artifacts to sign and the file list, if any, as artifacts for the signing stage
- name: Upload signing file list
uses: actions/upload-artifact@v3
with:
name: config
path: config
- name: Upload build artifacts
uses: actions/upload-artifact@v3
with:
name: BuildArtifacts
path: src/AClassLibrary/bin/Release/**/*.nupkg
sign:
needs: build
runs-on: windows-latest # Code signing must run on a Windows agent for Authenticode signing (dll/exe)
if: ${{ github.ref == 'refs/heads/main' }} # Only run this job on pushes to the main branch
permissions:
id-token: write # Required for requesting the JWT
steps:
# Download signing configuration and artifacts
- name: Download signing config
uses: actions/download-artifact@v3
with:
name: config
path: config
- name: Download build artifacts
uses: actions/download-artifact@v3
with:
name: BuildArtifacts
path: BuildArtifacts
# .NET is required on the agent for the tool to run
- name: Setup .NET
uses: actions/setup-dotnet@v3
with:
dotnet-version: '9.x'
# Install the code signing tool
- name: Install Sign CLI tool
run: dotnet tool install --tool-path . --prerelease sign
# Login to Azure using a ServicePrincipal configured to authenticate against GitHub Actions
- name: 'Az CLI login'
uses: azure/login@v1
with:
allow-no-subscriptions: true
client-id: ${{ secrets.AZURE_CLIENT_ID }} # This does not need to be a secret and is just a placeholder
tenant-id: ${{ secrets.AZURE_TENANT_ID }} # This does not need to be a secret and is just a placeholder
# Run the signing command
- name: Sign artifacts
shell: pwsh
run: >
./sign code azure-key-vault
**/*.nupkg
--base-directory "${{ github.workspace }}/BuildArtifacts"
--file-list "${{ github.workspace }}/config/filelist.txt"
--publisher-name "Contoso"
--description "One Sign CLI demo"
--description-url "https://github.com/dotnet/sign"
--azure-credential-type "azure-cli"
--azure-key-vault-url "${{ secrets.KEY_VAULT_URL }}" # This does not need to be a secret and is just a placeholder
--azure-key-vault-certificate "${{ secrets.KEY_VAULT_CERTIFICATE_ID }}" # This does not need to be a secret and is just a placeholder
# Publish the signed packages
- name: Upload build artifacts
uses: actions/upload-artifact@v3
with:
name: SignedArtifacts
path: BuildArtifacts
================================================
FILE: docs/signing-tool-spec.md
================================================
# Signing CLI tool
## Background
Code signing is a way to provide tamper detection to binary files and provide a way of establishing identity. There are different code signing mechanisms, but the most common on Windows and .NET are based on X.509 certificates.
There are several technology areas within the Windows and .NET ecosystem that support code signing:
- PE files & certain scripts via Authenticode (dll, exe, ps1, sys)
- MSIX via Authenticode (msix, msixbundle) & related manifests
- Visual Studio Extensions (VSIX) via Open Packaging Convention
- ClickOnce & VSTO via Mage (XML Digital Signatures)
- NuGet Packages
Today each of these areas has their own tools (SignTool, VISXSignTool, Mage, NuGet) to create signatures. Each tool has its own set of parameters and are written to assume use of the local certificate store API's by default. Without a shared implementation, a new code signing requirement can require individual updates to each tool. In May 2022, the CA/Browser Forum updated its [baseline requirements for publicly trusted code signing certificates](https://cabforum.org/wp-content/uploads/Baseline-Requirements-for-the-Issuance-and-Management-of-Code-Signing.v3.2.pdf) to require that all new code signing certificates issued after June 2023 use hardware security modules (HSM's) to prevent private key theft. While some HSM's contain CSP/KSP support to expose certificates through Windows' certificate store API's, they frequently contain significant limitations, such as requiring an interactive session to authenticate to the device. This makes signing code in the cloud and on build agents extremely difficult for mainline scenarios.
There are many HSM cloud services, including Azure Key Vault, that meet the updated key storage requirements, however we do not have first-party support for signing code with those services. There are open source community solutions to fill this gap, such as:
* [AzureSignTool](https://github.com/vcsjones/AzureSignTool)
* [NuGetKeyVaultSignTool](https://github.com/novotnyllc/NuGetKeyVaultSignTool)
* [OpenOpcSignTool](https://github.com/vcsjones/OpenOpcSignTool)
The [.NET Foundation Signing Service](https://github.com/dotnet/sign/tree/legacy-service/servicing) builds on these solutions, adds additional supported file formats, and orchestrates signing the various file types in the right order.
While existing community solutions help, they leave the complicated work of signing the files in the right order to each user and support only Azure Key Vault. With the [announcement](https://techcommunity.microsoft.com/t5/security-compliance-and-identity/azure-code-signing-democratizing-trust-for-developers-and/ba-p/3604669) of Azure Code Signing and the move towards HSM's, there's a need to support multiple code signing providers in our signing tools.
## Challenges
There are a few challenges around code signing:
### Local Certificates
Today the code signing tooling in the Windows and .NET SDK uses PFX files (public/private certificate key pairs) or the local certificate store for obtaining certificates. There are risks to this approach:
- PFX files are targets in data breaches; their passwords can be cracked
- Certificates in a local store can be used by any app/malware
- There’s no revocation mechanism for a user's access to the certificate; they always have it
- No auditing of signing operations possible
- EV code signing certificates aren’t easily supported as they require FIPS 140-2 hardware devices with drivers
In May 2022, the CA/Browser Forum updated its baseline requirements to require HSM's so local certificates will no longer be issued for publicly trusted code signing certificates. The only support the current signing tools have for this scenario is via CSP/KSP drivers provided by some HSM vendors, and those do not work well for cloud-based build agents. The current tools would need new investment to support different backends.
### Orchestration
An application/library package typically contains multiple assets that need to be signed. For example, a NuGet package (`.nupkg` file) contains `.dll` files that also must be signed. A ClickOnce or MSIX package also contains `.dll` or `.exe` files that need to be signed. A `.vsix` file can contain `.dll` files and `.nupkg` file that must be signed. These files need to be signed "inside out" to ensure the proper sequence. That is, a `.vsix` containing a `.nupkg` needs to extract the inner `.nupkg` to sign the contained `.dll` files, then sign the `.nupkg` and any other `.dll` files, then repack and sign the `.vsix`. Other types, like ClickOnce and MSIX may contain manifest files that also must be updated during these operations.
To properly sign all assets, multiple signing tools must be used, and each tool has its own command line syntax, options, and default. The process of code signing is error-prone and hard to get right. The signing tool addresses these challenges by unifying the interface into a single set of options.
## Proposal
Create a modern signing tool to eventually replace the existing tools. The tool will handle all of our first party signing formats, orchestrate signing files in the right order, and have extensibility to support multiple raw signature providers. As our customers use a variety of clouds and HSM's, the extensibility will enable us to meet our customers' needs wherever they store their certificates.
While some of this could be done via an MSBuild task, a CLI tool is preferable to MSBuild tasks for a couple reasons:
- **Performance:** During a build, many binary artifacts are created that need to be signed. A multi-targeted NuGet package may contain several `.dll` files. An application will likely contain more than one file that needs to be signed. It's much more efficient to pass them all to a signing tool where parallelism is possible than to sign during the inner-loop.
- **Security:** Code signing is a sensitive operation that requires credentials/secrets. Use of these secrets should be as limited as possible to prevent leakage into the rest of a build pipeline, such as log files or unrelated build tasks. Ideally, a CI pipeline should contain a separate stage for code signing to ensure that credentials are never unintentionally exposed to a build stage.
- **Platform:** Authenticode is currently limited to Windows. Thus, while it's possible to sign a NuGet or VSIX cross-platform, the DLL's inside can't be signed unless running on Windows. With the NuGet packages being developer-only artifacts--they're not shipped with the apps--it's critical that the DLL's inside are also signed. Builds for binaries may run on any platform, but as signing is a discrete step in most CI pipelines, it's reasonable to require a Windows build agent for this task.
### Roadmap
The scope of the preview release will be limited to the existing functionality currently in the service. The remaining functionality in this spec will be delivered in a later 1.0 release. The .NET Foundation has a dependency on this tool being delivered by [June 30, 2023](https://learn.microsoft.com/en-us/answers/questions/768833/when-is-adal-and-azure-ad-graph-reaching-end-of-li.html).
#### Preview
**Goals**
- Support for Authenticode, VSIX, NuGet (author signature), ClickOnce
- Only run on Windows x64.
- Support a single certificate for all files in the operation.
**Non-Goals**
- Strong Name signing won't be in v1; guidance is to use an snk not based on a cert. If easy, perhaps can revisit.
- Containers, including Notary v2 support.
- Extensibility. v1 will support different signing providers.
- Support Authenticode on platforms other than Windows x64. Future work will be required to support ARM64 and non-Windows hosts. Support for certain file types may be limited due to platform support.
- Offline distribution.
#### v1
**Goals**
- Extensibility mechanism to support different code signing providers with a dynamic lookup so the core client remains agnostic of the backend
- Offline distribution for core plus backend provider
- Three providers: Certificate Store, Azure Key Vault, Azure Code Signing
- Support for additional formats: [.HLKX](https://github.com/dotnet/sign/issues/422), [VBA](https://github.com/dotnet/sign/issues/364)
- Verification of signatures
================================================
FILE: eng/PoliCheckExclusions.xml
================================================
.DOTNET
================================================
FILE: eng/Signing.props
================================================
true
================================================
FILE: eng/Version.Details.xml
================================================
https://github.com/dotnet/arcade58713cb9a664ed67642127fcaf70b8c0c3b55ef2
================================================
FILE: eng/Versions.props
================================================
0.9.1betatrue6.0.04.6.36.1.31.1.14.5.56.0.19.0.0-beta.24223.14.3.02.0.36.13.26.13.26.13.26.13.26.13.25.0.06.0.46.0.46.0.49.0.100-baseline.1.23464.16.0.48.0.0-preview.6.23326.26.0.36.0.46.0.216.0.2215.2.302-preview.14.12216.0.5279.0.0-preview.6.24327.717.5.0
================================================
FILE: eng/common/BuildConfiguration/build-configuration.json
================================================
{
"RetryCountLimit": 1,
"RetryByAnyError": false
}
================================================
FILE: eng/common/CIBuild.cmd
================================================
@echo off
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -build -test -sign -pack -publish -ci %*"
================================================
FILE: eng/common/PSScriptAnalyzerSettings.psd1
================================================
@{
IncludeRules=@('PSAvoidUsingCmdletAliases',
'PSAvoidUsingWMICmdlet',
'PSAvoidUsingPositionalParameters',
'PSAvoidUsingInvokeExpression',
'PSUseDeclaredVarsMoreThanAssignments',
'PSUseCmdletCorrectly',
'PSStandardDSCFunctionsInResource',
'PSUseIdenticalMandatoryParametersForDSC',
'PSUseIdenticalParametersForDSC')
}
================================================
FILE: eng/common/README.md
================================================
# Don't touch this folder
uuuuuuuuuuuuuuuuuuuu
u" uuuuuuuuuuuuuuuuuu "u
u" u$$$$$$$$$$$$$$$$$$$$u "u
u" u$$$$$$$$$$$$$$$$$$$$$$$$u "u
u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
$ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
$ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
$ $$$" ... "$... ...$" ... "$$$ ... "$$$ $
$ $$$u `"$$$$$$$ $$$ $$$$$ $$ $$$ $$$ $
$ $$$$$$uu "$$$$ $$$ $$$$$ $$ """ u$$$ $
$ $$$""$$$ $$$$ $$$u "$$$" u$$ $$$$$$$$ $
$ $$$$....,$$$$$..$$$$$....,$$$$..$$$$$$$$ $
$ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
"u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
"u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
"u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
"u "$$$$$$$$$$$$$$$$$$$$$$$$" u"
"u "$$$$$$$$$$$$$$$$$$$$" u"
"u """""""""""""""""" u"
""""""""""""""""""""
!!! Changes made in this directory are subject to being overwritten by automation !!!
The files in this directory are shared by all Arcade repos and managed by automation. If you need to make changes to these files, open an issue or submit a pull request to https://github.com/dotnet/arcade first.
================================================
FILE: eng/common/SetupNugetSources.ps1
================================================
# This script adds internal feeds required to build commits that depend on internal package sources. For instance,
# dotnet6-internal would be added automatically if dotnet6 was found in the nuget.config file. Similarly,
# dotnet-eng-internal and dotnet-tools-internal are added if dotnet-eng and dotnet-tools are present.
# In addition, this script also enables disabled internal Maestro (darc-int*) feeds.
#
# Optionally, this script also adds a credential entry for each of the internal feeds if supplied.
#
# See example call for this script below.
#
# - task: PowerShell@2
# displayName: Setup internal Feeds Credentials
# condition: eq(variables['Agent.OS'], 'Windows_NT')
# inputs:
# filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.ps1
# arguments: -ConfigFile $(System.DefaultWorkingDirectory)/NuGet.config -Password $Env:Token
# env:
# Token: $(dn-bot-dnceng-artifact-feeds-rw)
#
# Note that the NuGetAuthenticate task should be called after SetupNugetSources.
# This ensures that:
# - Appropriate creds are set for the added internal feeds (if not supplied to the script)
# - The credential provider is installed.
#
# This logic is also abstracted into enable-internal-sources.yml.
[CmdletBinding()]
param (
[Parameter(Mandatory = $true)][string]$ConfigFile,
$Password
)
$ErrorActionPreference = "Stop"
Set-StrictMode -Version 2.0
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
. $PSScriptRoot\tools.ps1
# Adds or enables the package source with the given name
function AddOrEnablePackageSource($sources, $disabledPackageSources, $SourceName, $SourceEndPoint, $creds, $Username, $pwd) {
if ($disabledPackageSources -eq $null -or -not (EnableInternalPackageSource -DisabledPackageSources $disabledPackageSources -Creds $creds -PackageSourceName $SourceName)) {
AddPackageSource -Sources $sources -SourceName $SourceName -SourceEndPoint $SourceEndPoint -Creds $creds -Username $userName -pwd $Password
}
}
# Add source entry to PackageSources
function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Username, $pwd) {
$packageSource = $sources.SelectSingleNode("add[@key='$SourceName']")
if ($packageSource -eq $null)
{
Write-Host "Adding package source $SourceName"
$packageSource = $doc.CreateElement("add")
$packageSource.SetAttribute("key", $SourceName)
$packageSource.SetAttribute("value", $SourceEndPoint)
$sources.AppendChild($packageSource) | Out-Null
}
else {
Write-Host "Package source $SourceName already present and enabled."
}
AddCredential -Creds $creds -Source $SourceName -Username $Username -pwd $pwd
}
# Add a credential node for the specified source
function AddCredential($creds, $source, $username, $pwd) {
# If no cred supplied, don't do anything.
if (!$pwd) {
return;
}
Write-Host "Inserting credential for feed: " $source
# Looks for credential configuration for the given SourceName. Create it if none is found.
$sourceElement = $creds.SelectSingleNode($Source)
if ($sourceElement -eq $null)
{
$sourceElement = $doc.CreateElement($Source)
$creds.AppendChild($sourceElement) | Out-Null
}
# Add the node to the credential if none is found.
$usernameElement = $sourceElement.SelectSingleNode("add[@key='Username']")
if ($usernameElement -eq $null)
{
$usernameElement = $doc.CreateElement("add")
$usernameElement.SetAttribute("key", "Username")
$sourceElement.AppendChild($usernameElement) | Out-Null
}
$usernameElement.SetAttribute("value", $Username)
# Add the to the credential if none is found.
# Add it as a clear text because there is no support for encrypted ones in non-windows .Net SDKs.
# -> https://github.com/NuGet/Home/issues/5526
$passwordElement = $sourceElement.SelectSingleNode("add[@key='ClearTextPassword']")
if ($passwordElement -eq $null)
{
$passwordElement = $doc.CreateElement("add")
$passwordElement.SetAttribute("key", "ClearTextPassword")
$sourceElement.AppendChild($passwordElement) | Out-Null
}
$passwordElement.SetAttribute("value", $pwd)
}
# Enable all darc-int package sources.
function EnableMaestroInternalPackageSources($DisabledPackageSources, $Creds) {
$maestroInternalSources = $DisabledPackageSources.SelectNodes("add[contains(@key,'darc-int')]")
ForEach ($DisabledPackageSource in $maestroInternalSources) {
EnableInternalPackageSource -DisabledPackageSources $DisabledPackageSources -Creds $Creds -PackageSourceName $DisabledPackageSource.key
}
}
# Enables an internal package source by name, if found. Returns true if the package source was found and enabled, false otherwise.
# Parameters:
#   $DisabledPackageSources - the disabledPackageSources XmlElement to search
#   $Creds                  - the packageSourceCredentials XmlElement (may be $null when no password was supplied)
#   $PackageSourceName      - key of the package source to re-enable
# NOTE: $userName and $Password are script-scope variables set by the main script body.
function EnableInternalPackageSource($DisabledPackageSources, $Creds, $PackageSourceName) {
    $DisabledPackageSource = $DisabledPackageSources.SelectSingleNode("add[@key='$PackageSourceName']")
    if ($DisabledPackageSource) {
        Write-Host "Enabling internal source '$($DisabledPackageSource.key)'."
        # Due to https://github.com/NuGet/Home/issues/10291, we must actually remove the disabled entries.
        # Pipe RemoveChild to Out-Null: otherwise the removed XmlNode leaks into the function's
        # output stream and the caller receives an array instead of just the boolean below.
        $DisabledPackageSources.RemoveChild($DisabledPackageSource) | Out-Null
        AddCredential -Creds $creds -Source $DisabledPackageSource.Key -Username $userName -pwd $Password
        return $true
    }
    return $false
}
# Fail fast if the NuGet.config to edit does not exist.
if (!(Test-Path $ConfigFile -PathType Leaf)) {
    Write-PipelineTelemetryError -Category 'Build' -Message "Eng/common/SetupNugetSources.ps1 returned a non-zero exit code. Couldn't find the NuGet config file: $ConfigFile"
    ExitWithExitCode 1
}

# Load NuGet.config
$doc = New-Object System.Xml.XmlDocument
$filename = (Get-Item $ConfigFile).FullName
$doc.Load($filename)

# Get reference to the packageSources node - fail if none exist
$sources = $doc.DocumentElement.SelectSingleNode("packageSources")
if ($sources -eq $null) {
    Write-PipelineTelemetryError -Category 'Build' -Message "Eng/common/SetupNugetSources.ps1 returned a non-zero exit code. NuGet config file must contain a packageSources section: $ConfigFile"
    ExitWithExitCode 1
}

# Credentials are only written when a password was supplied; in that case the
# internal feeds use the v2 endpoint instead of v3/index.json.
$creds = $null
$feedSuffix = "v3/index.json"
if ($Password) {
    $feedSuffix = "v2"
    # Looks for a packageSourceCredentials node. Create it if none is found.
    $creds = $doc.DocumentElement.SelectSingleNode("packageSourceCredentials")
    if ($creds -eq $null) {
        $creds = $doc.CreateElement("packageSourceCredentials")
        $doc.DocumentElement.AppendChild($creds) | Out-Null
    }
}

# Username used for every credential inserted by this script.
$userName = "dn-bot"

# Check for disabledPackageSources; we'll enable any darc-int ones we find there
$disabledSources = $doc.DocumentElement.SelectSingleNode("disabledPackageSources")
if ($disabledSources -ne $null) {
    Write-Host "Checking for any darc-int disabled package sources in the disabledPackageSources node"
    EnableMaestroInternalPackageSources -DisabledPackageSources $disabledSources -Creds $creds
}

# For each dotnetN source present in the config, add/enable the matching
# -internal and -internal-transport feeds.
$dotnetVersions = @('5','6','7','8','9','10')
foreach ($dotnetVersion in $dotnetVersions) {
    $feedPrefix = "dotnet" + $dotnetVersion;
    $dotnetSource = $sources.SelectSingleNode("add[@key='$feedPrefix']")
    if ($dotnetSource -ne $null) {
        AddOrEnablePackageSource -Sources $sources -DisabledPackageSources $disabledSources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/$feedSuffix" -Creds $creds -Username $userName -pwd $Password
        AddOrEnablePackageSource -Sources $sources -DisabledPackageSources $disabledSources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/$feedSuffix" -Creds $creds -Username $userName -pwd $Password
    }
}

# Check for dotnet-eng and add dotnet-eng-internal if present
$dotnetEngSource = $sources.SelectSingleNode("add[@key='dotnet-eng']")
if ($dotnetEngSource -ne $null) {
    AddOrEnablePackageSource -Sources $sources -DisabledPackageSources $disabledSources -SourceName "dotnet-eng-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-eng-internal/nuget/$feedSuffix" -Creds $creds -Username $userName -pwd $Password
}

# Check for dotnet-tools and add dotnet-tools-internal if present
$dotnetToolsSource = $sources.SelectSingleNode("add[@key='dotnet-tools']")
if ($dotnetToolsSource -ne $null) {
    AddOrEnablePackageSource -Sources $sources -DisabledPackageSources $disabledSources -SourceName "dotnet-tools-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal/nuget/$feedSuffix" -Creds $creds -Username $userName -pwd $Password
}

# Write the modified NuGet.config back to disk.
$doc.Save($filename)
================================================
FILE: eng/common/SetupNugetSources.sh
================================================
#!/usr/bin/env bash

# This script adds internal feeds required to build commits that depend on internal package sources. For instance,
# dotnet6-internal would be added automatically if dotnet6 was found in the nuget.config file. Similarly,
# dotnet-eng-internal and dotnet-tools-internal are added if dotnet-eng and dotnet-tools are present.
# In addition, this script also enables disabled internal Maestro (darc-int*) feeds.
#
# Optionally, this script also adds a credential entry for each of the internal feeds if supplied.
#
# See example call for this script below.
#
#  - task: Bash@3
#    displayName: Setup Internal Feeds
#    inputs:
#      filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.sh
#      arguments: $(System.DefaultWorkingDirectory)/NuGet.config
#    condition: ne(variables['Agent.OS'], 'Windows_NT')
#  - task: NuGetAuthenticate@1
#
# Note that the NuGetAuthenticate task should be called after SetupNugetSources.
# This ensures that:
# - Appropriate creds are set for the added internal feeds (if not supplied to the script)
# - The credential provider is installed.
#
# This logic is also abstracted into enable-internal-sources.yml.

# Positional arguments: path to the NuGet.config to modify and an optional credential token.
ConfigFile=$1
CredToken=$2
# Newline and indent fragments used when splicing XML into the config file with sed.
NL='\n'
TB=' '

source="${BASH_SOURCE[0]}"

# resolve $source until the file is no longer a symlink
while [[ -h "$source" ]]; do
  scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
  source="$(readlink "$source")"
  # if $source was a relative symlink, we need to resolve it relative to the path where the
  # symlink file was located
  [[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"

. "$scriptroot/tools.sh"

# Fail fast if the config file to edit does not exist.
if [ ! -f "$ConfigFile" ]; then
  Write-PipelineTelemetryError -Category 'Build' "Error: Eng/common/SetupNugetSources.sh returned a non-zero exit code. Couldn't find the NuGet config file: $ConfigFile"
  ExitWithExitCode 1
fi

# BSD sed on macOS needs a literal escaped newline and no extra indent fragment.
if [[ `uname -s` == "Darwin" ]]; then
  NL=$'\\\n'
  TB=''
fi
# Enables an internal package source by name, if found. Returns 0 if found and enabled, 1 if not found.
#   $1 - package source name to look for in the disabledPackageSources section
# Side effects: edits $ConfigFile in place (sed -i.bak) and appends the name to the
# global PackageSources array so a credential is inserted for it later.
# NOTE(review): the XML literals in the grep/sed patterns were stripped by extraction
# and have been restored here from the surrounding logic — confirm against upstream.
EnableInternalPackageSource() {
  local PackageSourceName="$1"

  # Check if disabledPackageSources section exists
  grep -i "<disabledPackageSources>" "$ConfigFile" > /dev/null
  if [ "$?" != "0" ]; then
    return 1 # No disabled sources section
  fi

  # Check if this source name is disabled
  grep -i "<add key=\"$PackageSourceName\" value=\"true\"" "$ConfigFile" > /dev/null
  if [ "$?" == "0" ]; then
    echo "Enabling internal source '$PackageSourceName'."
    # Remove the disabled entry (the whole line containing it)
    sed -i.bak "/<add key=\"$PackageSourceName\" value=\"true\"/d" "$ConfigFile"
    # Add the source name to PackageSources for credential handling
    PackageSources+=("$PackageSourceName")
    return 0 # Found and enabled
  fi

  return 1 # Not found in disabled sources
}
# Add source entry to PackageSources
#   $1 - package source name
#   $2 - package source endpoint URL
# Appends an <add> element to the packageSources section unless an entry with the
# same key already exists, and records the name in the global PackageSources array.
# NOTE(review): the XML literals in the grep/sed patterns were stripped by extraction
# and have been restored here from the surrounding logic — confirm against upstream.
AddPackageSource() {
  local SourceName="$1"
  local SourceEndPoint="$2"

  # Check if source already exists
  grep -i "<add key=\"$SourceName\"" "$ConfigFile" > /dev/null
  if [ "$?" == "0" ]; then
    echo "Package source $SourceName already present and enabled."
    PackageSources+=("$SourceName")
    return
  fi

  echo "Adding package source $SourceName"
  PackageSourcesNodeFooter="</packageSources>"
  PackageSourceTemplate="${TB}<add key=\"$SourceName\" value=\"$SourceEndPoint\" />"

  # Splice the new entry in just before the closing packageSources tag.
  sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" "$ConfigFile"
  PackageSources+=("$SourceName")
}
# Adds or enables the package source with the given name.
#   $1 - package source name
#   $2 - package source endpoint URL
AddOrEnablePackageSource() {
  local name="$1"
  local endpoint="$2"

  # Prefer re-enabling a matching disabled entry; only append a brand new source
  # entry when no disabled entry was found.
  if ! EnableInternalPackageSource "$name"; then
    AddPackageSource "$name" "$endpoint"
  fi
}
# Enable all darc-int package sources found in the disabledPackageSources section.
# NOTE(review): the XML literal in the first grep was stripped by extraction and has
# been restored here — confirm against upstream.
EnableMaestroInternalPackageSources() {
  # Check if disabledPackageSources section exists
  grep -i "<disabledPackageSources>" "$ConfigFile" > /dev/null
  if [ "$?" != "0" ]; then
    return # No disabled sources section
  fi

  # Find all darc-int disabled sources
  local DisabledDarcIntSources=()
  DisabledDarcIntSources+=$(grep -oh '"darc-int-[^"]*" value="true"' "$ConfigFile" | tr -d '"')
  # Word splitting yields alternating 'darc-int-...' and 'value=true' tokens;
  # the prefix check below keeps only the source names.
  for DisabledSourceName in ${DisabledDarcIntSources[@]} ; do
    if [[ $DisabledSourceName == darc-int* ]]; then
      EnableInternalPackageSource "$DisabledSourceName"
    fi
  done
}
# Ensure there is a <packageSources>...</packageSources> section.
# NOTE(review): the XML literals in the grep patterns and sed templates below were
# stripped by extraction and have been restored — confirm against upstream.
grep -i "<packageSources>" $ConfigFile
if [ "$?" != "0" ]; then
  Write-PipelineTelemetryError -Category 'Build' "Error: Eng/common/SetupNugetSources.sh returned a non-zero exit code. NuGet config file must contain a packageSources section: $ConfigFile"
  ExitWithExitCode 1
fi

# Names of feeds that need a credential entry appended at the end of the script.
PackageSources=()

# Set feed suffix based on whether credentials are provided
FeedSuffix="v3/index.json"
if [ -n "$CredToken" ]; then
  FeedSuffix="v2"
  # Ensure there is a <packageSourceCredentials>...</packageSourceCredentials> section.
  grep -i "<packageSourceCredentials>" $ConfigFile
  if [ "$?" != "0" ]; then
    echo "Adding <packageSourceCredentials>...</packageSourceCredentials> section."

    PackageSourcesNodeFooter="</configuration>"
    PackageSourceCredentialsTemplate="${TB}<packageSourceCredentials>${NL}${TB}</packageSourceCredentials>"

    sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceCredentialsTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
  fi
fi

# Check for disabledPackageSources; we'll enable any darc-int ones we find there
grep -i "<disabledPackageSources>" $ConfigFile > /dev/null
if [ "$?" == "0" ]; then
  echo "Checking for any darc-int disabled package sources in the disabledPackageSources node"
  EnableMaestroInternalPackageSources
fi
# For each dotnetN feed present in the config, add/enable the matching internal feeds.
# NOTE(review): the XML literals in the grep patterns below were stripped by extraction
# and have been restored — confirm against upstream.
DotNetVersions=('5' '6' '7' '8' '9' '10')
for DotNetVersion in ${DotNetVersions[@]} ; do
  FeedPrefix="dotnet${DotNetVersion}";
  grep -i "<add key=\"$FeedPrefix\"" $ConfigFile > /dev/null
  if [ "$?" == "0" ]; then
    AddOrEnablePackageSource "$FeedPrefix-internal" "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$FeedPrefix-internal/nuget/$FeedSuffix"
    AddOrEnablePackageSource "$FeedPrefix-internal-transport" "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$FeedPrefix-internal-transport/nuget/$FeedSuffix"
  fi
done

# Check for dotnet-eng and add dotnet-eng-internal if present
grep -i "<add key=\"dotnet-eng\"" $ConfigFile > /dev/null
if [ "$?" == "0" ]; then
  AddOrEnablePackageSource "dotnet-eng-internal" "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-eng-internal/nuget/$FeedSuffix"
fi

# Check for dotnet-tools and add dotnet-tools-internal if present
grep -i "<add key=\"dotnet-tools\"" $ConfigFile > /dev/null
if [ "$?" == "0" ]; then
  AddOrEnablePackageSource "dotnet-tools-internal" "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal/nuget/$FeedSuffix"
fi
# I want things split line by line
PrevIFS=$IFS
IFS=$'\n'
PackageSources+="$IFS"
# Also collect every darc-int feed referenced anywhere in the config so it gets a credential.
PackageSources+=$(grep -oh '"darc-int-[^"]*"' $ConfigFile | tr -d '"')
IFS=$PrevIFS

# NOTE(review): the XML literals in the credential template below were stripped by
# extraction and have been reconstructed to match the PowerShell script's output
# (Username=dn-bot, ClearTextPassword=$CredToken) — confirm against upstream.
if [ "$CredToken" ]; then
  for FeedName in ${PackageSources[@]} ; do
    # Check if there is no existing credential for this FeedName
    grep -i "<$FeedName>" $ConfigFile
    if [ "$?" != "0" ]; then
      echo " Inserting credential for feed: $FeedName"

      PackageSourceCredentialsNodeFooter="</packageSourceCredentials>"
      NewCredential="${TB}${TB}<$FeedName>${NL}${TB}${TB}${TB}<add key=\"Username\" value=\"dn-bot\" />${NL}${TB}${TB}${TB}<add key=\"ClearTextPassword\" value=\"$CredToken\" />${NL}${TB}${TB}</$FeedName>"

      sed -i.bak "s|$PackageSourceCredentialsNodeFooter|$NewCredential${NL}$PackageSourceCredentialsNodeFooter|" $ConfigFile
    fi
  done
fi
================================================
FILE: eng/common/build.cmd
================================================
@echo off
:: Thin wrapper that forwards all arguments to the PowerShell build script located
:: next to this file, then propagates its exit code.
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0build.ps1""" %*"
exit /b %ErrorLevel%
================================================
FILE: eng/common/build.ps1
================================================
# Entry-point arguments for the common Arcade build script. Most switches map 1:1 to
# MSBuild properties passed to the toolset build project (see the Build function below).
[CmdletBinding(PositionalBinding=$false)]
Param(
  [string][Alias('c')]$configuration = "Debug",
  [string]$platform = $null,
  [string] $projects,
  [string][Alias('v')]$verbosity = "minimal",
  [string] $msbuildEngine = $null,
  [bool] $warnAsError = $true,
  [bool] $nodeReuse = $true,
  [switch] $buildCheck = $false,
  [switch][Alias('r')]$restore,
  [switch] $deployDeps,
  [switch][Alias('b')]$build,
  [switch] $rebuild,
  [switch] $deploy,
  [switch][Alias('t')]$test,
  [switch] $integrationTest,
  [switch] $performanceTest,
  [switch] $sign,
  [switch] $pack,
  [switch] $publish,
  [switch] $clean,
  [switch][Alias('pb')]$productBuild,
  [switch]$fromVMR,
  [switch][Alias('bl')]$binaryLog,
  [switch][Alias('nobl')]$excludeCIBinarylog,
  [switch] $ci,
  [switch] $prepareMachine,
  [string] $runtimeSourceFeed = '',
  [string] $runtimeSourceFeedKey = '',
  [switch] $excludePrereleaseVS,
  [switch] $nativeToolsOnMachine,
  [switch] $help,
  # Any remaining arguments are collected here and passed through to MSBuild verbatim.
  [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
)

# Unset 'Platform' environment variable to avoid unwanted collision in InstallDotNetCore.targets file
# some computer has this env var defined (e.g. Some HP)
if($env:Platform) {
  $env:Platform=""
}
# Prints command-line help for this script.
function Print-Usage() {
  Write-Host "Common settings:"
  Write-Host " -configuration Build configuration: 'Debug' or 'Release' (short: -c)"
  Write-Host " -platform Platform configuration: 'x86', 'x64' or any valid Platform value to pass to msbuild"
  Write-Host " -verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)"
  Write-Host " -binaryLog Output binary log (short: -bl)"
  Write-Host " -help Print help and exit"
  Write-Host ""

  Write-Host "Actions:"
  Write-Host " -restore Restore dependencies (short: -r)"
  Write-Host " -build Build solution (short: -b)"
  Write-Host " -rebuild Rebuild solution"
  Write-Host " -deploy Deploy built VSIXes"
  Write-Host " -deployDeps Deploy dependencies (e.g. VSIXes for integration tests)"
  Write-Host " -test Run all unit tests in the solution (short: -t)"
  Write-Host " -integrationTest Run all integration tests in the solution"
  Write-Host " -performanceTest Run all performance tests in the solution"
  Write-Host " -pack Package build outputs into NuGet packages and Willow components"
  Write-Host " -sign Sign build outputs"
  Write-Host " -publish Publish artifacts (e.g. symbols)"
  Write-Host " -clean Clean the solution"
  Write-Host " -productBuild Build the solution in the way it will be built in the full .NET product (VMR) build (short: -pb)"
  Write-Host ""

  Write-Host "Advanced settings:"
  Write-Host " -projects Semi-colon delimited list of sln/proj's to build. Globbing is supported (*.sln)"
  Write-Host " -ci Set when running on CI server"
  Write-Host " -excludeCIBinarylog Don't output binary log (short: -nobl)"
  Write-Host " -prepareMachine Prepare machine for CI run, clean up processes after build"
  Write-Host " -warnAsError Sets warnaserror msbuild parameter ('true' or 'false')"
  Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
  Write-Host " -excludePrereleaseVS Set to exclude build engines in prerelease versions of Visual Studio"
  Write-Host " -nativeToolsOnMachine Sets the native tools on machine environment variable (indicating that the script should use native tools on machine)"
  Write-Host " -nodeReuse Sets nodereuse msbuild parameter ('true' or 'false')"
  Write-Host " -buildCheck Sets /check msbuild parameter"
  Write-Host " -fromVMR Set when building from within the VMR"
  Write-Host ""

  Write-Host "Command line arguments not listed above are passed thru to msbuild."
  Write-Host "The above arguments can be shortened as much as to be unambiguous (e.g. -co for configuration, -t for test, etc.)."
}
. $PSScriptRoot\tools.ps1
# Dot-sources the repo's optional restore-toolset.ps1 hook, but only when a restore
# was requested and the hook file exists next to the eng scripts.
function InitializeCustomToolset {
  if ($restore) {
    $customToolsetScript = Join-Path $EngRoot 'restore-toolset.ps1'
    if (Test-Path $customToolsetScript) {
      . $customToolsetScript
    }
  }
}
# Runs the toolset build project with MSBuild, translating the script's switches into
# /p: properties. Relies on tools.ps1 having been dot-sourced ($LogDir, $RepoRoot, MSBuild).
function Build {
  $toolsetBuildProj = InitializeToolset
  InitializeCustomToolset

  # Optional arguments are passed as empty strings when not requested; MSBuild ignores them.
  $bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'Build.binlog') } else { '' }
  $platformArg = if ($platform) { "/p:Platform=$platform" } else { '' }
  $check = if ($buildCheck) { '/check' } else { '' }

  if ($projects) {
    # Re-assign properties to a new variable because PowerShell doesn't let us append properties directly for unclear reasons.
    # Explicitly set the type as string[] because otherwise PowerShell would make this char[] if $properties is empty.
    [string[]] $msbuildArgs = $properties

    # Resolve relative project paths into full paths
    $projects = ($projects.Split(';').ForEach({Resolve-Path $_}) -join ';')

    $msbuildArgs += "/p:Projects=$projects"
    $properties = $msbuildArgs
  }

  MSBuild $toolsetBuildProj `
    $bl `
    $platformArg `
    $check `
    /p:Configuration=$configuration `
    /p:RepoRoot=$RepoRoot `
    /p:Restore=$restore `
    /p:DeployDeps=$deployDeps `
    /p:Build=$build `
    /p:Rebuild=$rebuild `
    /p:Deploy=$deploy `
    /p:Test=$test `
    /p:Pack=$pack `
    /p:DotNetBuild=$productBuild `
    /p:DotNetBuildFromVMR=$fromVMR `
    /p:IntegrationTest=$integrationTest `
    /p:PerformanceTest=$performanceTest `
    /p:Sign=$sign `
    /p:Publish=$publish `
    /p:RestoreStaticGraphEnableBinaryLogger=$binaryLog `
    @properties
}
# Top-level driver: handle the -clean/-help shortcuts, apply CI defaults, then build.
try {
  if ($clean) {
    if (Test-Path $ArtifactsDir) {
      Remove-Item -Recurse -Force $ArtifactsDir
      Write-Host 'Artifacts directory deleted.'
    }
    exit 0
  }

  if ($help -or (($null -ne $properties) -and ($properties.Contains('/help') -or $properties.Contains('/?')))) {
    Print-Usage
    exit 0
  }

  if ($ci) {
    # On CI, produce a binlog unless explicitly excluded and disable MSBuild node
    # reuse so worker processes don't outlive the build.
    if (-not $excludeCIBinarylog) {
      $binaryLog = $true
    }
    $nodeReuse = $false
  }

  if ($nativeToolsOnMachine) {
    $env:NativeToolsOnMachine = $true
  }
  if ($restore) {
    InitializeNativeTools
  }

  Build
}
catch {
  Write-Host $_.ScriptStackTrace
  Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
  ExitWithExitCode 1
}

ExitWithExitCode 0
================================================
FILE: eng/common/build.sh
================================================
#!/usr/bin/env bash

# Stop script if unbound variable found (use ${var:-} if intentional)
set -u

# Stop script if command returns non-zero exit code.
# Prevents hidden errors caused by missing error code propagation.
set -e

# Prints command-line help for this script.
usage()
{
  echo "Common settings:"
  echo " --configuration Build configuration: 'Debug' or 'Release' (short: -c)"
  echo " --verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)"
  echo " --binaryLog Create MSBuild binary log (short: -bl)"
  echo " --help Print help and exit (short: -h)"
  echo ""

  echo "Actions:"
  echo " --restore Restore dependencies (short: -r)"
  echo " --build Build solution (short: -b)"
  echo " --sourceBuild Source-build the solution (short: -sb)"
  echo " Will additionally trigger the following actions: --restore, --build, --pack"
  echo " If --configuration is not set explicitly, will also set it to 'Release'"
  echo " --productBuild Build the solution in the way it will be built in the full .NET product (VMR) build (short: -pb)"
  echo " Will additionally trigger the following actions: --restore, --build, --pack"
  echo " If --configuration is not set explicitly, will also set it to 'Release'"
  echo " --rebuild Rebuild solution"
  echo " --test Run all unit tests in the solution (short: -t)"
  echo " --integrationTest Run all integration tests in the solution"
  echo " --performanceTest Run all performance tests in the solution"
  echo " --pack Package build outputs into NuGet packages and Willow components"
  echo " --sign Sign build outputs"
  echo " --publish Publish artifacts (e.g. symbols)"
  echo " --clean Clean the solution"
  echo ""

  echo "Advanced settings:"
  echo " --projects Project or solution file(s) to build"
  echo " --ci Set when running on CI server"
  echo " --excludeCIBinarylog Don't output binary log (short: -nobl)"
  echo " --prepareMachine Prepare machine for CI run, clean up processes after build"
  echo " --nodeReuse Sets nodereuse msbuild parameter ('true' or 'false')"
  echo " --warnAsError Sets warnaserror msbuild parameter ('true' or 'false')"
  echo " --buildCheck Sets /check msbuild parameter"
  echo " --fromVMR Set when building from within the VMR"
  echo ""

  echo "Command line arguments not listed above are passed thru to msbuild."
  echo "Arguments can also be passed in with a single hyphen."
}
source="${BASH_SOURCE[0]}"

# resolve $source until the file is no longer a symlink
while [[ -h "$source" ]]; do
  scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
  source="$(readlink "$source")"
  # if $source was a relative symlink, we need to resolve it relative to the path where the
  # symlink file was located
  [[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"

# Default values for every option understood by the argument parser below.
restore=false
build=false
source_build=false
product_build=false
from_vmr=false
rebuild=false
test=false
integration_test=false
performance_test=false
pack=false
publish=false
sign=false
public=false
ci=false
clean=false
warn_as_error=true
node_reuse=true
build_check=false
binary_log=false
exclude_ci_binary_log=false
pipelines_log=false
projects=''
configuration=''
prepare_machine=false
verbosity='minimal'
runtime_source_feed=''
runtime_source_feed_key=''
# Unrecognized arguments are collected here and passed through to MSBuild.
properties=()
# Parse arguments. Each option is normalized by collapsing a leading '--' to '-' and
# lowercasing, so '--Build', '-build' and '--build' are all treated the same.
while [[ $# > 0 ]]; do
  opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
  case "$opt" in
    -help|-h)
      usage
      exit 0
      ;;
    -clean)
      clean=true
      ;;
    -configuration|-c)
      configuration=$2
      shift
      ;;
    -verbosity|-v)
      verbosity=$2
      shift
      ;;
    -binarylog|-bl)
      binary_log=true
      ;;
    -excludecibinarylog|-nobl)
      exclude_ci_binary_log=true
      ;;
    -pipelineslog|-pl)
      pipelines_log=true
      ;;
    -restore|-r)
      restore=true
      ;;
    -build|-b)
      build=true
      ;;
    -rebuild)
      rebuild=true
      ;;
    -pack)
      pack=true
      ;;
    # Source-build and product-build imply restore, build and pack.
    -sourcebuild|-source-build|-sb)
      build=true
      source_build=true
      product_build=true
      restore=true
      pack=true
      ;;
    -productbuild|-product-build|-pb)
      build=true
      product_build=true
      restore=true
      pack=true
      ;;
    -fromvmr|-from-vmr)
      from_vmr=true
      ;;
    -test|-t)
      test=true
      ;;
    -integrationtest)
      integration_test=true
      ;;
    -performancetest)
      performance_test=true
      ;;
    -sign)
      sign=true
      ;;
    -publish)
      publish=true
      ;;
    -preparemachine)
      prepare_machine=true
      ;;
    -projects)
      projects=$2
      shift
      ;;
    -ci)
      ci=true
      ;;
    -warnaserror)
      warn_as_error=$2
      shift
      ;;
    -nodereuse)
      node_reuse=$2
      shift
      ;;
    -buildcheck)
      build_check=true
      ;;
    -runtimesourcefeed)
      runtime_source_feed=$2
      shift
      ;;
    -runtimesourcefeedkey)
      runtime_source_feed_key=$2
      shift
      ;;
    # Anything unrecognized is passed through to MSBuild verbatim.
    *)
      properties+=("$1")
      ;;
  esac
  shift
done

# Default configuration: Release for source-build, Debug otherwise.
if [[ -z "$configuration" ]]; then
  if [[ "$source_build" = true ]]; then configuration="Release"; else configuration="Debug"; fi
fi

# CI defaults: emit pipeline logging, disable MSBuild node reuse, and produce a
# binlog unless explicitly excluded.
if [[ "$ci" == true ]]; then
  pipelines_log=true
  node_reuse=false
  if [[ "$exclude_ci_binary_log" == false ]]; then
    binary_log=true
  fi
fi
. "$scriptroot/tools.sh"
# Sources the repo's optional restore-toolset.sh hook when it exists.
function InitializeCustomToolset {
  local custom_toolset_script
  custom_toolset_script="$eng_root/restore-toolset.sh"
  if [[ -a "$custom_toolset_script" ]]; then
    . "$custom_toolset_script"
  fi
}
# Runs the toolset build project via MSBuild, mapping the parsed options onto /p:
# properties. Relies on tools.sh having been sourced ($log_dir, $repo_root,
# $_InitializeToolset, MSBuild, ExitWithExitCode).
function Build {
  InitializeToolset
  InitializeCustomToolset

  if [[ ! -z "$projects" ]]; then
    properties+=("/p:Projects=$projects")
  fi

  # Optional arguments stay empty when not requested; MSBuild ignores empty words.
  local bl=""
  if [[ "$binary_log" == true ]]; then
    bl="/bl:\"$log_dir/Build.binlog\""
  fi

  local check=""
  if [[ "$build_check" == true ]]; then
    check="/check"
  fi

  MSBuild $_InitializeToolset \
    $bl \
    $check \
    /p:Configuration=$configuration \
    /p:RepoRoot="$repo_root" \
    /p:Restore=$restore \
    /p:Build=$build \
    /p:DotNetBuild=$product_build \
    /p:DotNetBuildSourceOnly=$source_build \
    /p:DotNetBuildFromVMR=$from_vmr \
    /p:Rebuild=$rebuild \
    /p:Test=$test \
    /p:Pack=$pack \
    /p:IntegrationTest=$integration_test \
    /p:PerformanceTest=$performance_test \
    /p:Sign=$sign \
    /p:Publish=$publish \
    /p:RestoreStaticGraphEnableBinaryLogger=$binary_log \
    ${properties[@]+"${properties[@]}"}

  ExitWithExitCode 0
}
# Handle --clean as a standalone action, then optionally restore native tools and build.
if [[ "$clean" == true ]]; then
  if [ -d "$artifacts_dir" ]; then
    rm -rf $artifacts_dir
    echo "Artifacts directory deleted."
  fi
  exit 0
fi

if [[ "$restore" == true ]]; then
  InitializeNativeTools
fi

Build
================================================
FILE: eng/common/cibuild.sh
================================================
#!/usr/bin/env bash

# CI entry point: resolves this script's real on-disk location (following symlinks),
# then runs the common build script with the standard CI action set, forwarding any
# extra arguments.
source="${BASH_SOURCE[0]}"

# resolve $SOURCE until the file is no longer a symlink
while [[ -h $source ]]; do
  scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
  source="$(readlink "$source")"
  # if $source was a relative symlink, we need to resolve it relative to the path where
  # the symlink file was located
  [[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"

. "$scriptroot/build.sh" --restore --build --test --pack --publish --ci $@
================================================
FILE: eng/common/core-templates/job/job.yml
================================================
parameters:
# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
cancelTimeoutInMinutes: ''
condition: ''
container: ''
continueOnError: false
dependsOn: ''
displayName: ''
pool: ''
steps: []
strategy: ''
timeoutInMinutes: ''
variables: []
workspace: ''
templateContext: {}
# Job base template specific parameters
# See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
# publishing defaults
artifacts: ''
enableMicrobuild: false
enableMicrobuildForMacAndLinux: false
microbuildUseESRP: true
enablePublishBuildArtifacts: false
enablePublishBuildAssets: false
enablePublishTestResults: false
enablePublishing: false
enableBuildRetry: false
mergeTestResults: false
testRunTitle: ''
testResultsFormat: ''
name: ''
preSteps: []
artifactPublishSteps: []
runAsPublic: false
# 1es specific parameters
is1ESPipeline: ''
jobs:
- job: ${{ parameters.name }}
${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
${{ if ne(parameters.condition, '') }}:
condition: ${{ parameters.condition }}
${{ if ne(parameters.container, '') }}:
container: ${{ parameters.container }}
${{ if ne(parameters.continueOnError, '') }}:
continueOnError: ${{ parameters.continueOnError }}
${{ if ne(parameters.dependsOn, '') }}:
dependsOn: ${{ parameters.dependsOn }}
${{ if ne(parameters.displayName, '') }}:
displayName: ${{ parameters.displayName }}
${{ if ne(parameters.pool, '') }}:
pool: ${{ parameters.pool }}
${{ if ne(parameters.strategy, '') }}:
strategy: ${{ parameters.strategy }}
${{ if ne(parameters.timeoutInMinutes, '') }}:
timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
${{ if ne(parameters.templateContext, '') }}:
templateContext: ${{ parameters.templateContext }}
variables:
- ${{ if ne(parameters.enableTelemetry, 'false') }}:
- name: DOTNET_CLI_TELEMETRY_PROFILE
value: '$(Build.Repository.Uri)'
# Retry signature validation up to three times, waiting 2 seconds between attempts.
# See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
- name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
value: 3,2000
- ${{ each variable in parameters.variables }}:
# handle name-value variable syntax
# example:
# - name: [key]
# value: [value]
- ${{ if ne(variable.name, '') }}:
- name: ${{ variable.name }}
value: ${{ variable.value }}
# handle variable groups
- ${{ if ne(variable.group, '') }}:
- group: ${{ variable.group }}
# handle template variable syntax
# example:
# - template: path/to/template.yml
# parameters:
# [key]: [value]
- ${{ if ne(variable.template, '') }}:
- template: ${{ variable.template }}
${{ if ne(variable.parameters, '') }}:
parameters: ${{ variable.parameters }}
# handle key-value variable syntax.
# example:
# - [key]: [value]
- ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}:
- ${{ each pair in variable }}:
- name: ${{ pair.key }}
value: ${{ pair.value }}
# DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
- ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- group: DotNet-HelixApi-Access
${{ if ne(parameters.workspace, '') }}:
workspace: ${{ parameters.workspace }}
steps:
- ${{ if eq(parameters.is1ESPipeline, '') }}:
- 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
- ${{ if ne(parameters.preSteps, '') }}:
- ${{ each preStep in parameters.preSteps }}:
- ${{ preStep }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- template: /eng/common/core-templates/steps/install-microbuild.yml
parameters:
enableMicrobuild: ${{ parameters.enableMicrobuild }}
enableMicrobuildForMacAndLinux: ${{ parameters.enableMicrobuildForMacAndLinux }}
microbuildUseESRP: ${{ parameters.microbuildUseESRP }}
continueOnError: ${{ parameters.continueOnError }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
- task: NuGetAuthenticate@1
- ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}:
- task: DownloadPipelineArtifact@2
inputs:
buildType: current
artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
- ${{ each step in parameters.steps }}:
- ${{ step }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- template: /eng/common/core-templates/steps/cleanup-microbuild.yml
parameters:
enableMicrobuild: ${{ parameters.enableMicrobuild }}
enableMicrobuildForMacAndLinux: ${{ parameters.enableMicrobuildForMacAndLinux }}
continueOnError: ${{ parameters.continueOnError }}
# Publish test results
- ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}:
- task: PublishTestResults@2
displayName: Publish XUnit Test Results
inputs:
testResultsFormat: 'xUnit'
testResultsFiles: '*.xml'
searchFolder: '$(System.DefaultWorkingDirectory)/artifacts/TestResults/$(_BuildConfig)'
testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
mergeTestResults: ${{ parameters.mergeTestResults }}
continueOnError: true
condition: always()
- ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}:
- task: PublishTestResults@2
displayName: Publish TRX Test Results
inputs:
testResultsFormat: 'VSTest'
testResultsFiles: '*.trx'
searchFolder: '$(System.DefaultWorkingDirectory)/artifacts/TestResults/$(_BuildConfig)'
testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
mergeTestResults: ${{ parameters.mergeTestResults }}
continueOnError: true
condition: always()
# gather artifacts
- ${{ if ne(parameters.artifacts.publish, '') }}:
- ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
- task: CopyFiles@2
displayName: Gather binaries for publish to artifacts
inputs:
SourceFolder: 'artifacts/bin'
Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
- task: CopyFiles@2
displayName: Gather packages for publish to artifacts
inputs:
SourceFolder: 'artifacts/packages'
Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
- ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- task: CopyFiles@2
displayName: Gather logs for publish to artifacts
inputs:
SourceFolder: 'artifacts/log'
Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/log'
continueOnError: true
condition: always()
- ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- task: CopyFiles@2
displayName: Gather logs for publish to artifacts
inputs:
SourceFolder: 'artifacts/log/$(_BuildConfig)'
Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
continueOnError: true
condition: always()
- ${{ if eq(parameters.enableBuildRetry, 'true') }}:
- task: CopyFiles@2
displayName: Gather buildconfiguration for build retry
inputs:
SourceFolder: '$(System.DefaultWorkingDirectory)/eng/common/BuildConfiguration'
Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/eng/common/BuildConfiguration'
continueOnError: true
condition: always()
- ${{ each step in parameters.artifactPublishSteps }}:
- ${{ step }}
================================================
FILE: eng/common/core-templates/job/onelocbuild.yml
================================================
parameters:
# Optional: dependencies of the job
dependsOn: ''
# Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
pool: ''
CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
SourcesDirectory: $(System.DefaultWorkingDirectory)
CreatePr: true
AutoCompletePr: false
ReusePr: true
UseLfLineEndings: true
UseCheckedInLocProjectJson: false
SkipLocProjectJsonGeneration: false
LanguageSet: VS_Main_Languages
LclSource: lclFilesInRepo
LclPackageId: ''
RepoType: gitHub
GitHubOrg: dotnet
MirrorRepo: ''
MirrorBranch: main
condition: ''
JobNameSuffix: ''
is1ESPipeline: ''
jobs:
- job: OneLocBuild${{ parameters.JobNameSuffix }}
dependsOn: ${{ parameters.dependsOn }}
displayName: OneLocBuild${{ parameters.JobNameSuffix }}
variables:
- group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
- name: _GenerateLocProjectArguments
value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
-LanguageSet "${{ parameters.LanguageSet }}"
-CreateNeutralXlfs
- ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
- name: _GenerateLocProjectArguments
value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson
- template: /eng/common/core-templates/variables/pool-providers.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
${{ if ne(parameters.pool, '') }}:
pool: ${{ parameters.pool }}
${{ if eq(parameters.pool, '') }}:
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: AzurePipelines-EO
image: 1ESPT-Windows2025
demands: Cmd
os: windows
# If it's not devdiv, it's dnceng
${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
name: $(DncEngInternalBuildPool)
image: windows.vs2026.amd64
os: windows
steps:
- ${{ if eq(parameters.is1ESPipeline, '') }}:
- 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
- ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}:
- task: Powershell@2
inputs:
filePath: $(System.DefaultWorkingDirectory)/eng/common/generate-locproject.ps1
arguments: $(_GenerateLocProjectArguments)
displayName: Generate LocProject.json
condition: ${{ parameters.condition }}
- task: OneLocBuild@2
displayName: OneLocBuild
env:
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
inputs:
locProj: eng/Localize/LocProject.json
outDir: $(Build.ArtifactStagingDirectory)
lclSource: ${{ parameters.LclSource }}
lclPackageId: ${{ parameters.LclPackageId }}
isCreatePrSelected: ${{ parameters.CreatePr }}
isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
${{ if eq(parameters.CreatePr, true) }}:
isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
isShouldReusePrSelected: ${{ parameters.ReusePr }}
packageSourceAuth: patAuth
patVariable: ${{ parameters.CeapexPat }}
${{ if eq(parameters.RepoType, 'gitHub') }}:
repoType: ${{ parameters.RepoType }}
gitHubPatVariable: "${{ parameters.GithubPat }}"
${{ if ne(parameters.MirrorRepo, '') }}:
isMirrorRepoSelected: true
gitHubOrganization: ${{ parameters.GitHubOrg }}
mirrorRepo: ${{ parameters.MirrorRepo }}
mirrorBranch: ${{ parameters.MirrorBranch }}
condition: ${{ parameters.condition }}
# Copy the locProject.json to the root of the Loc directory, then publish a pipeline artifact
- task: CopyFiles@2
displayName: Copy LocProject.json
inputs:
SourceFolder: '$(System.DefaultWorkingDirectory)/eng/Localize/'
Contents: 'LocProject.json'
TargetFolder: '$(Build.ArtifactStagingDirectory)/loc'
condition: ${{ parameters.condition }}
- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
args:
targetPath: '$(Build.ArtifactStagingDirectory)/loc'
artifactName: 'Loc'
displayName: 'Publish Localization Files'
condition: ${{ parameters.condition }}
================================================
FILE: eng/common/core-templates/job/publish-build-assets.yml
================================================
parameters:
configuration: 'Debug'
# Optional: condition for the job to run
condition: ''
# Optional: 'true' if future jobs should run even if this job fails
continueOnError: false
# Optional: dependencies of the job
dependsOn: ''
# Optional: Include PublishBuildArtifacts task
enablePublishBuildArtifacts: false
# Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
pool: {}
# Optional: should run as a public build even in the internal project
# if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
runAsPublic: false
# Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
publishAssetsImmediately: false
artifactsPublishingAdditionalParameters: ''
signingValidationAdditionalParameters: ''
is1ESPipeline: ''
# Optional: whether or not the build has assets it wants to publish to BAR
isAssetlessBuild: false
# Optional, publishing version
publishingVersion: 3
# Optional: A minimatch pattern for the asset manifests to publish to BAR
assetManifestsPattern: '*/manifests/**/*.xml'
repositoryAlias: self
officialBuildId: ''
jobs:
- job: Asset_Registry_Publish
dependsOn: ${{ parameters.dependsOn }}
timeoutInMinutes: 150
${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
displayName: Publish Assets
${{ else }}:
displayName: Publish to Build Asset Registry
variables:
- template: /eng/common/core-templates/variables/pool-providers.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- group: Publish-Build-Assets
- group: AzureDevOps-Artifact-Feeds-Pats
- name: runCodesignValidationInjection
value: false
# unconditional - needed for logs publishing (redactor tool version)
- template: /eng/common/core-templates/post-build/common-variables.yml
- name: OfficialBuildId
${{ if ne(parameters.officialBuildId, '') }}:
value: ${{ parameters.officialBuildId }}
${{ else }}:
value: $(Build.BuildNumber)
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: AzurePipelines-EO
image: 1ESPT-Windows2025
demands: Cmd
os: windows
# If it's not devdiv, it's dnceng
${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
name: NetCore1ESPool-Publishing-Internal
image: windows.vs2026.amd64
os: windows
steps:
- ${{ if eq(parameters.is1ESPipeline, '') }}:
- 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- checkout: ${{ parameters.repositoryAlias }}
fetchDepth: 3
clean: true
- ${{ if eq(parameters.isAssetlessBuild, 'false') }}:
- ${{ if eq(parameters.publishingVersion, 3) }}:
- task: DownloadPipelineArtifact@2
displayName: Download Asset Manifests
inputs:
artifactName: AssetManifests
targetPath: '$(Build.StagingDirectory)/AssetManifests'
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- ${{ if eq(parameters.publishingVersion, 4) }}:
- task: DownloadPipelineArtifact@2
displayName: Download V4 asset manifests
inputs:
itemPattern: '*/manifests/**/*.xml'
targetPath: '$(Build.StagingDirectory)/AllAssetManifests'
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- task: CopyFiles@2
displayName: Copy V4 asset manifests to AssetManifests
inputs:
SourceFolder: '$(Build.StagingDirectory)/AllAssetManifests'
Contents: ${{ parameters.assetManifestsPattern }}
TargetFolder: '$(Build.StagingDirectory)/AssetManifests'
flattenFolders: true
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- task: NuGetAuthenticate@1
# Populate internal runtime variables.
- template: /eng/common/templates/steps/enable-internal-sources.yml
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
parameters:
legacyCredential: $(dn-bot-dnceng-artifact-feeds-rw)
- template: /eng/common/templates/steps/enable-internal-runtimes.yml
- task: AzureCLI@2
displayName: Publish Build Assets
inputs:
azureSubscription: "Darc: Maestro Production"
scriptType: ps
scriptLocation: scriptPath
scriptPath: $(System.DefaultWorkingDirectory)/eng/common/sdk-task.ps1
arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
/p:ManifestsPath='$(Build.StagingDirectory)/AssetManifests'
/p:IsAssetlessBuild=${{ parameters.isAssetlessBuild }}
/p:MaestroApiEndpoint=https://maestro.dot.net
/p:OfficialBuildId=$(OfficialBuildId)
-runtimeSourceFeed https://ci.dot.net/internal
-runtimeSourceFeedKey '$(dotnetbuilds-internal-container-read-token-base64)'
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- task: powershell@2
displayName: Create ReleaseConfigs Artifact
inputs:
targetType: inline
script: |
New-Item -Path "$(Build.StagingDirectory)/ReleaseConfigs" -ItemType Directory -Force
$filePath = "$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt"
Add-Content -Path $filePath -Value $(BARBuildId)
Add-Content -Path $filePath -Value "$(DefaultChannels)"
Add-Content -Path $filePath -Value $(IsStableBuild)
$symbolExclusionfile = "$(System.DefaultWorkingDirectory)/eng/SymbolPublishingExclusionsFile.txt"
if (Test-Path -Path $symbolExclusionfile)
{
Write-Host "SymbolExclusionFile exists"
Copy-Item -Path $symbolExclusionfile -Destination "$(Build.StagingDirectory)/ReleaseConfigs"
}
- ${{ if eq(parameters.publishingVersion, 4) }}:
- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
args:
targetPath: '$(Build.ArtifactStagingDirectory)/MergedManifest.xml'
artifactName: AssetManifests
displayName: 'Publish Merged Manifest'
retryCountOnTaskFailure: 10 # for any files being locked
isProduction: false # just metadata for publishing
- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
args:
displayName: Publish ReleaseConfigs Artifact
targetPath: '$(Build.StagingDirectory)/ReleaseConfigs'
artifactName: ReleaseConfigs
retryCountOnTaskFailure: 10 # for any files being locked
isProduction: false # just metadata for publishing
- ${{ if or(eq(parameters.publishAssetsImmediately, 'true'), eq(parameters.isAssetlessBuild, 'true')) }}:
- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
parameters:
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
is1ESPipeline: ${{ parameters.is1ESPipeline }}
# Darc is targeting 8.0, so make sure it's installed
- task: UseDotNet@2
inputs:
version: 8.0.x
- task: AzureCLI@2
displayName: Publish Using Darc
inputs:
azureSubscription: "Darc: Maestro Production"
scriptType: ps
scriptLocation: scriptPath
scriptPath: $(System.DefaultWorkingDirectory)/eng/common/post-build/publish-using-darc.ps1
arguments: >
-BuildId $(BARBuildId)
-PublishingInfraVersion 3
-AzdoToken '$(System.AccessToken)'
-WaitPublishingFinish true
-ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
-SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
-SkipAssetsPublishing '${{ parameters.isAssetlessBuild }}'
-runtimeSourceFeed https://ci.dot.net/internal
-runtimeSourceFeedKey '$(dotnetbuilds-internal-container-read-token-base64)'
- ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- template: /eng/common/core-templates/steps/publish-logs.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
StageLabel: 'BuildAssetRegistry'
JobLabel: 'Publish_Artifacts_Logs'
================================================
FILE: eng/common/core-templates/job/source-build.yml
================================================
parameters:
# This template adds arcade-powered source-build to CI. The template produces a server job with a
# default ID 'Source_Build_Complete' to put in a dependency list if necessary.
# Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed.
jobNamePrefix: 'Source_Build'
# Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for
# managed-only repositories. This is an object with these properties:
#
# name: ''
# The name of the job. This is included in the job ID.
# targetRID: ''
# The name of the target RID to use, instead of the one auto-detected by Arcade.
# portableBuild: false
# Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
# linux-x64), and compiling against distro-provided packages rather than portable ones. The
# default is portable mode.
# skipPublishValidation: false
# Disables publishing validation. By default, a check is performed to ensure no packages are
# published by source-build.
# container: ''
# A container to use. Runs in docker.
# pool: {}
# A pool to use. Runs directly on an agent.
# buildScript: ''
# Specifies the build script to invoke to perform the build in the repo. The default
# './build.sh' should work for typical Arcade repositories, but this is customizable for
# difficult situations.
# buildArguments: ''
# Specifies additional build arguments to pass to the build script.
# jobProperties: {}
# A list of job properties to inject at the top level, for potential extensibility beyond
# container and pool.
platform: {}
is1ESPipeline: ''
# If set to true and running on a non-public project,
# Internal nuget and blob storage locations will be enabled.
# This is not enabled by default because many repositories do not need internal sources
# and do not need to have the required service connections approved in the pipeline.
enableInternalSources: false
jobs:
- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
displayName: Source-Build (${{ parameters.platform.name }})
${{ each property in parameters.platform.jobProperties }}:
${{ property.key }}: ${{ property.value }}
${{ if ne(parameters.platform.container, '') }}:
container: ${{ parameters.platform.container }}
${{ if eq(parameters.platform.pool, '') }}:
# The default VM host AzDO pool. This should be capable of running Docker containers: almost all
# source-build builds run in Docker, including the default managed platform.
# /eng/common/core-templates/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic
${{ if eq(parameters.is1ESPipeline, 'true') }}:
pool:
${{ if eq(variables['System.TeamProject'], 'public') }}:
name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
demands: ImageOverride -equals build.azurelinux.3.amd64.open
${{ if eq(variables['System.TeamProject'], 'internal') }}:
name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
image: build.azurelinux.3.amd64
os: linux
${{ else }}:
pool:
${{ if eq(variables['System.TeamProject'], 'public') }}:
name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
demands: ImageOverride -equals build.azurelinux.3.amd64.open
${{ if eq(variables['System.TeamProject'], 'internal') }}:
name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
demands: ImageOverride -equals build.azurelinux.3.amd64
${{ if ne(parameters.platform.pool, '') }}:
pool: ${{ parameters.platform.pool }}
workspace:
clean: all
steps:
- ${{ if eq(parameters.is1ESPipeline, '') }}:
- 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
- ${{ if eq(parameters.enableInternalSources, true) }}:
- template: /eng/common/core-templates/steps/enable-internal-sources.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
- template: /eng/common/core-templates/steps/enable-internal-runtimes.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
- template: /eng/common/core-templates/steps/source-build.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
platform: ${{ parameters.platform }}
================================================
FILE: eng/common/core-templates/job/source-index-stage1.yml
================================================
parameters:
runAsPublic: false
sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
preSteps: []
binlogPath: artifacts/log/Debug/Build.binlog
condition: eq(variables['Build.SourceBranch'], 'refs/heads/main')
dependsOn: ''
pool: ''
is1ESPipeline: ''
jobs:
- job: SourceIndexStage1
dependsOn: ${{ parameters.dependsOn }}
condition: ${{ parameters.condition }}
variables:
- name: BinlogPath
value: ${{ parameters.binlogPath }}
- template: /eng/common/core-templates/variables/pool-providers.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
${{ if ne(parameters.pool, '') }}:
pool: ${{ parameters.pool }}
${{ if eq(parameters.pool, '') }}:
pool:
${{ if eq(variables['System.TeamProject'], 'public') }}:
name: $(DncEngPublicBuildPool)
image: windows.vs2026preview.scout.amd64.open
${{ if eq(variables['System.TeamProject'], 'internal') }}:
name: $(DncEngInternalBuildPool)
image: windows.vs2026preview.scout.amd64
steps:
- ${{ if eq(parameters.is1ESPipeline, '') }}:
- 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
- ${{ each preStep in parameters.preSteps }}:
- ${{ preStep }}
- script: ${{ parameters.sourceIndexBuildCommand }}
displayName: Build Repository
- template: /eng/common/core-templates/steps/source-index-stage1-publish.yml
parameters:
binlogPath: ${{ parameters.binlogPath }}
================================================
FILE: eng/common/core-templates/jobs/codeql-build.yml
================================================
parameters:
# See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
continueOnError: false
# Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
jobs: []
# Optional: if specified, restore and use this version of Guardian instead of the default.
overrideGuardianVersion: ''
is1ESPipeline: ''
jobs:
- template: /eng/common/core-templates/jobs/jobs.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
enableMicrobuild: false
enablePublishBuildArtifacts: false
enablePublishTestResults: false
enablePublishBuildAssets: false
enableTelemetry: true
variables:
- group: Publish-Build-Assets
# The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
# sync with the packages.config file.
- name: DefaultGuardianVersion
value: 0.109.0
- name: GuardianPackagesConfigFile
value: $(System.DefaultWorkingDirectory)\eng\common\sdl\packages.config
- name: GuardianVersion
value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
jobs: ${{ parameters.jobs }}
================================================
FILE: eng/common/core-templates/jobs/jobs.yml
================================================
parameters:
# See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
continueOnError: false
# Optional: Include PublishBuildArtifacts task
enablePublishBuildArtifacts: false
# Optional: Enable running the source-build jobs to build repo from source
enableSourceBuild: false
# Optional: Parameters for source-build template.
# See /eng/common/core-templates/jobs/source-build.yml for options
sourceBuildParameters: []
graphFileGeneration:
# Optional: Enable generating the graph files at the end of the build
enabled: false
# Optional: Include toolset dependencies in the generated graph files
includeToolset: false
# Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
jobs: []
# Optional: Override automatically derived dependsOn value for "publish build assets" job
publishBuildAssetsDependsOn: ''
# Optional: Publish the assets as soon as the publish to BAR stage is complete, rather than doing so in a separate stage.
publishAssetsImmediately: false
# Optional: whether or not the build has assets it wants to publish to BAR
isAssetlessBuild: false
# Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml)
artifactsPublishingAdditionalParameters: ''
signingValidationAdditionalParameters: ''
# Optional: should run as a public build even in the internal project
# if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
runAsPublic: false
enableSourceIndex: false
sourceIndexParams: {}
artifacts: {}
is1ESPipeline: ''
# Publishing version w/default.
publishingVersion: 3
repositoryAlias: self
officialBuildId: ''
# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
# and some (Microbuild) should only be applied to non-PR cases for internal builds.
jobs:
- ${{ each job in parameters.jobs }}:
- ${{ if eq(parameters.is1ESPipeline, 'true') }}:
- template: /eng/common/templates-official/job/job.yml
parameters:
# pass along parameters
${{ each parameter in parameters }}:
${{ if ne(parameter.key, 'jobs') }}:
${{ parameter.key }}: ${{ parameter.value }}
# pass along job properties
${{ each property in job }}:
${{ if ne(property.key, 'job') }}:
${{ property.key }}: ${{ property.value }}
name: ${{ job.job }}
- ${{ else }}:
- template: /eng/common/templates/job/job.yml
parameters:
# pass along parameters
${{ each parameter in parameters }}:
${{ if ne(parameter.key, 'jobs') }}:
${{ parameter.key }}: ${{ parameter.value }}
# pass along job properties
${{ each property in job }}:
${{ if ne(property.key, 'job') }}:
${{ property.key }}: ${{ property.value }}
name: ${{ job.job }}
- ${{ if eq(parameters.enableSourceBuild, true) }}:
- template: /eng/common/core-templates/jobs/source-build.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
${{ each parameter in parameters.sourceBuildParameters }}:
${{ parameter.key }}: ${{ parameter.value }}
- ${{ if eq(parameters.enableSourceIndex, 'true') }}:
- template: ../job/source-index-stage1.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
runAsPublic: ${{ parameters.runAsPublic }}
${{ each parameter in parameters.sourceIndexParams }}:
${{ parameter.key }}: ${{ parameter.value }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, ''), eq(parameters.isAssetlessBuild, true)) }}:
- template: ../job/publish-build-assets.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
continueOnError: ${{ parameters.continueOnError }}
publishingVersion: ${{ parameters.publishingVersion }}
dependsOn:
- ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
- ${{ each job in parameters.publishBuildAssetsDependsOn }}:
- ${{ job.job }}
- ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
- ${{ each job in parameters.jobs }}:
- ${{ job.job }}
runAsPublic: ${{ parameters.runAsPublic }}
publishAssetsImmediately: ${{ or(parameters.publishAssetsImmediately, parameters.isAssetlessBuild) }}
isAssetlessBuild: ${{ parameters.isAssetlessBuild }}
enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}
repositoryAlias: ${{ parameters.repositoryAlias }}
officialBuildId: ${{ parameters.officialBuildId }}
================================================
FILE: eng/common/core-templates/jobs/source-build.yml
================================================
parameters:
# This template adds arcade-powered source-build to CI. A job is created for each platform, as
# well as an optional server job that completes when all platform jobs complete.
# See /eng/common/core-templates/job/source-build.yml
jobNamePrefix: 'Source_Build'
# This is the default platform provided by Arcade, intended for use by a managed-only repo.
defaultManagedPlatform:
name: 'Managed'
container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream-10-amd64'
# Defines the platforms on which to run build jobs. One job is created for each platform, and the
# object in this array is sent to the job template as 'platform'. If no platforms are specified,
# one job runs on 'defaultManagedPlatform'.
platforms: []
is1ESPipeline: ''
# If set to true and running on a non-public project,
# Internal nuget and blob storage locations will be enabled.
# This is not enabled by default because many repositories do not need internal sources
# and do not need to have the required service connections approved in the pipeline.
enableInternalSources: false
jobs:
- ${{ each platform in parameters.platforms }}:
- template: /eng/common/core-templates/job/source-build.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
jobNamePrefix: ${{ parameters.jobNamePrefix }}
platform: ${{ platform }}
enableInternalSources: ${{ parameters.enableInternalSources }}
- ${{ if eq(length(parameters.platforms), 0) }}:
- template: /eng/common/core-templates/job/source-build.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
jobNamePrefix: ${{ parameters.jobNamePrefix }}
platform: ${{ parameters.defaultManagedPlatform }}
enableInternalSources: ${{ parameters.enableInternalSources }}
================================================
FILE: eng/common/core-templates/post-build/common-variables.yml
================================================
variables:
- group: Publish-Build-Assets
# Whether the build is internal or not
- name: IsInternalBuild
value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
# Default Maestro++ API Endpoint and API Version
- name: MaestroApiEndPoint
value: "https://maestro.dot.net"
- name: MaestroApiVersion
value: "2020-02-20"
- name: SourceLinkCLIVersion
value: 3.0.0
- name: SymbolToolVersion
value: 1.0.1
- name: BinlogToolVersion
value: 1.0.11
- name: runCodesignValidationInjection
value: false
================================================
FILE: eng/common/core-templates/post-build/post-build.yml
================================================
parameters:
# Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
# Publishing V1 is no longer supported
# Publishing V2 is no longer supported
# Publishing V3 is the default
- name: publishingInfraVersion
displayName: Which version of publishing should be used to promote the build definition?
type: number
default: 3
values:
- 3
- 4
- name: BARBuildId
displayName: BAR Build Id
type: number
default: 0
- name: PromoteToChannelIds
displayName: Channel to promote BARBuildId to
type: string
default: ''
- name: enableSourceLinkValidation
displayName: Enable SourceLink validation
type: boolean
default: false
- name: enableSigningValidation
displayName: Enable signing validation
type: boolean
default: true
- name: enableSymbolValidation
displayName: Enable symbol validation
type: boolean
default: false
- name: enableNugetValidation
displayName: Enable NuGet validation
type: boolean
default: true
- name: publishInstallersAndChecksums
displayName: Publish installers and checksums
type: boolean
default: true
- name: requireDefaultChannels
displayName: Fail the build if there are no default channel(s) registrations for the current build
type: boolean
default: false
- name: SDLValidationParameters
type: object
default:
enable: false
publishGdn: false
continueOnError: false
params: ''
artifactNames: ''
downloadArtifacts: true
- name: isAssetlessBuild
type: boolean
displayName: Is Assetless Build
default: false
# These parameters let the user customize the call to sdk-task.ps1 for publishing
# symbols & general artifacts as well as for signing validation
- name: symbolPublishingAdditionalParameters
displayName: Symbol publishing additional parameters
type: string
default: ''
- name: artifactsPublishingAdditionalParameters
displayName: Artifact publishing additional parameters
type: string
default: ''
- name: signingValidationAdditionalParameters
displayName: Signing validation additional parameters
type: string
default: ''
# Which stages should finish execution before post-build stages start
- name: validateDependsOn
type: object
default:
- build
- name: publishDependsOn
type: object
default:
- Validate
# Optional: Call asset publishing rather than running in a separate stage
- name: publishAssetsImmediately
type: boolean
default: false
- name: is1ESPipeline
type: boolean
default: false
stages:
- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
- stage: Validate
dependsOn: ${{ parameters.validateDependsOn }}
displayName: Validate Build Assets
variables:
- template: /eng/common/core-templates/post-build/common-variables.yml
- template: /eng/common/core-templates/variables/pool-providers.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
jobs:
# NuGet metadata validation: downloads the build's package artifacts and runs
# nuget-validation.ps1 over them. Runs even when earlier jobs failed
# (succeededOrFailed), but only if 'enableNugetValidation' is true.
- job:
  displayName: NuGet Validation
  condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true'))
  pool:
    # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
    ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
      name: AzurePipelines-EO
      image: 1ESPT-Windows2025
      demands: Cmd
      os: windows
    # If it's not devdiv, it's dnceng
    ${{ else }}:
      ${{ if eq(parameters.is1ESPipeline, true) }}:
        name: $(DncEngInternalBuildPool)
        image: windows.vs2026preview.scout.amd64
        os: windows
      ${{ else }}:
        name: $(DncEngInternalBuildPool)
        demands: ImageOverride -equals windows.vs2026preview.scout.amd64
  steps:
  # Resolves BAR build id / AzDO project / pipeline / build id variables used below.
  - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
    parameters:
      BARBuildId: ${{ parameters.BARBuildId }}
      PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
      is1ESPipeline: ${{ parameters.is1ESPipeline }}
  # Pre-V4 publishing: packages were uploaded as a named build artifact.
  - ${{ if ne(parameters.publishingInfraVersion, 4) }}:
    - task: DownloadBuildArtifacts@0
      displayName: Download Package Artifacts
      inputs:
        buildType: specific
        buildVersionToDownload: specific
        project: $(AzDOProjectName)
        pipeline: $(AzDOPipelineId)
        buildId: $(AzDOBuildId)
        artifactName: PackageArtifacts
        checkDownloadedFiles: true
  # V4 publishing: packages live under <artifact>/packages/** in pipeline
  # artifacts; download and flatten them into the folder layout the
  # validation script expects.
  - ${{ if eq(parameters.publishingInfraVersion, 4) }}:
    - task: DownloadPipelineArtifact@2
      displayName: Download Pipeline Artifacts (V4)
      inputs:
        itemPattern: '*/packages/**/*.nupkg'
        targetPath: '$(Build.ArtifactStagingDirectory)/PipelineArtifactsDownload'
    - task: CopyFiles@2
      displayName: Flatten packages to PackageArtifacts
      inputs:
        SourceFolder: '$(Build.ArtifactStagingDirectory)/PipelineArtifactsDownload'
        Contents: '**/*.nupkg'
        TargetFolder: '$(Build.ArtifactStagingDirectory)/PackageArtifacts'
        flattenFolders: true
  - task: PowerShell@2
    displayName: Validate
    inputs:
      filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/nuget-validation.ps1
      arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
# Signing validation: downloads package artifacts and runs the SigningValidation
# sdk task over them. Skipped when post-build signing is in effect
# (PostBuildSign == 'true'), since packages are not signed in-build then.
- job:
  displayName: Signing Validation
  condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true'))
  pool:
    # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
    ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
      name: AzurePipelines-EO
      image: 1ESPT-Windows2025
      demands: Cmd
      os: windows
    # If it's not devdiv, it's dnceng
    ${{ else }}:
      ${{ if eq(parameters.is1ESPipeline, true) }}:
        name: $(DncEngInternalBuildPool)
        # NOTE(review): this branch uses windows.vs2026.amd64 while the
        # NuGet Validation job uses the vs2026preview.scout image — confirm the
        # divergence is intentional.
        image: windows.vs2026.amd64
        os: windows
      ${{ else }}:
        name: $(DncEngInternalBuildPool)
        demands: ImageOverride -equals windows.vs2026preview.scout.amd64
  steps:
  # Resolves BAR build id / AzDO project / pipeline / build id variables used below.
  - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
    parameters:
      BARBuildId: ${{ parameters.BARBuildId }}
      PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
      is1ESPipeline: ${{ parameters.is1ESPipeline }}
  # Pre-V4 publishing: packages were uploaded as a named build artifact.
  - ${{ if ne(parameters.publishingInfraVersion, 4) }}:
    - task: DownloadBuildArtifacts@0
      displayName: Download Package Artifacts
      inputs:
        buildType: specific
        buildVersionToDownload: specific
        project: $(AzDOProjectName)
        pipeline: $(AzDOPipelineId)
        buildId: $(AzDOBuildId)
        artifactName: PackageArtifacts
        checkDownloadedFiles: true
  # V4 publishing: download pipeline artifacts and flatten the nupkgs into
  # the PackageArtifacts layout the sdk task expects.
  - ${{ if eq(parameters.publishingInfraVersion, 4) }}:
    - task: DownloadPipelineArtifact@2
      displayName: Download Pipeline Artifacts (V4)
      inputs:
        itemPattern: '*/packages/**/*.nupkg'
        targetPath: '$(Build.ArtifactStagingDirectory)/PipelineArtifactsDownload'
    - task: CopyFiles@2
      displayName: Flatten packages to PackageArtifacts
      inputs:
        SourceFolder: '$(Build.ArtifactStagingDirectory)/PipelineArtifactsDownload'
        Contents: '**/*.nupkg'
        TargetFolder: '$(Build.ArtifactStagingDirectory)/PackageArtifacts'
        flattenFolders: true
  # This is necessary whenever we want to publish/restore to an AzDO private feed
  # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
  # otherwise it'll complain about accessing a private feed.
  - task: NuGetAuthenticate@1
    displayName: 'Authenticate to AzDO Feeds'
  # Signing validation will optionally work with the buildmanifest file which is downloaded from
  # Azure DevOps above.
  - task: PowerShell@2
    displayName: Validate
    inputs:
      filePath: eng\common\sdk-task.ps1
      arguments: -task SigningValidation -restore -msbuildEngine vs
        /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
        /p:SignCheckExclusionsFile='$(System.DefaultWorkingDirectory)/eng/SignCheckExclusionsFile.txt'
        ${{ parameters.signingValidationAdditionalParameters }}
  # Upload (redacted) binlogs produced by the sdk task for diagnosis.
  - template: /eng/common/core-templates/steps/publish-logs.yml
    parameters:
      is1ESPipeline: ${{ parameters.is1ESPipeline }}
      StageLabel: 'Validation'
      JobLabel: 'Signing'
      BinlogToolVersion: $(BinlogToolVersion)
# SourceLink validation: downloads the build's blob artifacts and checks that
# embedded SourceLink information resolves against the repo/commit that
# produced this build. Failures are non-fatal (continueOnError).
- job:
  displayName: SourceLink Validation
  condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
  pool:
    # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
    ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
      name: AzurePipelines-EO
      image: 1ESPT-Windows2025
      demands: Cmd
      os: windows
    # If it's not devdiv, it's dnceng
    ${{ else }}:
      ${{ if eq(parameters.is1ESPipeline, true) }}:
        name: $(DncEngInternalBuildPool)
        image: windows.vs2026.amd64
        os: windows
      ${{ else }}:
        name: $(DncEngInternalBuildPool)
        demands: ImageOverride -equals windows.vs2026preview.scout.amd64
  steps:
  # Resolves BAR build id / AzDO project / pipeline / build id variables used below.
  - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
    parameters:
      BARBuildId: ${{ parameters.BARBuildId }}
      PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
      is1ESPipeline: ${{ parameters.is1ESPipeline }}
  # Pre-V4 publishing: blobs were uploaded as a named build artifact.
  - ${{ if ne(parameters.publishingInfraVersion, 4) }}:
    - task: DownloadBuildArtifacts@0
      displayName: Download Blob Artifacts
      inputs:
        buildType: specific
        buildVersionToDownload: specific
        project: $(AzDOProjectName)
        pipeline: $(AzDOPipelineId)
        buildId: $(AzDOBuildId)
        artifactName: BlobArtifacts
        checkDownloadedFiles: true
  # V4 publishing: assets live under <artifact>/assets/** in pipeline
  # artifacts; download and flatten them into the BlobArtifacts layout.
  - ${{ if eq(parameters.publishingInfraVersion, 4) }}:
    - task: DownloadPipelineArtifact@2
      displayName: Download Pipeline Artifacts (V4)
      inputs:
        itemPattern: '*/assets/**'
        targetPath: '$(Build.ArtifactStagingDirectory)/PipelineArtifactsDownload'
    - task: CopyFiles@2
      displayName: Flatten assets to BlobArtifacts
      inputs:
        SourceFolder: '$(Build.ArtifactStagingDirectory)/PipelineArtifactsDownload'
        Contents: '**/*'
        TargetFolder: '$(Build.ArtifactStagingDirectory)/BlobArtifacts'
        flattenFolders: true
  - task: PowerShell@2
    displayName: Validate
    inputs:
      filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/sourcelink-validation.ps1
      arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
        -ExtractPath $(Agent.BuildDirectory)/Extract/
        -GHRepoName $(Build.Repository.Name)
        -GHCommit $(Build.SourceVersion)
        -SourcelinkCliVersion $(SourceLinkCLIVersion)
    continueOnError: true
# Publishes the build's assets to their target channels via darc, unless the
# caller opted into publishing the assets immediately during the build.
- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}:
  - stage: publish_using_darc
    # If any validation is enabled, publishing must wait for it; otherwise it
    # depends directly on the caller-provided dependency list.
    ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
      dependsOn: ${{ parameters.publishDependsOn }}
    ${{ else }}:
      dependsOn: ${{ parameters.validateDependsOn }}
    displayName: Publish using Darc
    variables:
    - template: /eng/common/core-templates/post-build/common-variables.yml
    - template: /eng/common/core-templates/variables/pool-providers.yml
      parameters:
        is1ESPipeline: ${{ parameters.is1ESPipeline }}
    jobs:
    - job:
      displayName: Publish Using Darc
      timeoutInMinutes: 120
      pool:
        # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
        ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
          name: AzurePipelines-EO
          image: 1ESPT-Windows2025
          demands: Cmd
          os: windows
        # If it's not devdiv, it's dnceng
        ${{ else }}:
          ${{ if eq(parameters.is1ESPipeline, true) }}:
            name: NetCore1ESPool-Publishing-Internal
            image: windows.vs2026.amd64
            os: windows
          ${{ else }}:
            name: NetCore1ESPool-Publishing-Internal
            demands: ImageOverride -equals windows.vs2026.amd64
      steps:
      # Resolves BAR build id and related variables used by the publish script.
      - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
        parameters:
          BARBuildId: ${{ parameters.BARBuildId }}
          PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
          is1ESPipeline: ${{ parameters.is1ESPipeline }}
      - task: NuGetAuthenticate@1
      # Populate internal runtime variables.
      # Use the core-templates steps directly and forward is1ESPipeline: the
      # legacy /eng/common/templates wrappers hard-code is1ESPipeline=false and
      # would misconfigure 1ES pipelines when referenced from a core template.
      - template: /eng/common/core-templates/steps/enable-internal-sources.yml
        parameters:
          is1ESPipeline: ${{ parameters.is1ESPipeline }}
          legacyCredential: $(dn-bot-dnceng-artifact-feeds-rw)
      - template: /eng/common/core-templates/steps/enable-internal-runtimes.yml
        parameters:
          is1ESPipeline: ${{ parameters.is1ESPipeline }}
      # Darc is targeting 8.0, so make sure it's installed
      - task: UseDotNet@2
        inputs:
          version: 8.0.x
      - task: AzureCLI@2
        displayName: Publish Using Darc
        inputs:
          azureSubscription: "Darc: Maestro Production"
          scriptType: ps
          scriptLocation: scriptPath
          scriptPath: $(System.DefaultWorkingDirectory)/eng/common/post-build/publish-using-darc.ps1
          # NOTE(review): -PublishingInfraVersion is hard-coded to 3 here while
          # the surrounding jobs branch on parameters.publishingInfraVersion —
          # confirm this is intentional.
          arguments: >
            -BuildId $(BARBuildId)
            -PublishingInfraVersion 3
            -AzdoToken '$(System.AccessToken)'
            -WaitPublishingFinish true
            -RequireDefaultChannels ${{ parameters.requireDefaultChannels }}
            -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
            -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
            -SkipAssetsPublishing '${{ parameters.isAssetlessBuild }}'
            -runtimeSourceFeed https://ci.dot.net/internal
            -runtimeSourceFeedKey '$(dotnetbuilds-internal-container-read-token-base64)'
================================================
FILE: eng/common/core-templates/post-build/setup-maestro-vars.yml
================================================
# Sets the release-configuration pipeline variables (BARBuildId, TargetChannels,
# IsStableBuild, AzDOProjectName, AzDOPipelineId, AzDOBuildId) either from the
# ReleaseConfigs artifact produced by the build, or — when PromoteToChannelIds
# is supplied — from darc's view of the BAR build.
parameters:
  BARBuildId: ''
  PromoteToChannelIds: ''
  is1ESPipeline: ''
steps:
  # Guard: this template must only be reached through the templates/ or
  # templates-official/ wrappers, which always set is1ESPipeline.
  - ${{ if eq(parameters.is1ESPipeline, '') }}:
    - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
  # Only need the ReleaseConfigs artifact when no channel ids were passed in.
  - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
    - task: DownloadPipelineArtifact@2
      displayName: Download Release Configs
      inputs:
        artifactName: ReleaseConfigs
        targetPath: '$(Build.StagingDirectory)/ReleaseConfigs'
  # Runs under AzureCLI so darc can authenticate to Maestro when it is invoked.
  # The inline script reads ReleaseConfigs.txt (line 0: BAR id, line 1: channels,
  # line 2: stable flag) in the artifact path, or queries darc in the
  # PromoteToMaestroChannels path, then emits the variables via logging commands.
  - task: AzureCLI@2
    name: setReleaseVars
    displayName: Set Release Configs Vars
    inputs:
      azureSubscription: "Darc: Maestro Production"
      scriptType: pscore
      scriptLocation: inlineScript
      inlineScript: |
        try {
          if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
            $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
            $BarId = $Content | Select -Index 0
            $Channels = $Content | Select -Index 1
            $IsStableBuild = $Content | Select -Index 2
            $AzureDevOpsProject = $Env:System_TeamProject
            $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
            $AzureDevOpsBuildId = $Env:Build_BuildId
          }
          else {
            . $(System.DefaultWorkingDirectory)\eng\common\tools.ps1
            $darc = Get-Darc
            $buildInfo = & $darc get-build `
              --id ${{ parameters.BARBuildId }} `
              --extended `
              --output-format json `
              --ci `
              | convertFrom-Json
            $BarId = ${{ parameters.BARBuildId }}
            $Channels = $Env:PromoteToMaestroChannels -split ","
            $Channels = $Channels -join "]["
            $Channels = "[$Channels]"
            $IsStableBuild = $buildInfo.stable
            $AzureDevOpsProject = $buildInfo.azureDevOpsProject
            $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
            $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
          }
          Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId"
          Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels"
          Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild"
          Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject"
          Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId"
          Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId"
        }
        catch {
          Write-Host $_
          Write-Host $_.Exception
          Write-Host $_.ScriptStackTrace
          exit 1
        }
    env:
      PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}
================================================
FILE: eng/common/core-templates/steps/cleanup-microbuild.yml
================================================
# Runs the MicroBuild cleanup task after signing, mirroring the install-time
# conditions in install-microbuild.yml: on Windows for real/test sign types,
# and (optionally) on Mac/Linux for real signing only.
parameters:
  # Enable cleanup tasks for MicroBuild
  enableMicrobuild: false
  # Enable cleanup tasks for MicroBuild on Mac and Linux
  # Will be ignored if 'enableMicrobuild' is false or 'Agent.Os' is 'Windows_NT'
  enableMicrobuildForMacAndLinux: false
  continueOnError: false
steps:
  - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
    - task: MicroBuildCleanup@1
      displayName: Execute Microbuild cleanup tasks
      # always() ensures cleanup runs even when the build failed; the Mac/Linux
      # leg is compiled in only when enableMicrobuildForMacAndLinux is true.
      condition: and(
        always(),
        or(
          and(
            eq(variables['Agent.Os'], 'Windows_NT'),
            in(variables['_SignType'], 'real', 'test')
          ),
          and(
            ${{ eq(parameters.enableMicrobuildForMacAndLinux, true) }},
            ne(variables['Agent.Os'], 'Windows_NT'),
            eq(variables['_SignType'], 'real')
          )
        ))
      continueOnError: ${{ parameters.continueOnError }}
      env:
        TeamName: $(_TeamName)
================================================
FILE: eng/common/core-templates/steps/enable-internal-runtimes.yml
================================================
# Obtains internal runtime download credentials and populates the 'dotnetbuilds-internal-container-read-token-base64'
# variable with the base64-encoded SAS token, by default
parameters:
- name: federatedServiceConnection
  type: string
  default: 'dotnetbuilds-internal-read'
- name: outputVariableName
  type: string
  default: 'dotnetbuilds-internal-container-read-token-base64'
- name: expiryInHours
  type: number
  default: 1
- name: base64Encode
  type: boolean
  default: true
- name: is1ESPipeline
  type: boolean
  default: false
steps:
# Public-project builds have no access to internal runtimes; emit nothing there.
- ${{ if ne(variables['System.TeamProject'], 'public') }}:
  # Delegates SAS generation to the shared template, pinned to the
  # dotnetbuilds/internal container with read+list permissions.
  - template: /eng/common/core-templates/steps/get-delegation-sas.yml
    parameters:
      federatedServiceConnection: ${{ parameters.federatedServiceConnection }}
      outputVariableName: ${{ parameters.outputVariableName }}
      expiryInHours: ${{ parameters.expiryInHours }}
      base64Encode: ${{ parameters.base64Encode }}
      storageAccount: dotnetbuilds
      container: internal
      permissions: rl
      is1ESPipeline: ${{ parameters.is1ESPipeline }}
================================================
FILE: eng/common/core-templates/steps/enable-internal-sources.yml
================================================
# Configures NuGet.config so restore can reach internal AzDO feeds.
# Credential selection, in order:
#   1. legacyCredential (PAT) when supplied,
#   2. default SetupNugetSources behavior on the dnceng 'internal' project,
#   3. a federated access token everywhere else (e.g. DevDiv).
parameters:
  # This is the Azure federated service connection that we log into to get an access token.
- name: nugetFederatedServiceConnection
  type: string
  default: 'dnceng-artifacts-feeds-read'
- name: is1ESPipeline
  type: boolean
  default: false
  # Legacy parameters to allow for PAT usage
- name: legacyCredential
  type: string
  default: ''
steps:
# Public-project builds have no internal feeds to set up.
- ${{ if ne(variables['System.TeamProject'], 'public') }}:
  - ${{ if ne(parameters.legacyCredential, '') }}:
    - task: PowerShell@2
      displayName: Setup Internal Feeds
      inputs:
        filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.ps1
        arguments: -ConfigFile $(System.DefaultWorkingDirectory)/NuGet.config -Password $Env:Token
      env:
        # PAT is passed via environment so it never appears on the command line.
        Token: ${{ parameters.legacyCredential }}
  # If running on dnceng (internal project), just use the default behavior for NuGetAuthenticate.
  # If running on DevDiv, NuGetAuthenticate is not really an option. It's scoped to a single feed, and we have many feeds that
  # may be added. Instead, we'll use the traditional approach (add cred to nuget.config), but use an account token.
  - ${{ else }}:
    - ${{ if eq(variables['System.TeamProject'], 'internal') }}:
      - task: PowerShell@2
        displayName: Setup Internal Feeds
        inputs:
          filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.ps1
          arguments: -ConfigFile $(System.DefaultWorkingDirectory)/NuGet.config
    - ${{ else }}:
      # Use the core-templates step directly and forward is1ESPipeline; the
      # legacy /eng/common/templates wrapper hard-codes is1ESPipeline=false,
      # which is wrong when this core template runs in a 1ES pipeline.
      - template: /eng/common/core-templates/steps/get-federated-access-token.yml
        parameters:
          federatedServiceConnection: ${{ parameters.nugetFederatedServiceConnection }}
          outputVariableName: 'dnceng-artifacts-feeds-read-access-token'
          is1ESPipeline: ${{ parameters.is1ESPipeline }}
      - task: PowerShell@2
        displayName: Setup Internal Feeds
        inputs:
          filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.ps1
          arguments: -ConfigFile $(System.DefaultWorkingDirectory)/NuGet.config -Password $(dnceng-artifacts-feeds-read-access-token)
  # This is required in certain scenarios to install the ADO credential provider.
  # It installed by default in some msbuild invocations (e.g. VS msbuild), but needs to be installed for others
  # (e.g. dotnet msbuild).
  - task: NuGetAuthenticate@1
================================================
FILE: eng/common/core-templates/steps/generate-sbom.yml
================================================
# Deprecated shim: SBOM generation moved into 1ES Pipeline Templates. All
# parameters are accepted (and ignored) for compatibility with old callers;
# including this template only emits a build warning.
parameters:
  PackageVersion: unused
  BuildDropPath: unused
  PackageName: unused
  ManifestDirPath: unused
  IgnoreDirectories: unused
  sbomContinueOnError: unused
  is1ESPipeline: unused
  publishArtifacts: unused
steps:
- script: |
    echo "##vso[task.logissue type=warning]Including generate-sbom.yml is deprecated, SBOM generation is handled 1ES PT now. Remove this include."
  displayName: Issue generate-sbom.yml deprecation warning
================================================
FILE: eng/common/core-templates/steps/get-delegation-sas.yml
================================================
# Generates a user-delegation SAS token for a blob container via the Azure CLI
# (federated login, no storage account key) and exports it as a secret pipeline
# variable, optionally base64-encoded.
parameters:
- name: federatedServiceConnection
  type: string
- name: outputVariableName
  type: string
- name: expiryInHours
  type: number
  default: 1
- name: base64Encode
  type: boolean
  default: false
- name: storageAccount
  type: string
- name: container
  type: string
- name: permissions
  type: string
  default: 'rl'
# Accepted for interface parity with sibling templates; not used by this step.
- name: is1ESPipeline
  type: boolean
  default: false
steps:
- task: AzureCLI@2
  displayName: 'Generate delegation SAS Token for ${{ parameters.storageAccount }}/${{ parameters.container }}'
  inputs:
    azureSubscription: ${{ parameters.federatedServiceConnection }}
    scriptType: 'pscore'
    scriptLocation: 'inlineScript'
    inlineScript: |
      # Calculate the expiration of the SAS token and convert to UTC
      $expiry = (Get-Date).AddHours(${{ parameters.expiryInHours }}).ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ")
      $sas = az storage container generate-sas --account-name ${{ parameters.storageAccount }} --name ${{ parameters.container }} --permissions ${{ parameters.permissions }} --expiry $expiry --auth-mode login --as-user -o tsv
      if ($LASTEXITCODE -ne 0) {
        Write-Error "Failed to generate SAS token."
        exit 1
      }
      if ('${{ parameters.base64Encode }}' -eq 'true') {
        $sas = [Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($sas))
      }
      Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value"
      Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true]$sas"
================================================
FILE: eng/common/core-templates/steps/get-federated-access-token.yml
================================================
# Obtains an Azure AD access token for the given resource through a federated
# service connection and exports it as a secret pipeline variable (optionally
# as a step-output variable so later jobs can consume it).
parameters:
- name: federatedServiceConnection
  type: string
- name: outputVariableName
  type: string
# Required (no default): callers must state which pipeline flavor they are.
- name: is1ESPipeline
  type: boolean
- name: stepName
  type: string
  default: 'getFederatedAccessToken'
- name: condition
  type: string
  default: ''
# Resource to get a token for. Common values include:
# - '499b84ac-1321-427f-aa17-267ca6975798' for Azure DevOps
# - 'https://storage.azure.com/' for storage
# Defaults to Azure DevOps
- name: resource
  type: string
  default: '499b84ac-1321-427f-aa17-267ca6975798'
- name: isStepOutputVariable
  type: boolean
  default: false
steps:
- task: AzureCLI@2
  displayName: 'Getting federated access token for feeds'
  name: ${{ parameters.stepName }}
  # Only attach a condition when the caller supplied one.
  ${{ if ne(parameters.condition, '') }}:
    condition: ${{ parameters.condition }}
  inputs:
    azureSubscription: ${{ parameters.federatedServiceConnection }}
    scriptType: 'pscore'
    scriptLocation: 'inlineScript'
    inlineScript: |
      $accessToken = az account get-access-token --query accessToken --resource ${{ parameters.resource }} --output tsv
      if ($LASTEXITCODE -ne 0) {
        Write-Error "Failed to get access token for resource '${{ parameters.resource }}'"
        exit 1
      }
      Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value"
      Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true;isOutput=${{ parameters.isStepOutputVariable }}]$accessToken"
================================================
FILE: eng/common/core-templates/steps/install-microbuild.yml
================================================
# Installs the MicroBuild signing plugin. On Windows it is installed for
# real/test sign types; on Mac/Linux (opt-in) only for real signing, and a
# pinned .NET SDK plus global.json are set up first so the plugin can restore.
parameters:
  # Enable install tasks for MicroBuild
  enableMicrobuild: false
  # Enable install tasks for MicroBuild on Mac and Linux
  # Will be ignored if 'enableMicrobuild' is false or 'Agent.Os' is 'Windows_NT'
  enableMicrobuildForMacAndLinux: false
  # Determines whether the ESRP service connection information should be passed to the signing plugin.
  # This overlaps with _SignType to some degree. We only need the service connection for real signing.
  # It's important that the service connection not be passed to the MicroBuildSigningPlugin task in this place.
  # Doing so will cause the service connection to be authorized for the pipeline, which isn't allowed and won't work for non-prod.
  # Unfortunately, _SignType can't be used to exclude the use of the service connection in non-real sign scenarios. The
  # variable is not available in template expression. _SignType has a very large proliferation across .NET, so replacing it is tough.
  microbuildUseESRP: true
  # Microbuild installation directory
  microBuildOutputFolder: $(Agent.TempDirectory)/MicroBuild
  continueOnError: false
steps:
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
  - ${{ if eq(parameters.enableMicrobuildForMacAndLinux, 'true') }}:
    # Needed to download the MicroBuild plugin nupkgs on Mac and Linux when nuget.exe is unavailable
    - task: UseDotNet@2
      displayName: Install .NET 8.0 SDK for MicroBuild Plugin
      inputs:
        packageType: sdk
        version: 8.0.x
        installationPath: ${{ parameters.microBuildOutputFolder }}/.dotnet-microbuild
      condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
    # Pin the freshly installed SDK via a global.json next to the plugin so the
    # plugin's restore resolves a known-good SDK.
    - script: |
        set -euo pipefail
        # UseDotNet@2 prepends the dotnet executable path to the PATH variable, so we can call dotnet directly
        version=$(dotnet --version)
        # The here-doc delimiter must be UNQUOTED so $version expands; a quoted
        # delimiter ('EOF') would write the literal text "$version" into
        # global.json and break SDK resolution.
        cat << EOF > ${{ parameters.microBuildOutputFolder }}/global.json
        {
          "sdk": {
            "version": "$version",
            "paths": [
              "${{ parameters.microBuildOutputFolder }}/.dotnet-microbuild"
            ],
            "errorMessage": "The .NET SDK version $version is required to install the MicroBuild signing plugin."
          }
        }
        EOF
      displayName: 'Add global.json to MicroBuild Installation path'
      workingDirectory: ${{ parameters.microBuildOutputFolder }}
      condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
  # Guard on both OS families: disabling ESRP while real-signing is a misconfiguration.
  - script: |
      REM Check if ESRP is disabled while SignType is real
      if /I "${{ parameters.microbuildUseESRP }}"=="false" if /I "$(_SignType)"=="real" (
        echo Error: ESRP must be enabled when SignType is real.
        exit /b 1
      )
    displayName: 'Validate ESRP usage (Windows)'
    condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'))
  - script: |
      # Check if ESRP is disabled while SignType is real
      if [ "${{ parameters.microbuildUseESRP }}" = "false" ] && [ "$(_SignType)" = "real" ]; then
        echo "Error: ESRP must be enabled when SignType is real."
        exit 1
      fi
    displayName: 'Validate ESRP usage (Non-Windows)'
    condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
  # Two different MB install steps. This is due to not being able to use the agent OS during
  # YAML expansion, and Windows vs. Linux/Mac uses different service connections. However,
  # we can avoid including the MB install step if not enabled at all. This avoids a bunch of
  # extra pipeline authorizations, since most pipelines do not sign on non-Windows.
  - task: MicroBuildSigningPlugin@4
    displayName: Install MicroBuild plugin (Windows)
    inputs:
      signType: $(_SignType)
      zipSources: false
      feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
      ${{ if eq(parameters.microbuildUseESRP, true) }}:
        ConnectedServiceName: 'MicroBuild Signing Task (DevDiv)'
        ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
          ConnectedPMEServiceName: 6cc74545-d7b9-4050-9dfa-ebefcc8961ea
        ${{ else }}:
          ConnectedPMEServiceName: 248d384a-b39b-46e3-8ad5-c2c210d5e7ca
    env:
      TeamName: $(_TeamName)
      MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }}
      SYSTEM_ACCESSTOKEN: $(System.AccessToken)
    continueOnError: ${{ parameters.continueOnError }}
    condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'), in(variables['_SignType'], 'real', 'test'))
  - ${{ if eq(parameters.enableMicrobuildForMacAndLinux, true) }}:
    - task: MicroBuildSigningPlugin@4
      displayName: Install MicroBuild plugin (non-Windows)
      inputs:
        signType: $(_SignType)
        zipSources: false
        feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
        workingDirectory: ${{ parameters.microBuildOutputFolder }}
        ${{ if eq(parameters.microbuildUseESRP, true) }}:
          ConnectedServiceName: 'MicroBuild Signing Task (DevDiv)'
          ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
            ConnectedPMEServiceName: beb8cb23-b303-4c95-ab26-9e44bc958d39
          ${{ else }}:
            ConnectedPMEServiceName: c24de2a5-cc7a-493d-95e4-8e5ff5cad2bc
      env:
        TeamName: $(_TeamName)
        MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }}
        SYSTEM_ACCESSTOKEN: $(System.AccessToken)
      continueOnError: ${{ parameters.continueOnError }}
      condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'), eq(variables['_SignType'], 'real'))
================================================
FILE: eng/common/core-templates/steps/publish-build-artifacts.yml
================================================
# Dispatch shim: routes to the classic or 1ES-official implementation of
# publish-build-artifacts based on is1ESPipeline, forwarding all entries of
# the 'args' object untouched.
parameters:
- name: is1ESPipeline
  type: boolean
  default: false
- name: args
  type: object
  default: {}
steps:
- ${{ if ne(parameters.is1ESPipeline, true) }}:
  - template: /eng/common/templates/steps/publish-build-artifacts.yml
    parameters:
      is1ESPipeline: ${{ parameters.is1ESPipeline }}
      ${{ each parameter in parameters.args }}:
        ${{ parameter.key }}: ${{ parameter.value }}
- ${{ else }}:
  - template: /eng/common/templates-official/steps/publish-build-artifacts.yml
    parameters:
      is1ESPipeline: ${{ parameters.is1ESPipeline }}
      ${{ each parameter in parameters.args }}:
        ${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/core-templates/steps/publish-logs.yml
================================================
# Collects post-build binlogs, redacts known secrets from them, and publishes
# the result as a pipeline artifact. All steps run with condition: always()
# so logs are captured even when the build failed.
parameters:
  StageLabel: ''
  JobLabel: ''
  CustomSensitiveDataList: ''
  # A default - in case value from eng/common/core-templates/post-build/common-variables.yml is not passed
  BinlogToolVersion: '1.0.11'
  is1ESPipeline: false
steps:
# Stage the Debug binlogs into a StageLabel/JobLabel folder structure.
- task: Powershell@2
  displayName: Prepare Binlogs to Upload
  inputs:
    targetType: inline
    script: |
      New-Item -ItemType Directory $(System.DefaultWorkingDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
      Move-Item -Path $(System.DefaultWorkingDirectory)/artifacts/log/Debug/* $(System.DefaultWorkingDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
  continueOnError: true
  condition: always()
- task: PowerShell@2
  displayName: Redact Logs
  inputs:
    filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/redact-logs.ps1
    # For now this needs to have explicit list of all sensitive data. Taken from eng/publishing/v3/publish.yml
    # Sensitive data can as well be added to $(System.DefaultWorkingDirectory)/eng/BinlogSecretsRedactionFile.txt'
    # If the file exists - sensitive data for redaction will be sourced from it
    # (single entry per line, lines starting with '# ' are considered comments and skipped)
    arguments: -InputPath '$(System.DefaultWorkingDirectory)/PostBuildLogs'
      -BinlogToolVersion '${{parameters.BinlogToolVersion}}'
      -TokensFilePath '$(System.DefaultWorkingDirectory)/eng/BinlogSecretsRedactionFile.txt'
      -runtimeSourceFeed https://ci.dot.net/internal
      -runtimeSourceFeedKey '$(dotnetbuilds-internal-container-read-token-base64)'
      '$(publishing-dnceng-devdiv-code-r-build-re)'
      '$(dn-bot-all-orgs-artifact-feeds-rw)'
      '$(akams-client-id)'
      '$(microsoft-symbol-server-pat)'
      '$(symweb-symbol-server-pat)'
      '$(dnceng-symbol-server-pat)'
      '$(dn-bot-all-orgs-build-rw-code-rw)'
      '$(System.AccessToken)'
      ${{parameters.CustomSensitiveDataList}}
  continueOnError: true
  condition: always()
- task: CopyFiles@2
  displayName: Gather post build logs
  inputs:
    SourceFolder: '$(System.DefaultWorkingDirectory)/PostBuildLogs'
    Contents: '**'
    TargetFolder: '$(Build.ArtifactStagingDirectory)/PostBuildLogs'
  condition: always()
# Publish via the is1ESPipeline-aware shim; artifact name includes the job
# attempt so reruns don't collide.
- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
  parameters:
    is1ESPipeline: ${{ parameters.is1ESPipeline }}
    args:
      displayName: Publish Logs
      targetPath: '$(Build.ArtifactStagingDirectory)/PostBuildLogs'
      artifactName: PostBuildLogs_${{ parameters.StageLabel }}_${{ parameters.JobLabel }}_Attempt$(System.JobAttempt)
      continueOnError: true
      condition: always()
      retryCountOnTaskFailure: 10 # for any files being locked
      isProduction: false # logs are non-production artifacts
================================================
FILE: eng/common/core-templates/steps/publish-pipeline-artifacts.yml
================================================
# Dispatch shim: routes to the classic or 1ES-official implementation of
# publish-pipeline-artifacts based on is1ESPipeline. Note that it forwards ALL
# parameters (including is1ESPipeline and the 'args' object) to the target.
parameters:
- name: is1ESPipeline
  type: boolean
  default: false
- name: args
  type: object
  default: {}
steps:
- ${{ if ne(parameters.is1ESPipeline, true) }}:
  - template: /eng/common/templates/steps/publish-pipeline-artifacts.yml
    parameters:
      ${{ each parameter in parameters }}:
        ${{ parameter.key }}: ${{ parameter.value }}
- ${{ else }}:
  - template: /eng/common/templates-official/steps/publish-pipeline-artifacts.yml
    parameters:
      ${{ each parameter in parameters }}:
        ${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/core-templates/steps/retain-build.yml
================================================
# Marks a build for permanent retention by invoking retain-build.ps1.
# Defaults target the currently running build/org/project; Token and BuildId
# fall back to $(System.AccessToken) / $(Build.BuildId) via env vars.
parameters:
  # Optional azure devops PAT with build execute permissions for the build's organization,
  # only needed if the build that should be retained ran on a different organization than
  # the pipeline where this template is executing from
  Token: ''
  # Optional BuildId to retain, defaults to the current running build
  BuildId: ''
  # Azure devops Organization URI for the build in the https://dev.azure.com/ format.
  # Defaults to the organization the current pipeline is running on
  AzdoOrgUri: '$(System.CollectionUri)'
  # Azure devops project for the build. Defaults to the project the current pipeline is running on
  AzdoProject: '$(System.TeamProject)'
steps:
- task: powershell@2
  inputs:
    targetType: 'filePath'
    filePath: eng/common/retain-build.ps1
    pwsh: true
    # '-AzdoOrgUri:' uses PowerShell's '-Name: value' parameter-binding form.
    # coalesce() falls back to the env-var expressions set below when the
    # optional parameters are empty.
    arguments: >
      -AzdoOrgUri: ${{parameters.AzdoOrgUri}}
      -AzdoProject ${{parameters.AzdoProject}}
      -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
      -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
  displayName: Enable permanent build retention
  env:
    SYSTEM_ACCESSTOKEN: $(System.AccessToken)
    BUILD_ID: $(Build.BuildId)
================================================
FILE: eng/common/core-templates/steps/send-to-helix.yml
================================================
# Please remember to update the documentation if you make changes to these parameters!
parameters:
HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
HelixProjectPath: 'eng/common/helixpublish.proj' # optional -- path to the project file to build relative to BUILD_SOURCESDIRECTORY
HelixProjectArguments: '' # optional -- arguments passed to the build command
HelixConfiguration: '' # optional -- additional property attached to a job
HelixPreCommands: '' # optional -- commands to run before Helix work item execution
HelixPostCommands: '' # optional -- commands to run after Helix work item execution
WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net )
Creator: '' # optional -- if the build is external, use this to specify who is sending the job
DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
steps:
  # Two mutually exclusive legs: exactly one runs per agent, selected by the
  # Agent.Os conditions below. Both invoke the Helix SDK 'Test' target on the
  # project given by HelixProjectPath; all template parameters are forwarded to
  # the SDK through environment variables (the SDK reads them by name).
  - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
    displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
    env:
      BuildConfig: $(_BuildConfig)
      HelixSource: ${{ parameters.HelixSource }}
      HelixType: ${{ parameters.HelixType }}
      HelixBuild: ${{ parameters.HelixBuild }}
      HelixConfiguration: ${{ parameters.HelixConfiguration }}
      HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
      HelixAccessToken: ${{ parameters.HelixAccessToken }}
      HelixPreCommands: ${{ parameters.HelixPreCommands }}
      HelixPostCommands: ${{ parameters.HelixPostCommands }}
      WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
      WorkItemCommand: ${{ parameters.WorkItemCommand }}
      WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
      CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
      XUnitProjects: ${{ parameters.XUnitProjects }}
      XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
      XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
      XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
      XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
      IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
      DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
      DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
      WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
      HelixBaseUri: ${{ parameters.HelixBaseUri }}
      Creator: ${{ parameters.Creator }}
      # AzDO job access token, exposed so the SDK can talk back to the pipeline.
      SYSTEM_ACCESSTOKEN: $(System.AccessToken)
    condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
    continueOnError: ${{ parameters.continueOnError }}
  # Unix leg: same invocation via the msbuild.sh wrapper; env block mirrors the
  # Windows leg exactly so behavior is identical across platforms.
  - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
    displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
    env:
      BuildConfig: $(_BuildConfig)
      HelixSource: ${{ parameters.HelixSource }}
      HelixType: ${{ parameters.HelixType }}
      HelixBuild: ${{ parameters.HelixBuild }}
      HelixConfiguration: ${{ parameters.HelixConfiguration }}
      HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
      HelixAccessToken: ${{ parameters.HelixAccessToken }}
      HelixPreCommands: ${{ parameters.HelixPreCommands }}
      HelixPostCommands: ${{ parameters.HelixPostCommands }}
      WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
      WorkItemCommand: ${{ parameters.WorkItemCommand }}
      WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
      CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
      XUnitProjects: ${{ parameters.XUnitProjects }}
      XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
      XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
      XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
      XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
      IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
      DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
      DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
      WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
      HelixBaseUri: ${{ parameters.HelixBaseUri }}
      Creator: ${{ parameters.Creator }}
      SYSTEM_ACCESSTOKEN: $(System.AccessToken)
    condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
    continueOnError: ${{ parameters.continueOnError }}
================================================
FILE: eng/common/core-templates/steps/source-build.yml
================================================
parameters:
  # This template adds arcade-powered source-build to CI.

  # This is a 'steps' template, and is intended for advanced scenarios where the existing build
  # infra has a careful build methodology that must be followed. For example, a repo
  # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
  # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
  # GitHub. Using this steps template leaves room for that infra to be included.

  # Defines the platform on which to run the steps. See 'eng/common/core-templates/job/source-build.yml'
  # for details. The entire object is described in the 'job' template for simplicity, even though
  # the usage of the properties on this object is split between the 'job' and 'steps' templates.
  platform: {}

  is1ESPipeline: false

steps:
# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
- script: |
    set -x
    df -h

    # If building on the internal project, the internal storage variable may be available (usually only if needed)
    # In that case, add variables to allow the download of internal runtimes if the specified versions are not found
    # in the default public locations.
    # NOTE: the '$''(name)' trick compares the macro's expansion against the literal
    # text "$(name)"; when AzDO does not define the variable the macro is left
    # unexpanded and the two sides are equal, so the feature is skipped.
    internalRuntimeDownloadArgs=
    if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
      internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://ci.dot.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://ci.dot.net/internal --runtimesourcefeedkey '$(dotnetbuilds-internal-container-read-token-base64)''
    fi

    buildConfig=Release
    # Check if AzDO substitutes in a build config from a variable, and use it if so.
    if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
      buildConfig='$(_BuildConfig)'
    fi

    # Optional MSBuild arguments derived from the 'platform' parameter object.
    targetRidArgs=
    if [ '${{ parameters.platform.targetRID }}' != '' ]; then
      targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
    fi

    portableBuildArgs=
    if [ '${{ parameters.platform.portableBuild }}' != '' ]; then
      portableBuildArgs='/p:PortableBuild=${{ parameters.platform.portableBuild }}'
    fi

    ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
      --configuration $buildConfig \
      --restore --build --pack -bl \
      --source-build \
      ${{ parameters.platform.buildArguments }} \
      $internalRuntimeDownloadArgs \
      $targetRidArgs \
      $portableBuildArgs \
  displayName: Build

# Publish build logs even on failure so source-build breaks can be diagnosed.
- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
  parameters:
    is1ESPipeline: ${{ parameters.is1ESPipeline }}
    args:
      displayName: Publish BuildLogs
      targetPath: artifacts/log/${{ coalesce(variables._BuildConfig, 'Release') }}
      artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
      continueOnError: true
      condition: succeededOrFailed()
      isProduction: false # logs are non-production artifacts
================================================
FILE: eng/common/core-templates/steps/source-index-stage1-publish.yml
================================================
parameters:
  # Versions of the netsourceindex tools installed below.
  sourceIndexUploadPackageVersion: 2.0.0-20250818.1
  sourceIndexProcessBinlogPackageVersion: 1.0.1-20250818.1
  # Feed hosting the BinLogToSln / UploadIndexStage1 tool packages.
  sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
  # Binlog produced by the build; converted into an indexable solution.
  binlogPath: artifacts/log/Debug/Build.binlog
  # Fix: declared here because the upload condition below references it;
  # Azure Pipelines fails template expansion when an undeclared parameter
  # is referenced. Public/PR runs set this true to skip the upload.
  runAsPublic: false

steps:
- task: UseDotNet@2
  displayName: "Source Index: Use .NET 9 SDK"
  inputs:
    packageType: sdk
    version: 9.0.x
    installationPath: $(Agent.TempDirectory)/dotnet
    workingDirectory: $(Agent.TempDirectory)

- script: |
    $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version ${{parameters.sourceIndexProcessBinlogPackageVersion}} --add-source ${{parameters.sourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools
    $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version ${{parameters.sourceIndexUploadPackageVersion}} --add-source ${{parameters.sourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools
  displayName: "Source Index: Download netsourceindex Tools"
  # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
  workingDirectory: $(Agent.TempDirectory)

- script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i ${{parameters.binlogPath}} -r $(System.DefaultWorkingDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
  displayName: "Source Index: Process Binlog into indexable sln"

# Upload only from internal (non-public, non-PR) builds.
- ${{ if and(ne(parameters.runAsPublic, 'true'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
  - task: AzureCLI@2
    displayName: "Source Index: Upload Source Index stage1 artifacts to Azure"
    inputs:
      azureSubscription: 'SourceDotNet Stage1 Publish'
      addSpnToEnvironment: true
      scriptType: 'ps'
      scriptLocation: 'inlineScript'
      inlineScript: |
        $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1
================================================
FILE: eng/common/core-templates/variables/pool-providers.yml
================================================
parameters:
  is1ESPipeline: false

variables:
  # Dispatch to the pool-provider variable template matching the pipeline
  # flavor (1ES Pipeline Templates vs. classic) so consumers resolve the
  # correct pool names for their environment.
  - ${{ if eq(parameters.is1ESPipeline, 'true') }}:
    - template: /eng/common/templates-official/variables/pool-providers.yml
  - ${{ else }}:
    - template: /eng/common/templates/variables/pool-providers.yml
================================================
FILE: eng/common/cross/arm/tizen/tizen.patch
================================================
diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
--- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900
+++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900
@@ -2,4 +2,4 @@
Use the shared library, but some functions are only in
the static library, so try that secondarily. */
OUTPUT_FORMAT(elf32-littlearm)
-GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux-armhf.so.3 ) )
+GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-armhf.so.3 ) )
================================================
FILE: eng/common/cross/arm64/tizen/tizen.patch
================================================
diff -u -r a/usr/lib64/libc.so b/usr/lib64/libc.so
--- a/usr/lib64/libc.so 2016-12-30 23:00:08.284951863 +0900
+++ b/usr/lib64/libc.so 2016-12-30 23:00:32.140951815 +0900
@@ -2,4 +2,4 @@
Use the shared library, but some functions are only in
the static library, so try that secondarily. */
OUTPUT_FORMAT(elf64-littleaarch64)
-GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib64/ld-linux-aarch64.so.1 ) )
+GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-aarch64.so.1 ) )
================================================
FILE: eng/common/cross/armel/tizen/tizen.patch
================================================
diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
--- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900
+++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900
@@ -2,4 +2,4 @@
Use the shared library, but some functions are only in
the static library, so try that secondarily. */
OUTPUT_FORMAT(elf32-littlearm)
-GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux.so.3 ) )
+GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux.so.3 ) )
================================================
FILE: eng/common/cross/build-android-rootfs.sh
================================================
#!/usr/bin/env bash

# Builds an Android cross-compilation toolchain and sysroot by downloading the
# Android NDK and LLDB, then layering Termux packages onto the NDK sysroot.
set -e

# Default Android NDK release to download (overridable via --ndk).
__NDK_Version=r21

# Print help text and exit with a failure status.
usage()
{
    echo "Creates a toolchain and sysroot used for cross-compiling for Android."
    echo
    echo "Usage: $0 [BuildArch] [ApiLevel] [--ndk NDKVersion]"
    echo
    echo "BuildArch is the target architecture of Android. Currently only arm64 is supported."
    echo "ApiLevel is the target Android API level. API levels usually match to Android releases. See https://source.android.com/source/build-numbers.html"
    echo "NDKVersion is the version of Android NDK. The default is r21. See https://developer.android.com/ndk/downloads/revision_history"
    echo
    echo "By default, the toolchain and sysroot will be generated in cross/android-rootfs/toolchain/[BuildArch]. You can change this behavior"
    echo "by setting the TOOLCHAIN_DIR environment variable"
    echo
    echo "By default, the NDK will be downloaded into the cross/android-rootfs/android-ndk-$__NDK_Version directory. If you already have an NDK installation,"
    echo "you can set the NDK_DIR environment variable to have this script use that installation of the NDK."
    echo "By default, this script will generate a file, android_platform, in the root of the ROOTFS_DIR directory that contains the RID for the supported and tested Android build: android.28-arm64. This file is to replace '/etc/os-release', which is not available for Android."
    exit 1
}
# Defaults: target the arm64 (aarch64) toolchain at API level 28.
__ApiLevel=28 # The minimum platform for arm64 is API level 21 but the minimum version that support glob(3) is 28. See $ANDROID_NDK/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/include/glob.h
__BuildArch=arm64
__AndroidArch=aarch64
__AndroidToolchain=aarch64-linux-android

# Parse command-line arguments; matching is case-insensitive via lowerI.
while :; do
    if [[ "$#" -le 0 ]]; then
        break
    fi

    i=$1

    lowerI="$(echo $i | tr "[:upper:]" "[:lower:]")"
    case $lowerI in
        -?|-h|--help)
            usage
            exit 1
            ;;
        arm64)
            __BuildArch=arm64
            __AndroidArch=aarch64
            __AndroidToolchain=aarch64-linux-android
            ;;
        arm)
            __BuildArch=arm
            __AndroidArch=arm
            __AndroidToolchain=arm-linux-androideabi
            ;;
        --ndk)
            # --ndk consumes the next argument as the NDK version string.
            shift
            __NDK_Version=$1
            ;;
        *[0-9])
            # Any argument ending in a digit is treated as the API level.
            __ApiLevel=$i
            ;;
        *)
            # Unrecognized arguments are collected (currently unused downstream
            # in this script — TODO confirm intent).
            __UnprocessedBuildArgs="$__UnprocessedBuildArgs $i"
            ;;
    esac

    shift
done
# NDK r21/r22 ship as a standalone "-x86_64" zip whose sysroot sits at the
# archive root; newer NDK packages nest the sysroot under the LLVM toolchain.
if [[ "$__NDK_Version" == "r21" ]] || [[ "$__NDK_Version" == "r22" ]]; then
    __NDK_File_Arch_Spec=-x86_64
    __SysRoot=sysroot
else
    __NDK_File_Arch_Spec=
    __SysRoot=toolchains/llvm/prebuilt/linux-x86_64/sysroot
fi

# Obtain the location of the bash script to figure out where the root of the repo is.
__ScriptBaseDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

__CrossDir="$__ScriptBaseDir/../../../.tools/android-rootfs"

# Fix: test for a missing *directory* with -d; the previous '! -f' check is
# true for an existing directory, so it did not express the intended guard.
if [[ ! -d "$__CrossDir" ]]; then
    mkdir -p "$__CrossDir"
fi

# Resolve absolute path to avoid `../` in build logs
__CrossDir="$( cd "$__CrossDir" && pwd )"

__NDK_Dir="$__CrossDir/android-ndk-$__NDK_Version"
__lldb_Dir="$__CrossDir/lldb"
__ToolchainDir="$__CrossDir/android-ndk-$__NDK_Version"

# Allow callers to point at pre-existing installations via environment variables.
if [[ -n "$TOOLCHAIN_DIR" ]]; then
    __ToolchainDir=$TOOLCHAIN_DIR
fi

if [[ -n "$NDK_DIR" ]]; then
    __NDK_Dir=$NDK_DIR
fi
# Summarize the effective configuration before doing any network work.
echo "Target API level: $__ApiLevel"
echo "Target architecture: $__BuildArch"
echo "NDK version: $__NDK_Version"
echo "NDK location: $__NDK_Dir"
echo "Target Toolchain location: $__ToolchainDir"

# Download the NDK if required.
# Fix: path expansions are quoted throughout so the script works when
# NDK_DIR/TOOLCHAIN_DIR (or the repo checkout path) contain spaces.
if [ ! -d "$__NDK_Dir" ]; then
    echo "Downloading the NDK into $__NDK_Dir"
    mkdir -p "$__NDK_Dir"
    wget -q --progress=bar:force:noscroll --show-progress "https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux$__NDK_File_Arch_Spec.zip" -O "$__CrossDir/android-ndk-$__NDK_Version-linux.zip"
    unzip -q "$__CrossDir/android-ndk-$__NDK_Version-linux.zip" -d "$__CrossDir"
fi

# Download LLDB if required.
if [ ! -d "$__lldb_Dir" ]; then
    mkdir -p "$__lldb_Dir"
    echo "Downloading LLDB into $__lldb_Dir"
    wget -q --progress=bar:force:noscroll --show-progress "https://dl.google.com/android/repository/lldb-2.3.3614996-linux-x86_64.zip" -O "$__CrossDir/lldb-2.3.3614996-linux-x86_64.zip"
    unzip -q "$__CrossDir/lldb-2.3.3614996-linux-x86_64.zip" -d "$__lldb_Dir"
fi

echo "Download dependencies..."
__TmpDir="$__CrossDir/tmp/$__BuildArch/"
mkdir -p "$__TmpDir"

# combined dependencies for coreclr, installer and libraries
__AndroidPackages="libicu"
__AndroidPackages+=" libandroid-glob"
__AndroidPackages+=" liblzma"
__AndroidPackages+=" krb5"
__AndroidPackages+=" openssl"

# Scrape the Termux package index for the Filename: entries of the packages
# above, then download each .deb and extract it into the temp staging dir.
for path in $(wget -qO- https://packages.termux.dev/termux-main-21/dists/stable/main/binary-$__AndroidArch/Packages |\
    grep -A15 "Package: \(${__AndroidPackages// /\\|}\)" | grep -v "static\|tool" | grep Filename); do

    if [[ "$path" != "Filename:" ]]; then
        echo "Working on: $path"
        wget -qO- "https://packages.termux.dev/termux-main-21/$path" | dpkg -x - "$__TmpDir"
    fi
done

# Overlay the extracted Termux userland onto the NDK sysroot.
cp -R "$__TmpDir/data/data/com.termux/files/usr/"* "$__ToolchainDir/$__SysRoot/usr/"

# Generate platform file for build.sh script to assign to __DistroRid
echo "Generating platform file..."
echo "RID=android.${__ApiLevel}-${__BuildArch}" > "$__ToolchainDir/$__SysRoot/android_platform"

echo "Now to build coreclr, libraries and host; run:"
echo ROOTFS_DIR="$(realpath "$__ToolchainDir/$__SysRoot")" ./build.sh clr+libs+host --cross --arch "$__BuildArch"
================================================
FILE: eng/common/cross/build-rootfs.sh
================================================
#!/usr/bin/env bash

# Builds a cross-compilation rootfs for the requested architecture/distro.
set -e

# Print help text and exit with a failure status.
usage()
{
    echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [llvmx[.y]] [--skipunmount] --rootfsdir ]"
    echo "BuildArch can be: arm(default), arm64, armel, armv6, loongarch64, ppc64le, riscv64, s390x, x64, x86"
    echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine"
    echo "                              for alpine can be specified with version: alpineX.YY or alpineedge"
    echo "                              for FreeBSD can be: freebsd13, freebsd14"
    echo "                              for illumos can be: illumos"
    echo "                              for Haiku can be: haiku."
    echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FreeBSD"
    echo "llvmx[.y] - optional, LLVM version for LLVM related packages."
    echo "--skipunmount - optional, will skip the unmount of rootfs folder."
    echo "--skipsigcheck - optional, will skip package signature checks (allowing untrusted packages)."
    echo "--skipemulation - optional, will skip qemu and debootstrap requirement when building environment for debian based systems."
    echo "--use-mirror - optional, use mirror URL to fetch resources, when available."
    echo "--jobs N - optional, restrict to N jobs."
    exit 1
}
# Per-distro/per-arch defaults; the argument-parsing loop later overrides these.
__CodeName=xenial
__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
__BuildArch=arm
__AlpineArch=armv7
__FreeBSDArch=arm
__FreeBSDMachineArch=armv7
__IllumosArch=arm7
__HaikuArch=arm
__QEMUArch=arm
__UbuntuArch=armhf
__UbuntuRepo=
__UbuntuSuites="updates security backports"
__LLDB_Package="liblldb-3.9-dev"
__SkipUnmount=0

# Package lists: built up incrementally so per-arch cases can strip entries
# (e.g. "${__UbuntuPackages// libomp-dev/}") where a package is unavailable.

# base development support
__UbuntuPackages="build-essential"

__AlpinePackages="alpine-base"
__AlpinePackages+=" build-base"
__AlpinePackages+=" linux-headers"
__AlpinePackages+=" lldb-dev"
__AlpinePackages+=" python3"
__AlpinePackages+=" libedit"

# symlinks fixer
__UbuntuPackages+=" symlinks"

# runtime dependencies
__UbuntuPackages+=" libicu-dev"
__UbuntuPackages+=" liblttng-ust-dev"
__UbuntuPackages+=" libunwind8-dev"

__AlpinePackages+=" gettext-dev"
__AlpinePackages+=" icu-dev"
__AlpinePackages+=" libunwind-dev"
__AlpinePackages+=" lttng-ust-dev"
__AlpinePackages+=" compiler-rt"

# runtime libraries' dependencies
__UbuntuPackages+=" libcurl4-openssl-dev"
__UbuntuPackages+=" libkrb5-dev"
__UbuntuPackages+=" libssl-dev"
__UbuntuPackages+=" zlib1g-dev"
__UbuntuPackages+=" libbrotli-dev"

__AlpinePackages+=" curl-dev"
__AlpinePackages+=" krb5-dev"
__AlpinePackages+=" openssl-dev"
__AlpinePackages+=" zlib-dev"

# FreeBSD base release/pkg tool versions used when bootstrapping that rootfs.
__FreeBSDBase="13.4-RELEASE"
__FreeBSDPkg="1.21.3"
__FreeBSDABI="13"
__FreeBSDPackages="libunwind"
__FreeBSDPackages+=" icu"
__FreeBSDPackages+=" libinotify"
__FreeBSDPackages+=" openssl"
__FreeBSDPackages+=" krb5"
__FreeBSDPackages+=" terminfo-db"

__IllumosPackages="icu"
__IllumosPackages+=" mit-krb5"
__IllumosPackages+=" openssl"
__IllumosPackages+=" zlib"

# Haiku package names may embed version wildcards (e.g. icu[0-9]+) —
# presumably matched by the Haiku package resolver later in the script.
__HaikuPackages="gcc_syslibs"
__HaikuPackages+=" gcc_syslibs_devel"
__HaikuPackages+=" gmp"
__HaikuPackages+=" gmp_devel"
__HaikuPackages+=" icu[0-9]+"
__HaikuPackages+=" icu[0-9]*_devel"
__HaikuPackages+=" krb5"
__HaikuPackages+=" krb5_devel"
__HaikuPackages+=" libiconv"
__HaikuPackages+=" libiconv_devel"
__HaikuPackages+=" llvm[0-9]*_libunwind"
__HaikuPackages+=" llvm[0-9]*_libunwind_devel"
__HaikuPackages+=" mpfr"
__HaikuPackages+=" mpfr_devel"
__HaikuPackages+=" openssl3"
__HaikuPackages+=" openssl3_devel"
__HaikuPackages+=" zlib"
__HaikuPackages+=" zlib_devel"

# ML.NET dependencies
__UbuntuPackages+=" libomp5"
__UbuntuPackages+=" libomp-dev"
__AlpineKeys='
4a6a0840:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA1yHJxQgsHQREclQu4Ohe\nqxTxd1tHcNnvnQTu/UrTky8wWvgXT+jpveroeWWnzmsYlDI93eLI2ORakxb3gA2O\nQ0Ry4ws8vhaxLQGC74uQR5+/yYrLuTKydFzuPaS1dK19qJPXB8GMdmFOijnXX4SA\njixuHLe1WW7kZVtjL7nufvpXkWBGjsfrvskdNA/5MfxAeBbqPgaq0QMEfxMAn6/R\nL5kNepi/Vr4S39Xvf2DzWkTLEK8pcnjNkt9/aafhWqFVW7m3HCAII6h/qlQNQKSo\nGuH34Q8GsFG30izUENV9avY7hSLq7nggsvknlNBZtFUcmGoQrtx3FmyYsIC8/R+B\nywIDAQAB
5243ef4b:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvNijDxJ8kloskKQpJdx+\nmTMVFFUGDoDCbulnhZMJoKNkSuZOzBoFC94omYPtxnIcBdWBGnrm6ncbKRlR+6oy\nDO0W7c44uHKCFGFqBhDasdI4RCYP+fcIX/lyMh6MLbOxqS22TwSLhCVjTyJeeH7K\naA7vqk+QSsF4TGbYzQDDpg7+6aAcNzg6InNePaywA6hbT0JXbxnDWsB+2/LLSF2G\nmnhJlJrWB1WGjkz23ONIWk85W4S0XB/ewDefd4Ly/zyIciastA7Zqnh7p3Ody6Q0\nsS2MJzo7p3os1smGjUF158s6m/JbVh4DN6YIsxwl2OjDOz9R0OycfJSDaBVIGZzg\ncQIDAQAB
524d27bb:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr8s1q88XpuJWLCZALdKj\nlN8wg2ePB2T9aIcaxryYE/Jkmtu+ZQ5zKq6BT3y/udt5jAsMrhHTwroOjIsF9DeG\ne8Y3vjz+Hh4L8a7hZDaw8jy3CPag47L7nsZFwQOIo2Cl1SnzUc6/owoyjRU7ab0p\niWG5HK8IfiybRbZxnEbNAfT4R53hyI6z5FhyXGS2Ld8zCoU/R4E1P0CUuXKEN4p0\n64dyeUoOLXEWHjgKiU1mElIQj3k/IF02W89gDj285YgwqA49deLUM7QOd53QLnx+\nxrIrPv3A+eyXMFgexNwCKQU9ZdmWa00MjjHlegSGK8Y2NPnRoXhzqSP9T9i2HiXL\nVQIDAQAB
5261cecb:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwlzMkl7b5PBdfMzGdCT0\ncGloRr5xGgVmsdq5EtJvFkFAiN8Ac9MCFy/vAFmS8/7ZaGOXoCDWbYVLTLOO2qtX\nyHRl+7fJVh2N6qrDDFPmdgCi8NaE+3rITWXGrrQ1spJ0B6HIzTDNEjRKnD4xyg4j\ng01FMcJTU6E+V2JBY45CKN9dWr1JDM/nei/Pf0byBJlMp/mSSfjodykmz4Oe13xB\nCa1WTwgFykKYthoLGYrmo+LKIGpMoeEbY1kuUe04UiDe47l6Oggwnl+8XD1MeRWY\nsWgj8sF4dTcSfCMavK4zHRFFQbGp/YFJ/Ww6U9lA3Vq0wyEI6MCMQnoSMFwrbgZw\nwwIDAQAB
58199dcc:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3v8/ye/V/t5xf4JiXLXa\nhWFRozsnmn3hobON20GdmkrzKzO/eUqPOKTpg2GtvBhK30fu5oY5uN2ORiv2Y2ht\neLiZ9HVz3XP8Fm9frha60B7KNu66FO5P2o3i+E+DWTPqqPcCG6t4Znk2BypILcit\nwiPKTsgbBQR2qo/cO01eLLdt6oOzAaF94NH0656kvRewdo6HG4urbO46tCAizvCR\nCA7KGFMyad8WdKkTjxh8YLDLoOCtoZmXmQAiwfRe9pKXRH/XXGop8SYptLqyVVQ+\ntegOD9wRs2tOlgcLx4F/uMzHN7uoho6okBPiifRX+Pf38Vx+ozXh056tjmdZkCaV\naQIDAQAB
58cbb476:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAoSPnuAGKtRIS5fEgYPXD\n8pSGvKAmIv3A08LBViDUe+YwhilSHbYXUEAcSH1KZvOo1WT1x2FNEPBEFEFU1Eyc\n+qGzbA03UFgBNvArurHQ5Z/GngGqE7IarSQFSoqewYRtFSfp+TL9CUNBvM0rT7vz\n2eMu3/wWG+CBmb92lkmyWwC1WSWFKO3x8w+Br2IFWvAZqHRt8oiG5QtYvcZL6jym\nY8T6sgdDlj+Y+wWaLHs9Fc+7vBuyK9C4O1ORdMPW15qVSl4Lc2Wu1QVwRiKnmA+c\nDsH/m7kDNRHM7TjWnuj+nrBOKAHzYquiu5iB3Qmx+0gwnrSVf27Arc3ozUmmJbLj\nzQIDAQAB
58e4f17d:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvBxJN9ErBgdRcPr5g4hV\nqyUSGZEKuvQliq2Z9SRHLh2J43+EdB6A+yzVvLnzcHVpBJ+BZ9RV30EM9guck9sh\nr+bryZcRHyjG2wiIEoduxF2a8KeWeQH7QlpwGhuobo1+gA8L0AGImiA6UP3LOirl\nI0G2+iaKZowME8/tydww4jx5vG132JCOScMjTalRsYZYJcjFbebQQolpqRaGB4iG\nWqhytWQGWuKiB1A22wjmIYf3t96l1Mp+FmM2URPxD1gk/BIBnX7ew+2gWppXOK9j\n1BJpo0/HaX5XoZ/uMqISAAtgHZAqq+g3IUPouxTphgYQRTRYpz2COw3NF43VYQrR\nbQIDAQAB
60ac2099:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwR4uJVtJOnOFGchnMW5Y\nj5/waBdG1u5BTMlH+iQMcV5+VgWhmpZHJCBz3ocD+0IGk2I68S5TDOHec/GSC0lv\n6R9o6F7h429GmgPgVKQsc8mPTPtbjJMuLLs4xKc+viCplXc0Nc0ZoHmCH4da6fCV\ntdpHQjVe6F9zjdquZ4RjV6R6JTiN9v924dGMAkbW/xXmamtz51FzondKC52Gh8Mo\n/oA0/T0KsCMCi7tb4QNQUYrf+Xcha9uus4ww1kWNZyfXJB87a2kORLiWMfs2IBBJ\nTmZ2Fnk0JnHDb8Oknxd9PvJPT0mvyT8DA+KIAPqNvOjUXP4bnjEHJcoCP9S5HkGC\nIQIDAQAB
6165ee59:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAutQkua2CAig4VFSJ7v54\nALyu/J1WB3oni7qwCZD3veURw7HxpNAj9hR+S5N/pNeZgubQvJWyaPuQDm7PTs1+\ntFGiYNfAsiibX6Rv0wci3M+z2XEVAeR9Vzg6v4qoofDyoTbovn2LztaNEjTkB+oK\ntlvpNhg1zhou0jDVYFniEXvzjckxswHVb8cT0OMTKHALyLPrPOJzVtM9C1ew2Nnc\n3848xLiApMu3NBk0JqfcS3Bo5Y2b1FRVBvdt+2gFoKZix1MnZdAEZ8xQzL/a0YS5\nHd0wj5+EEKHfOd3A75uPa/WQmA+o0cBFfrzm69QDcSJSwGpzWrD1ScH3AK8nWvoj\nv7e9gukK/9yl1b4fQQ00vttwJPSgm9EnfPHLAtgXkRloI27H6/PuLoNvSAMQwuCD\nhQRlyGLPBETKkHeodfLoULjhDi1K2gKJTMhtbnUcAA7nEphkMhPWkBpgFdrH+5z4\nLxy+3ek0cqcI7K68EtrffU8jtUj9LFTUC8dERaIBs7NgQ/LfDbDfGh9g6qVj1hZl\nk9aaIPTm/xsi8v3u+0qaq7KzIBc9s59JOoA8TlpOaYdVgSQhHHLBaahOuAigH+VI\nisbC9vmqsThF2QdDtQt37keuqoda2E6sL7PUvIyVXDRfwX7uMDjlzTxHTymvq2Ck\nhtBqojBnThmjJQFgZXocHG8CAwEAAQ==
61666e3f:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAlEyxkHggKCXC2Wf5Mzx4\nnZLFZvU2bgcA3exfNPO/g1YunKfQY+Jg4fr6tJUUTZ3XZUrhmLNWvpvSwDS19ZmC\nIXOu0+V94aNgnhMsk9rr59I8qcbsQGIBoHzuAl8NzZCgdbEXkiY90w1skUw8J57z\nqCsMBydAueMXuWqF5nGtYbi5vHwK42PffpiZ7G5Kjwn8nYMW5IZdL6ZnMEVJUWC9\nI4waeKg0yskczYDmZUEAtrn3laX9677ToCpiKrvmZYjlGl0BaGp3cxggP2xaDbUq\nqfFxWNgvUAb3pXD09JM6Mt6HSIJaFc9vQbrKB9KT515y763j5CC2KUsilszKi3mB\nHYe5PoebdjS7D1Oh+tRqfegU2IImzSwW3iwA7PJvefFuc/kNIijfS/gH/cAqAK6z\nbhdOtE/zc7TtqW2Wn5Y03jIZdtm12CxSxwgtCF1NPyEWyIxAQUX9ACb3M0FAZ61n\nfpPrvwTaIIxxZ01L3IzPLpbc44x/DhJIEU+iDt6IMTrHOphD9MCG4631eIdB0H1b\n6zbNX1CXTsafqHRFV9XmYYIeOMggmd90s3xIbEujA6HKNP/gwzO6CDJ+nHFDEqoF\nSkxRdTkEqjTjVKieURW7Swv7zpfu5PrsrrkyGnsRrBJJzXlm2FOOxnbI2iSL1B5F\nrO5kbUxFeZUIDq+7Yv4kLWcCAwEAAQ==
616a9724:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAnC+bR4bHf/L6QdU4puhQ\ngl1MHePszRC38bzvVFDUJsmCaMCL2suCs2A2yxAgGb9pu9AJYLAmxQC4mM3jNqhg\n/E7yuaBbek3O02zN/ctvflJ250wZCy+z0ZGIp1ak6pu1j14IwHokl9j36zNfGtfv\nADVOcdpWITFFlPqwq1qt/H3UsKVmtiF3BNWWTeUEQwKvlU8ymxgS99yn0+4OPyNT\nL3EUeS+NQJtDS01unau0t7LnjUXn+XIneWny8bIYOQCuVR6s/gpIGuhBaUqwaJOw\n7jkJZYF2Ij7uPb4b5/R3vX2FfxxqEHqssFSg8FFUNTZz3qNZs0CRVyfA972g9WkJ\nhPfn31pQYil4QGRibCMIeU27YAEjXoqfJKEPh4UWMQsQLrEfdGfb8VgwrPbniGfU\nL3jKJR3VAafL9330iawzVQDlIlwGl6u77gEXMl9K0pfazunYhAp+BMP+9ot5ckK+\nosmrqj11qMESsAj083GeFdfV3pXEIwUytaB0AKEht9DbqUfiE/oeZ/LAXgySMtVC\nsbC4ESmgVeY2xSBIJdDyUap7FR49GGrw0W49NUv9gRgQtGGaNVQQO9oGL2PBC41P\niWF9GLoX30HIz1P8PF/cZvicSSPkQf2Z6TV+t0ebdGNS5DjapdnCrq8m9Z0pyKsQ\nuxAL2a7zX8l5i1CZh1ycUGsCAwEAAQ==
616abc23:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA0MfCDrhODRCIxR9Dep1s\neXafh5CE5BrF4WbCgCsevyPIdvTeyIaW4vmO3bbG4VzhogDZju+R3IQYFuhoXP5v\nY+zYJGnwrgz3r5wYAvPnLEs1+dtDKYOgJXQj+wLJBW1mzRDL8FoRXOe5iRmn1EFS\nwZ1DoUvyu7/J5r0itKicZp3QKED6YoilXed+1vnS4Sk0mzN4smuMR9eO1mMCqNp9\n9KTfRDHTbakIHwasECCXCp50uXdoW6ig/xUAFanpm9LtK6jctNDbXDhQmgvAaLXZ\nLvFqoaYJ/CvWkyYCgL6qxvMvVmPoRv7OPcyni4xR/WgWa0MSaEWjgPx3+yj9fiMA\n1S02pFWFDOr5OUF/O4YhFJvUCOtVsUPPfA/Lj6faL0h5QI9mQhy5Zb9TTaS9jB6p\nLw7u0dJlrjFedk8KTJdFCcaGYHP6kNPnOxMylcB/5WcztXZVQD5WpCicGNBxCGMm\nW64SgrV7M07gQfL/32QLsdqPUf0i8hoVD8wfQ3EpbQzv6Fk1Cn90bZqZafg8XWGY\nwddhkXk7egrr23Djv37V2okjzdqoyLBYBxMz63qQzFoAVv5VoY2NDTbXYUYytOvG\nGJ1afYDRVWrExCech1mX5ZVUB1br6WM+psFLJFoBFl6mDmiYt0vMYBddKISsvwLl\nIJQkzDwtXzT2cSjoj3T5QekCAwEAAQ==
616ac3bc:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvaaoSLab+IluixwKV5Od\n0gib2YurjPatGIbn5Ov2DLUFYiebj2oJINXJSwUOO+4WcuHFEqiL/1rya+k5hLZt\nhnPL1tn6QD4rESznvGSasRCQNT2vS/oyZbTYJRyAtFkEYLlq0t3S3xBxxHWuvIf0\nqVxVNYpQWyM3N9RIeYBR/euXKJXileSHk/uq1I5wTC0XBIHWcthczGN0m9wBEiWS\n0m3cnPk4q0Ea8mUJ91Rqob19qETz6VbSPYYpZk3qOycjKosuwcuzoMpwU8KRiMFd\n5LHtX0Hx85ghGsWDVtS0c0+aJa4lOMGvJCAOvDfqvODv7gKlCXUpgumGpLdTmaZ8\n1RwqspAe3IqBcdKTqRD4m2mSg23nVx2FAY3cjFvZQtfooT7q1ItRV5RgH6FhQSl7\n+6YIMJ1Bf8AAlLdRLpg+doOUGcEn+pkDiHFgI8ylH1LKyFKw+eXaAml/7DaWZk1d\ndqggwhXOhc/UUZFQuQQ8A8zpA13PcbC05XxN2hyP93tCEtyynMLVPtrRwDnHxFKa\nqKzs3rMDXPSXRn3ZZTdKH3069ApkEjQdpcwUh+EmJ1Ve/5cdtzT6kKWCjKBFZP/s\n91MlRrX2BTRdHaU5QJkUheUtakwxuHrdah2F94lRmsnQlpPr2YseJu6sIE+Dnx4M\nCfhdVbQL2w54R645nlnohu8CAwEAAQ==
616adfeb:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAq0BFD1D4lIxQcsqEpQzU\npNCYM3aP1V/fxxVdT4DWvSI53JHTwHQamKdMWtEXetWVbP5zSROniYKFXd/xrD9X\n0jiGHey3lEtylXRIPxe5s+wXoCmNLcJVnvTcDtwx/ne2NLHxp76lyc25At+6RgE6\nADjLVuoD7M4IFDkAsd8UQ8zM0Dww9SylIk/wgV3ZkifecvgUQRagrNUdUjR56EBZ\nraQrev4hhzOgwelT0kXCu3snbUuNY/lU53CoTzfBJ5UfEJ5pMw1ij6X0r5S9IVsy\nKLWH1hiO0NzU2c8ViUYCly4Fe9xMTFc6u2dy/dxf6FwERfGzETQxqZvSfrRX+GLj\n/QZAXiPg5178hT/m0Y3z5IGenIC/80Z9NCi+byF1WuJlzKjDcF/TU72zk0+PNM/H\nKuppf3JT4DyjiVzNC5YoWJT2QRMS9KLP5iKCSThwVceEEg5HfhQBRT9M6KIcFLSs\nmFjx9kNEEmc1E8hl5IR3+3Ry8G5/bTIIruz14jgeY9u5jhL8Vyyvo41jgt9sLHR1\n/J1TxKfkgksYev7PoX6/ZzJ1ksWKZY5NFoDXTNYUgzFUTOoEaOg3BAQKadb3Qbbq\nXIrxmPBdgrn9QI7NCgfnAY3Tb4EEjs3ON/BNyEhUENcXOH6I1NbcuBQ7g9P73kE4\nVORdoc8MdJ5eoKBpO8Ww8HECAwEAAQ==
616ae350:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAyduVzi1mWm+lYo2Tqt/0\nXkCIWrDNP1QBMVPrE0/ZlU2bCGSoo2Z9FHQKz/mTyMRlhNqTfhJ5qU3U9XlyGOPJ\npiM+b91g26pnpXJ2Q2kOypSgOMOPA4cQ42PkHBEqhuzssfj9t7x47ppS94bboh46\nxLSDRff/NAbtwTpvhStV3URYkxFG++cKGGa5MPXBrxIp+iZf9GnuxVdST5PGiVGP\nODL/b69sPJQNbJHVquqUTOh5Ry8uuD2WZuXfKf7/C0jC/ie9m2+0CttNu9tMciGM\nEyKG1/Xhk5iIWO43m4SrrT2WkFlcZ1z2JSf9Pjm4C2+HovYpihwwdM/OdP8Xmsnr\nDzVB4YvQiW+IHBjStHVuyiZWc+JsgEPJzisNY0Wyc/kNyNtqVKpX6dRhMLanLmy+\nf53cCSI05KPQAcGj6tdL+D60uKDkt+FsDa0BTAobZ31OsFVid0vCXtsbplNhW1IF\nHwsGXBTVcfXg44RLyL8Lk/2dQxDHNHzAUslJXzPxaHBLmt++2COa2EI1iWlvtznk\nOk9WP8SOAIj+xdqoiHcC4j72BOVVgiITIJNHrbppZCq6qPR+fgXmXa+sDcGh30m6\n9Wpbr28kLMSHiENCWTdsFij+NQTd5S47H7XTROHnalYDuF1RpS+DpQidT5tUimaT\nJZDr++FjKrnnijbyNF8b98UCAwEAAQ==
616db30d:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAnpUpyWDWjlUk3smlWeA0\nlIMW+oJ38t92CRLHH3IqRhyECBRW0d0aRGtq7TY8PmxjjvBZrxTNDpJT6KUk4LRm\na6A6IuAI7QnNK8SJqM0DLzlpygd7GJf8ZL9SoHSH+gFsYF67Cpooz/YDqWrlN7Vw\ntO00s0B+eXy+PCXYU7VSfuWFGK8TGEv6HfGMALLjhqMManyvfp8hz3ubN1rK3c8C\nUS/ilRh1qckdbtPvoDPhSbTDmfU1g/EfRSIEXBrIMLg9ka/XB9PvWRrekrppnQzP\nhP9YE3x/wbFc5QqQWiRCYyQl/rgIMOXvIxhkfe8H5n1Et4VAorkpEAXdsfN8KSVv\nLSMazVlLp9GYq5SUpqYX3KnxdWBgN7BJoZ4sltsTpHQ/34SXWfu3UmyUveWj7wp0\nx9hwsPirVI00EEea9AbP7NM2rAyu6ukcm4m6ATd2DZJIViq2es6m60AE6SMCmrQF\nwmk4H/kdQgeAELVfGOm2VyJ3z69fQuywz7xu27S6zTKi05Qlnohxol4wVb6OB7qG\nLPRtK9ObgzRo/OPumyXqlzAi/Yvyd1ZQk8labZps3e16bQp8+pVPiumWioMFJDWV\nGZjCmyMSU8V6MB6njbgLHoyg2LCukCAeSjbPGGGYhnKLm1AKSoJh3IpZuqcKCk5C\n8CM1S15HxV78s9dFntEqIokCAwEAAQ==
66ba20fe:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtfB12w4ZgqsXWZDfUAV/\n6Y4aHUKIu3q4SXrNZ7CXF9nXoAVYrS7NAxJdAodsY3vPCN0g5O8DFXR+390LdOuQ\n+HsGKCc1k5tX5ZXld37EZNTNSbR0k+NKhd9h6X3u6wqPOx7SIKxwAQR8qeeFq4pP\nrt9GAGlxtuYgzIIcKJPwE0dZlcBCg+GnptCUZXp/38BP1eYC+xTXSL6Muq1etYfg\nodXdb7Yl+2h1IHuOwo5rjgY5kpY7GcAs8AjGk3lDD/av60OTYccknH0NCVSmPoXK\nvrxDBOn0LQRNBLcAfnTKgHrzy0Q5h4TNkkyTgxkoQw5ObDk9nnabTxql732yy9BY\ns+hM9+dSFO1HKeVXreYSA2n1ndF18YAvAumzgyqzB7I4pMHXq1kC/8bONMJxwSkS\nYm6CoXKyavp7RqGMyeVpRC7tV+blkrrUml0BwNkxE+XnwDRB3xDV6hqgWe0XrifD\nYTfvd9ScZQP83ip0r4IKlq4GMv/R5shcCRJSkSZ6QSGshH40JYSoiwJf5FHbj9ND\n7do0UAqebWo4yNx63j/wb2ULorW3AClv0BCFSdPsIrCStiGdpgJDBR2P2NZOCob3\nG9uMj+wJD6JJg2nWqNJxkANXX37Qf8plgzssrhrgOvB0fjjS7GYhfkfmZTJ0wPOw\nA8+KzFseBh4UFGgue78KwgkCAwEAAQ==
'
# Signature-verification and behavior flags; per-arch cases and command-line
# options adjust these (keyring file presumably passed to the bootstrap tool —
# confirm against the rest of the script).
__Keyring=
__KeyringFile="/usr/share/keyrings/ubuntu-archive-keyring.gpg"
__SkipSigCheck=0
__SkipEmulation=0
__UseMirror=0
__UnprocessedBuildArgs=
while :; do
if [[ "$#" -le 0 ]]; then
break
fi
lowerI="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
case $lowerI in
-\?|-h|--help)
usage
;;
arm)
__BuildArch=arm
__UbuntuArch=armhf
__AlpineArch=armv7
__QEMUArch=arm
;;
arm64)
__BuildArch=arm64
__UbuntuArch=arm64
__AlpineArch=aarch64
__QEMUArch=aarch64
__FreeBSDArch=arm64
__FreeBSDMachineArch=aarch64
;;
armel)
__BuildArch=armel
__UbuntuArch=armel
__UbuntuRepo="http://archive.debian.org/debian/"
__CodeName=buster
__KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
__LLDB_Package="liblldb-6.0-dev"
__UbuntuPackages="${__UbuntuPackages// libomp-dev/}"
__UbuntuPackages="${__UbuntuPackages// libomp5/}"
__UbuntuSuites=
;;
armv6)
__BuildArch=armv6
__UbuntuArch=armhf
__QEMUArch=arm
__UbuntuRepo="http://raspbian.raspberrypi.org/raspbian/"
__CodeName=buster
__KeyringFile="/usr/share/keyrings/raspbian-archive-keyring.gpg"
__LLDB_Package="liblldb-6.0-dev"
__UbuntuSuites=
if [[ -e "$__KeyringFile" ]]; then
__Keyring="--keyring $__KeyringFile"
fi
;;
loongarch64)
__BuildArch=loongarch64
__AlpineArch=loongarch64
__QEMUArch=loongarch64
__UbuntuArch=loong64
__UbuntuSuites=unreleased
__LLDB_Package="liblldb-19-dev"
if [[ "$__CodeName" == "sid" ]]; then
__UbuntuRepo="http://ftp.ports.debian.org/debian-ports/"
fi
;;
riscv64)
__BuildArch=riscv64
__AlpineArch=riscv64
__AlpinePackages="${__AlpinePackages// lldb-dev/}"
__QEMUArch=riscv64
__UbuntuArch=riscv64
__UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}"
unset __LLDB_Package
;;
ppc64le)
__BuildArch=ppc64le
__AlpineArch=ppc64le
__QEMUArch=ppc64le
__UbuntuArch=ppc64el
__UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/"
__UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}"
__UbuntuPackages="${__UbuntuPackages// libomp-dev/}"
__UbuntuPackages="${__UbuntuPackages// libomp5/}"
unset __LLDB_Package
;;
s390x)
__BuildArch=s390x
__AlpineArch=s390x
__QEMUArch=s390x
__UbuntuArch=s390x
__UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/"
__UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}"
__UbuntuPackages="${__UbuntuPackages// libomp-dev/}"
__UbuntuPackages="${__UbuntuPackages// libomp5/}"
unset __LLDB_Package
;;
x64)
__BuildArch=x64
__AlpineArch=x86_64
__UbuntuArch=amd64
__FreeBSDArch=amd64
__FreeBSDMachineArch=amd64
__illumosArch=x86_64
__HaikuArch=x86_64
__UbuntuRepo="http://archive.ubuntu.com/ubuntu/"
;;
x86)
__BuildArch=x86
__UbuntuArch=i386
__AlpineArch=x86
__UbuntuRepo="http://archive.ubuntu.com/ubuntu/"
;;
lldb*)
version="$(echo "$lowerI" | tr -d '[:alpha:]-=')"
majorVersion="${version%%.*}"
[ -z "${version##*.*}" ] && minorVersion="${version#*.}"
if [ -z "$minorVersion" ]; then
minorVersion=0
fi
# for versions > 6.0, lldb has dropped the minor version
if [ "$majorVersion" -le 6 ]; then
version="$majorVersion.$minorVersion"
else
version="$majorVersion"
fi
__LLDB_Package="liblldb-${version}-dev"
;;
no-lldb)
unset __LLDB_Package
;;
llvm*)
version="$(echo "$lowerI" | tr -d '[:alpha:]-=')"
__LLVM_MajorVersion="${version%%.*}"
[ -z "${version##*.*}" ] && __LLVM_MinorVersion="${version#*.}"
if [ -z "$__LLVM_MinorVersion" ]; then
__LLVM_MinorVersion=0
fi
# for versions > 6.0, lldb has dropped the minor version
if [ "$__LLVM_MajorVersion" -gt 6 ]; then
__LLVM_MinorVersion=
fi
;;
xenial) # Ubuntu 16.04
__CodeName=xenial
;;
bionic) # Ubuntu 18.04
__CodeName=bionic
;;
focal) # Ubuntu 20.04
__CodeName=focal
;;
jammy) # Ubuntu 22.04
__CodeName=jammy
;;
noble) # Ubuntu 24.04
__CodeName=noble
if [[ -z "$__LLDB_Package" ]]; then
__LLDB_Package="liblldb-19-dev"
fi
;;
stretch) # Debian 9
__CodeName=stretch
__LLDB_Package="liblldb-6.0-dev"
__KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi
;;
buster) # Debian 10
__CodeName=buster
__LLDB_Package="liblldb-6.0-dev"
__KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://archive.debian.org/debian/"
fi
;;
bullseye) # Debian 11
__CodeName=bullseye
__KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi
;;
bookworm) # Debian 12
__CodeName=bookworm
__KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi
;;
sid) # Debian sid
__CodeName=sid
__UbuntuSuites=
# Debian-Ports architectures need different values
case "$__UbuntuArch" in
amd64|arm64|armel|armhf|i386|mips64el|ppc64el|riscv64|s390x)
__KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi
;;
*)
__KeyringFile="/usr/share/keyrings/debian-ports-archive-keyring.gpg"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.ports.debian.org/debian-ports/"
fi
;;
esac
if [[ -e "$__KeyringFile" ]]; then
__Keyring="--keyring $__KeyringFile"
fi
;;
tizen)
__CodeName=
__UbuntuRepo=
__Tizen=tizen
;;
alpine*)
__CodeName=alpine
__UbuntuRepo=
if [[ "$lowerI" == "alpineedge" ]]; then
__AlpineVersion=edge
else
version="$(echo "$lowerI" | tr -d '[:alpha:]-=')"
__AlpineMajorVersion="${version%%.*}"
__AlpineMinorVersion="${version#*.}"
__AlpineVersion="$__AlpineMajorVersion.$__AlpineMinorVersion"
fi
;;
freebsd13)
__CodeName=freebsd
__SkipUnmount=1
;;
freebsd14)
__CodeName=freebsd
__FreeBSDBase="14.2-RELEASE"
__FreeBSDABI="14"
__SkipUnmount=1
;;
illumos)
__CodeName=illumos
__SkipUnmount=1
;;
haiku)
__CodeName=haiku
__SkipUnmount=1
;;
--skipunmount)
__SkipUnmount=1
;;
--skipsigcheck)
__SkipSigCheck=1
;;
--skipemulation)
__SkipEmulation=1
;;
--rootfsdir|-rootfsdir)
shift
__RootfsDir="$1"
;;
--use-mirror)
__UseMirror=1
;;
--use-jobs)
shift
MAXJOBS=$1
;;
*)
__UnprocessedBuildArgs="$__UnprocessedBuildArgs $1"
;;
esac
shift
done
# Pick the llvm libs package matching the requested Alpine release; for
# unlisted versions fall back to per-architecture minimums or to a runtime
# repository lookup (__AlpineLlvmLibsLookup).
case "$__AlpineVersion" in
3.14) __AlpinePackages+=" llvm11-libs" ;;
3.15) __AlpinePackages+=" llvm12-libs" ;;
3.16) __AlpinePackages+=" llvm13-libs" ;;
3.17) __AlpinePackages+=" llvm15-libs" ;;
edge) __AlpineLlvmLibsLookup=1 ;;
*)
if [[ "$__AlpineArch" =~ s390x|ppc64le ]]; then
__AlpineVersion=3.15 # minimum version that supports lldb-dev
__AlpinePackages+=" llvm12-libs"
elif [[ "$__AlpineArch" == "x86" ]]; then
__AlpineVersion=3.17 # minimum version that supports lldb-dev
__AlpinePackages+=" llvm15-libs"
elif [[ "$__AlpineArch" == "riscv64" || "$__AlpineArch" == "loongarch64" ]]; then
__AlpineVersion=3.21 # minimum version that supports lldb-dev
__AlpinePackages+=" llvm19-libs"
elif [[ -n "$__AlpineMajorVersion" ]]; then
# use whichever alpine version is provided and select the latest toolchain libs
__AlpineLlvmLibsLookup=1
else
__AlpineVersion=3.13 # 3.13 to maximize compatibility
__AlpinePackages+=" llvm10-libs"
fi
esac
if [[ "$__AlpineVersion" =~ 3\.1[345] ]]; then
# compiler-rt--static was merged in compiler-rt package in alpine 3.16
# for older versions, we need compiler-rt--static, so replace the name
__AlpinePackages="${__AlpinePackages/compiler-rt/compiler-rt-static}"
fi
# Append the selected LLDB package (may be unset) and default the repo/clang
# packages chosen during argument parsing.
__UbuntuPackages+=" ${__LLDB_Package:-}"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ports.ubuntu.com/"
fi
if [[ -n "$__LLVM_MajorVersion" ]]; then
__UbuntuPackages+=" libclang-common-${__LLVM_MajorVersion}${__LLVM_MinorVersion:+.$__LLVM_MinorVersion}-dev"
fi
# Resolve the rootfs directory: explicit flag wins, then $ROOTFS_DIR, then a
# default under the repo's .tools directory; any existing tree is wiped.
if [[ -z "$__RootfsDir" && -n "$ROOTFS_DIR" ]]; then
__RootfsDir="$ROOTFS_DIR"
fi
if [[ -z "$__RootfsDir" ]]; then
__RootfsDir="$__CrossDir/../../../.tools/rootfs/$__BuildArch"
fi
if [[ -d "$__RootfsDir" ]]; then
if [[ "$__SkipUnmount" == "0" ]]; then
umount "$__RootfsDir"/* || true
fi
rm -rf "$__RootfsDir"
fi
mkdir -p "$__RootfsDir"
__RootfsDir="$( cd "$__RootfsDir" && pwd )"
__hasWget=

# Detect which download tool is available, preferring wget over curl.
# Sets the global __hasWget to 1 (wget present) or 0 (only curl present);
# aborts the script when neither tool is on PATH.
ensureDownloadTool()
{
    if command -v wget &> /dev/null; then
        __hasWget=1
        return
    fi
    if command -v curl &> /dev/null; then
        __hasWget=0
        return
    fi
    >&2 echo "ERROR: either wget or curl is required by this script."
    exit 1
}
if [[ "$__CodeName" == "alpine" ]]; then
__ApkToolsVersion=2.12.11
__ApkToolsDir="$(mktemp -d)"
__ApkKeysDir="$(mktemp -d)"
arch="$(uname -m)"
ensureDownloadTool
if [[ "$__hasWget" == 1 ]]; then
wget -P "$__ApkToolsDir" "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic/v$__ApkToolsVersion/$arch/apk.static"
else
curl -SLO --create-dirs --output-dir "$__ApkToolsDir" "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic/v$__ApkToolsVersion/$arch/apk.static"
fi
if [[ "$arch" == "x86_64" ]]; then
__ApkToolsSHA512SUM="53e57b49230da07ef44ee0765b9592580308c407a8d4da7125550957bb72cb59638e04f8892a18b584451c8d841d1c7cb0f0ab680cc323a3015776affaa3be33"
elif [[ "$arch" == "aarch64" ]]; then
__ApkToolsSHA512SUM="9e2b37ecb2b56c05dad23d379be84fd494c14bd730b620d0d576bda760588e1f2f59a7fcb2f2080577e0085f23a0ca8eadd993b4e61c2ab29549fdb71969afd0"
else
echo "WARNING: add missing hash for your host architecture. To find the value, use: 'find /tmp -name apk.static -exec sha512sum {} \;'"
fi
echo "$__ApkToolsSHA512SUM $__ApkToolsDir/apk.static" | sha512sum -c
chmod +x "$__ApkToolsDir/apk.static"
if [[ "$__AlpineVersion" == "edge" ]]; then
version=edge
else
version="v$__AlpineVersion"
fi
for line in $__AlpineKeys; do
id="${line%%:*}"
content="${line#*:}"
echo -e "-----BEGIN PUBLIC KEY-----\n$content\n-----END PUBLIC KEY-----" > "$__ApkKeysDir/alpine-devel@lists.alpinelinux.org-$id.rsa.pub"
done
if [[ "$__SkipSigCheck" == "1" ]]; then
__ApkSignatureArg="--allow-untrusted"
else
__ApkSignatureArg="--keys-dir $__ApkKeysDir"
fi
if [[ "$__SkipEmulation" == "1" ]]; then
__NoEmulationArg="--no-scripts"
fi
# initialize DB
# shellcheck disable=SC2086
"$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" --initdb add
if [[ "$__AlpineLlvmLibsLookup" == 1 ]]; then
# shellcheck disable=SC2086
__AlpinePackages+=" $("$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" \
search 'llvm*-libs' | grep -E '^llvm' | sort | tail -1 | sed 's/-[^-]*//2g')"
fi
# install all packages in one go
# shellcheck disable=SC2086
"$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" $__NoEmulationArg \
add $__AlpinePackages
rm -r "$__ApkToolsDir"
elif [[ "$__CodeName" == "freebsd" ]]; then
mkdir -p "$__RootfsDir"/usr/local/etc
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
ensureDownloadTool
if [[ "$__hasWget" == 1 ]]; then
wget -O- "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
else
curl -SL "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
fi
echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf
echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf
mkdir -p "$__RootfsDir"/tmp
# get and build package manager
if [[ "$__hasWget" == 1 ]]; then
wget -O- "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf -
else
curl -SL "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf -
fi
cd "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
# needed for install to succeed
mkdir -p "$__RootfsDir"/host/etc
./autogen.sh && ./configure --prefix="$__RootfsDir"/host && make -j "$JOBS" && make install
rm -rf "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
# install packages we need.
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf update
# shellcheck disable=SC2086
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages
elif [[ "$__CodeName" == "illumos" ]]; then
mkdir "$__RootfsDir/tmp"
pushd "$__RootfsDir/tmp"
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
ensureDownloadTool
echo "Downloading sysroot."
if [[ "$__hasWget" == 1 ]]; then
wget -O- https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
else
curl -SL https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
fi
echo "Building binutils. Please wait.."
if [[ "$__hasWget" == 1 ]]; then
wget -O- https://ftp.gnu.org/gnu/binutils/binutils-2.42.tar.xz | tar -xJf -
else
curl -SL https://ftp.gnu.org/gnu/binutils/binutils-2.42.tar.xz | tar -xJf -
fi
mkdir build-binutils && cd build-binutils
../binutils-2.42/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.11" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir"
make -j "$JOBS" && make install && cd ..
echo "Building gcc. Please wait.."
if [[ "$__hasWget" == 1 ]]; then
wget -O- https://ftp.gnu.org/gnu/gcc/gcc-13.3.0/gcc-13.3.0.tar.xz | tar -xJf -
else
curl -SL https://ftp.gnu.org/gnu/gcc/gcc-13.3.0/gcc-13.3.0.tar.xz | tar -xJf -
fi
CFLAGS="-fPIC"
CXXFLAGS="-fPIC"
CXXFLAGS_FOR_TARGET="-fPIC"
CFLAGS_FOR_TARGET="-fPIC"
export CFLAGS CXXFLAGS CXXFLAGS_FOR_TARGET CFLAGS_FOR_TARGET
mkdir build-gcc && cd build-gcc
../gcc-13.3.0/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.11" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \
--with-gnu-ld --disable-nls --disable-libgomp --disable-libquadmath --disable-libssp --disable-libvtv --disable-libcilkrts --disable-libada --disable-libsanitizer \
--disable-libquadmath-support --disable-shared --enable-tls
make -j "$JOBS" && make install && cd ..
BaseUrl=https://pkgsrc.smartos.org
if [[ "$__UseMirror" == 1 ]]; then
BaseUrl=https://pkgsrc.smartos.skylime.net
fi
BaseUrl="$BaseUrl/packages/SmartOS/2019Q4/${__illumosArch}/All"
echo "Downloading manifest"
if [[ "$__hasWget" == 1 ]]; then
wget "$BaseUrl"
else
curl -SLO "$BaseUrl"
fi
echo "Downloading dependencies."
read -ra array <<<"$__IllumosPackages"
for package in "${array[@]}"; do
echo "Installing '$package'"
# find last occurrence of package in listing and extract its name
package="$(sed -En '/.*href="('"$package"'-[0-9].*).tgz".*/h;$!d;g;s//\1/p' All)"
echo "Resolved name '$package'"
if [[ "$__hasWget" == 1 ]]; then
wget "$BaseUrl"/"$package".tgz
else
curl -SLO "$BaseUrl"/"$package".tgz
fi
ar -x "$package".tgz
tar --skip-old-files -xzf "$package".tmp.tg* -C "$__RootfsDir" 2>/dev/null
done
echo "Cleaning up temporary files."
popd
rm -rf "$__RootfsDir"/{tmp,+*}
mkdir -p "$__RootfsDir"/usr/include/net
mkdir -p "$__RootfsDir"/usr/include/netpacket
if [[ "$__hasWget" == 1 ]]; then
wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h
wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
else
curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h
curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
fi
elif [[ "$__CodeName" == "haiku" ]]; then
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
echo "Building Haiku sysroot for $__HaikuArch"
mkdir -p "$__RootfsDir/tmp"
pushd "$__RootfsDir/tmp"
mkdir "$__RootfsDir/tmp/download"
ensureDownloadTool
echo "Downloading Haiku package tools"
git clone https://github.com/haiku/haiku-toolchains-ubuntu --depth 1 "$__RootfsDir/tmp/script"
if [[ "$__hasWget" == 1 ]]; then
wget -O "$__RootfsDir/tmp/download/hosttools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --hosttools)"
else
curl -SLo "$__RootfsDir/tmp/download/hosttools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --hosttools)"
fi
unzip -o "$__RootfsDir/tmp/download/hosttools.zip" -d "$__RootfsDir/tmp/bin"
HaikuBaseUrl="https://eu.hpkg.haiku-os.org/haiku/master/$__HaikuArch/current"
HaikuPortsBaseUrl="https://eu.hpkg.haiku-os.org/haikuports/master/$__HaikuArch/current"
echo "Downloading HaikuPorts package repository index..."
if [[ "$__hasWget" == 1 ]]; then
wget -P "$__RootfsDir/tmp/download" "$HaikuPortsBaseUrl/repo"
else
curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$HaikuPortsBaseUrl/repo"
fi
echo "Downloading Haiku packages"
read -ra array <<<"$__HaikuPackages"
for package in "${array[@]}"; do
echo "Downloading $package..."
hpkgFilename="$(LD_LIBRARY_PATH="$__RootfsDir/tmp/bin" "$__RootfsDir/tmp/bin/package_repo" list -f "$__RootfsDir/tmp/download/repo" |
grep -E "${package}-" | sort -V | tail -n 1 | xargs)"
if [ -z "$hpkgFilename" ]; then
>&2 echo "ERROR: package $package missing."
exit 1
fi
echo "Resolved filename: $hpkgFilename..."
hpkgDownloadUrl="$HaikuPortsBaseUrl/packages/$hpkgFilename"
if [[ "$__hasWget" == 1 ]]; then
wget -P "$__RootfsDir/tmp/download" "$hpkgDownloadUrl"
else
curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$hpkgDownloadUrl"
fi
done
for package in haiku haiku_devel; do
echo "Downloading $package..."
if [[ "$__hasWget" == 1 ]]; then
hpkgVersion="$(wget -qO- "$HaikuBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')"
wget -P "$__RootfsDir/tmp/download" "$HaikuBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg"
else
hpkgVersion="$(curl -sSL "$HaikuBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')"
curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$HaikuBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg"
fi
done
# Set up the sysroot
echo "Setting up sysroot and extracting required packages"
mkdir -p "$__RootfsDir/boot/system"
for file in "$__RootfsDir/tmp/download/"*.hpkg; do
echo "Extracting $file..."
LD_LIBRARY_PATH="$__RootfsDir/tmp/bin" "$__RootfsDir/tmp/bin/package" extract -C "$__RootfsDir/boot/system" "$file"
done
# Download buildtools
echo "Downloading Haiku buildtools"
if [[ "$__hasWget" == 1 ]]; then
wget -O "$__RootfsDir/tmp/download/buildtools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --buildtools --arch=$__HaikuArch)"
else
curl -SLo "$__RootfsDir/tmp/download/buildtools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --buildtools --arch=$__HaikuArch)"
fi
unzip -o "$__RootfsDir/tmp/download/buildtools.zip" -d "$__RootfsDir"
# Cleaning up temporary files
echo "Cleaning up temporary files"
popd
rm -rf "$__RootfsDir/tmp"
elif [[ -n "$__CodeName" ]]; then
__Suites="$__CodeName $(for suite in $__UbuntuSuites; do echo -n "$__CodeName-$suite "; done)"
if [[ "$__SkipEmulation" == "1" ]]; then
if [[ -z "$AR" ]]; then
if command -v ar &>/dev/null; then
AR="$(command -v ar)"
elif command -v llvm-ar &>/dev/null; then
AR="$(command -v llvm-ar)"
else
echo "Unable to find ar or llvm-ar on PATH, add them to PATH or set AR environment variable pointing to the available AR tool"
exit 1
fi
fi
PYTHON=${PYTHON_EXECUTABLE:-python3}
# shellcheck disable=SC2086,SC2046
echo running "$PYTHON" "$__CrossDir/install-debs.py" --arch "$__UbuntuArch" --mirror "$__UbuntuRepo" --rootfsdir "$__RootfsDir" --artool "$AR" \
$(for suite in $__Suites; do echo -n "--suite $suite "; done) \
$__UbuntuPackages
# shellcheck disable=SC2086,SC2046
"$PYTHON" "$__CrossDir/install-debs.py" --arch "$__UbuntuArch" --mirror "$__UbuntuRepo" --rootfsdir "$__RootfsDir" --artool "$AR" \
$(for suite in $__Suites; do echo -n "--suite $suite "; done) \
$__UbuntuPackages
exit 0
fi
__UpdateOptions=
if [[ "$__SkipSigCheck" == "0" ]]; then
__Keyring="$__Keyring --force-check-gpg"
else
__Keyring=
__UpdateOptions="--allow-unauthenticated --allow-insecure-repositories"
fi
# shellcheck disable=SC2086
echo running debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"
# shellcheck disable=SC2086
if ! debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"; then
echo "debootstrap failed! dumping debootstrap.log"
cat "$__RootfsDir/debootstrap/debootstrap.log"
exit 1
fi
rm -rf "$__RootfsDir"/etc/apt/*.{sources,list} "$__RootfsDir"/etc/apt/sources.list.d
mkdir -p "$__RootfsDir/etc/apt/sources.list.d/"
# shellcheck disable=SC2086
cat > "$__RootfsDir/etc/apt/sources.list.d/$__CodeName.sources" < token2) - (token1 < token2)
else:
return -1 if isinstance(token1, str) else 1
return len(tokens1) - len(tokens2)
def compare_debian_versions(version1, version2):
    """Compare two Debian package versions.

    Returns a negative value when version1 sorts before version2, zero when
    they are equivalent, and a positive value otherwise.  Ordering follows
    Debian rules: epoch first, then upstream version, then revision.
    """
    epoch1, upstream1, revision1 = parse_debian_version(version1)
    epoch2, upstream2, revision2 = parse_debian_version(version2)

    # The epoch dominates every other component.
    if epoch1 != epoch2:
        return epoch1 - epoch2

    # Upstream versions break epoch ties; revisions break upstream ties.
    upstream_cmp = compare_upstream_version(upstream1, upstream2)
    return upstream_cmp if upstream_cmp != 0 else compare_upstream_version(revision1, revision2)
def resolve_dependencies(packages, aliases, desired_packages):
    """Recursively resolves dependencies for the desired packages.

    Walks the package index breadth-first: each desired name (or, for a
    virtual name, the first concrete provider from `aliases`) is appended to
    the result, and its 'Depends' entries are queued.  The returned list
    preserves discovery order.  Exits the process with status 1 when a
    requested package cannot be found at all.
    """
    resolved = []
    to_process = deque(desired_packages)

    while to_process:
        current = to_process.popleft()
        # Map a virtual/alias name to its first concrete provider.
        resolved_package = current if current in packages else aliases.get(current, [None])[0]
        if not resolved_package:
            print(f"Error: Package '{current}' was not found in the available packages.")
            sys.exit(1)
        if resolved_package in resolved:
            continue
        resolved.append(resolved_package)

        deps = packages.get(resolved_package, {}).get("Depends", "")
        if not deps:
            continue
        # 'Depends' looks like "a (>= 1), b, c" — keep only the bare names,
        # and only queue names that exist in the index and are not seen yet.
        for dep in (entry.split(' ')[0] for entry in deps.split(', ') if entry):
            if dep not in resolved and dep not in to_process and dep in packages:
                to_process.append(dep)

    return resolved
def parse_package_index(content):
    """Parses the Packages.gz file and returns package information.

    Returns a tuple ``(packages, aliases)``: ``packages`` maps each package
    name to its Version/Filename/Depends fields (keeping only the highest
    version when a name appears more than once) and ``aliases`` maps each
    virtual name from 'Provides' to the list of concrete providers.
    """
    packages = {}
    aliases = {}

    # Entries in a Packages index are separated by one or more blank lines.
    for entry in re.split(r'\n\n+', content):
        fields = dict(re.findall(r'^(\S+): (.+)$', entry, re.MULTILINE))
        if "Package" not in fields:
            continue

        package_name = fields["Package"]
        version = fields.get("Version")

        # Only update if package_name is not in packages or if the new version is higher
        if package_name not in packages or compare_debian_versions(version, packages[package_name]["Version"]) > 0:
            packages[package_name] = {
                "Version": version,
                "Filename": fields.get("Filename"),
                "Depends": fields.get("Depends"),
            }

        # Update aliases if package provides any alternatives
        provides = fields.get("Provides", None)
        if provides:
            for alias in provides.split(","):
                # Strip version specifiers such as " (= 1.2)".
                alias_name = re.sub(r'\s*\(=.*\)', '', alias.strip())
                aliases.setdefault(alias_name, [])
                if package_name not in aliases[alias_name]:
                    aliases[alias_name].append(package_name)

    return packages, aliases
def install_packages(mirror, packages_info, aliases, tmp_dir, extract_dir, ar_tool, desired_packages):
    """Downloads .deb files and extracts them."""
    resolved_packages = resolve_dependencies(packages_info, aliases, desired_packages)
    print(f"Resolved packages (including dependencies): {resolved_packages}")

    # Index entries to fetch: every resolved name plus any concrete providers
    # of resolved virtual names.
    packages_to_download = {}
    for pkg in resolved_packages:
        if pkg in packages_info:
            packages_to_download[pkg] = packages_info[pkg]
        if pkg in aliases:
            for alias in aliases[pkg]:
                if alias in packages_info:
                    packages_to_download[alias] = packages_info[alias]

    asyncio.run(download_deb_files_parallel(mirror, packages_to_download, tmp_dir))

    # Map each package name to the local path of its downloaded .deb archive.
    package_to_deb_file_map = {}
    for pkg in resolved_packages:
        pkg_info = packages_info.get(pkg)
        if not pkg_info:
            continue
        deb_filename = pkg_info.get("Filename")
        if deb_filename:
            package_to_deb_file_map[pkg] = os.path.join(tmp_dir, os.path.basename(deb_filename))

    # Extract in reverse resolution order (deepest dependencies first).
    for pkg in reversed(resolved_packages):
        deb_file = package_to_deb_file_map.get(pkg)
        if deb_file and os.path.exists(deb_file):
            extract_deb_file(deb_file, tmp_dir, extract_dir, ar_tool)

    print("All done!")
def extract_deb_file(deb_file, tmp_dir, extract_dir, ar_tool):
    """Extract .deb file contents into extract_dir.

    A .deb package is an ar(1) archive containing a 'data.tar.{xz,gz,zst}'
    member.  That member is located with 'ar t', streamed out with 'ar p',
    decompressed when the standard library cannot do it natively (zstd), and
    finally unpacked into extract_dir.

    Raises FileNotFoundError when no data.tar member exists and ValueError
    for an unsupported compression extension.

    Note: ar_tool is treated as a single executable path (the script's
    --artool argument, e.g. "ar" or "llvm-ar").
    """
    os.makedirs(extract_dir, exist_ok=True)
    deb_path = os.path.abspath(deb_file)

    with tempfile.TemporaryDirectory(dir=tmp_dir) as tmp_subdir:
        # List archive members.  Use the argument-list form instead of the
        # previous shell=True string so paths containing spaces or shell
        # metacharacters cannot break or inject into the command line.
        result = subprocess.run([ar_tool, "t", deb_path], cwd=tmp_subdir,
                                check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

        tar_filename = None
        for line in result.stdout.decode().splitlines():
            if line.startswith("data.tar"):
                tar_filename = line.strip()
                break

        if not tar_filename:
            raise FileNotFoundError(f"Could not find 'data.tar.*' in {deb_file}.")

        tar_file_path = os.path.join(tmp_subdir, tar_filename)
        print(f"Extracting {tar_filename} from {deb_file}..")

        # 'ar p' prints the member to stdout; capture it directly into a file
        # rather than relying on a shell '>' redirection.
        with open(tar_file_path, "wb") as tar_out:
            subprocess.run([ar_tool, "p", deb_path, tar_filename], check=True, stdout=tar_out)

        file_extension = os.path.splitext(tar_file_path)[1].lower()
        if file_extension == ".xz":
            mode = "r:xz"
        elif file_extension == ".gz":
            mode = "r:gz"
        elif file_extension == ".zst":
            # zstd is not supported by standard library yet
            decompressed_tar_path = tar_file_path.replace(".zst", "")
            with open(tar_file_path, "rb") as zst_file, open(decompressed_tar_path, "wb") as decompressed_file:
                dctx = zstandard.ZstdDecompressor()
                dctx.copy_stream(zst_file, decompressed_file)
            tar_file_path = decompressed_tar_path
            mode = "r"
        else:
            raise ValueError(f"Unsupported compression format: {file_extension}")

        with tarfile.open(tar_file_path, mode) as tar:
            tar.extractall(path=extract_dir, filter='fully_trusted')
def finalize_setup(rootfsdir):
    """Merge a real top-level 'lib' directory into 'usr/lib' and symlink it.

    After package extraction the rootfs may contain a real /lib directory:
    its contents are copied into /usr/lib, the original tree is removed, and
    /lib is recreated as a symlink to /usr/lib.  A pre-existing /lib symlink
    is simply removed.  Does nothing when /lib does not exist.
    """
    lib_dir = os.path.join(rootfsdir, 'lib')
    usr_lib_dir = os.path.join(rootfsdir, 'usr', 'lib')

    if not os.path.exists(lib_dir):
        return

    if os.path.islink(lib_dir):
        os.remove(lib_dir)
        return

    os.makedirs(usr_lib_dir, exist_ok=True)
    for entry in os.listdir(lib_dir):
        source_path = os.path.join(lib_dir, entry)
        target_path = os.path.join(usr_lib_dir, entry)
        if os.path.isdir(source_path):
            shutil.copytree(source_path, target_path, dirs_exist_ok=True)
        else:
            shutil.copy2(source_path, target_path)

    shutil.rmtree(lib_dir)
    os.symlink(usr_lib_dir, lib_dir)
if __name__ == "__main__":
    # Command-line entry point: builds a Debian/Ubuntu rootfs by downloading
    # the package indexes for the requested suites, resolving dependencies,
    # and extracting the .deb archives into --rootfsdir.
    parser = argparse.ArgumentParser(description="Generate rootfs for .NET runtime on Debian-like OS")
    parser.add_argument("--distro", required=False, help="Distro name (e.g., debian, ubuntu, etc.)")
    parser.add_argument("--arch", required=True, help="Architecture (e.g., amd64, loong64, etc.)")
    parser.add_argument("--rootfsdir", required=True, help="Destination directory.")
    parser.add_argument('--suite', required=True, action='append', help='Specify one or more repository suites to collect index data.')
    parser.add_argument("--mirror", required=False, help="Mirror (e.g., http://ftp.debian.org/debian-ports etc.)")
    parser.add_argument("--artool", required=False, default="ar", help="ar tool to extract debs (e.g., ar, llvm-ar etc.)")
    parser.add_argument("packages", nargs="+", help="List of package names to be installed.")
    args = parser.parse_args()
    # Default mirror is derived from distro + architecture when not provided.
    if args.mirror is None:
        if args.distro == "ubuntu":
            args.mirror = "http://archive.ubuntu.com/ubuntu" if args.arch in ["amd64", "i386"] else "http://ports.ubuntu.com/ubuntu-ports"
        elif args.distro == "debian":
            args.mirror = "http://ftp.debian.org/debian-ports"
        else:
            raise Exception("Unsupported distro")
    DESIRED_PACKAGES = args.packages + [ # base packages
        "dpkg",
        "busybox",
        "libc-bin",
        "base-files",
        "base-passwd",
        "debianutils"
    ]
    print(f"Creating rootfs. rootfsdir: {args.rootfsdir}, distro: {args.distro}, arch: {args.arch}, suites: {args.suite}, mirror: {args.mirror}")
    # Fetch and parse the package index, then download/extract inside a
    # temporary working directory that is cleaned up automatically.
    package_index_content = asyncio.run(download_package_index_parallel(args.mirror, args.arch, args.suite))
    packages_info, aliases = parse_package_index(package_index_content)
    with tempfile.TemporaryDirectory() as tmp_dir:
        install_packages(args.mirror, packages_info, aliases, tmp_dir, args.rootfsdir, args.artool, DESIRED_PACKAGES)
        finalize_setup(args.rootfsdir)
================================================
FILE: eng/common/cross/riscv64/tizen/tizen.patch
================================================
diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
--- a/usr/lib64/libc.so 2016-12-30 23:00:08.284951863 +0900
+++ b/usr/lib64/libc.so 2016-12-30 23:00:32.140951815 +0900
@@ -2,4 +2,4 @@
Use the shared library, but some functions are only in
the static library, so try that secondarily. */
OUTPUT_FORMAT(elf64-littleriscv)
-GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib64/ld-linux-riscv64-lp64d.so.1 ) )
+GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-riscv64-lp64d.so.1 ) )
================================================
FILE: eng/common/cross/tizen-build-rootfs.sh
================================================
#!/usr/bin/env bash
# Build a Tizen rootfs for the given architecture: fetch RPMs via
# tizen-fetch.sh, unpack them with rpm2cpio into $ROOTFS_DIR, then apply
# arch-specific fixups (asm symlink, tizen.patch, riscv64 symlink repairs).
set -e
ARCH=$1
LINK_ARCH=$ARCH
# Map the generic arch name to the Tizen RPM arch and the asm include dir.
case "$ARCH" in
arm)
TIZEN_ARCH="armv7hl"
;;
armel)
TIZEN_ARCH="armv7l"
LINK_ARCH="arm"
;;
arm64)
TIZEN_ARCH="aarch64"
;;
x86)
TIZEN_ARCH="i686"
;;
x64)
TIZEN_ARCH="x86_64"
LINK_ARCH="x86"
;;
riscv64)
TIZEN_ARCH="riscv64"
LINK_ARCH="riscv"
;;
*)
echo "Unsupported architecture for tizen: $ARCH"
exit 1
esac
__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
__TIZEN_CROSSDIR="$__CrossDir/${ARCH}/tizen"
if [[ -z "$ROOTFS_DIR" ]]; then
echo "ROOTFS_DIR is not defined."
exit 1;
fi
TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp
mkdir -p $TIZEN_TMP_DIR
# Download files
echo ">>Start downloading files"
VERBOSE=1 $__CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR $TIZEN_ARCH
# NOTE(review): the banner echoes below look truncated by extraction — verify against upstream.
echo "<>Start constructing Tizen rootfs"
TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
cd $ROOTFS_DIR
# Unpack every fetched RPM into the rootfs.
for f in $TIZEN_RPM_FILES; do
rpm2cpio $f | cpio -idm --quiet
done
echo "<>Start configuring Tizen rootfs"
# Point /usr/include/asm at the arch-specific asm headers.
ln -sfn asm-${LINK_ARCH} ./usr/include/asm
patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
if [[ "$TIZEN_ARCH" == "riscv64" ]]; then
# The riscv64 RPMs ship absolute symlinks that are broken inside a rootfs;
# recreate them as relative links into ../../lib64.
echo "Fixing broken symlinks in $PWD"
rm ./usr/lib64/libresolv.so
ln -s ../../lib64/libresolv.so.2 ./usr/lib64/libresolv.so
rm ./usr/lib64/libpthread.so
ln -s ../../lib64/libpthread.so.0 ./usr/lib64/libpthread.so
rm ./usr/lib64/libdl.so
ln -s ../../lib64/libdl.so.2 ./usr/lib64/libdl.so
rm ./usr/lib64/libutil.so
ln -s ../../lib64/libutil.so.1 ./usr/lib64/libutil.so
rm ./usr/lib64/libm.so
ln -s ../../lib64/libm.so.6 ./usr/lib64/libm.so
rm ./usr/lib64/librt.so
ln -s ../../lib64/librt.so.1 ./usr/lib64/librt.so
rm ./lib/ld-linux-riscv64-lp64d.so.1
ln -s ../lib64/ld-linux-riscv64-lp64d.so.1 ./lib/ld-linux-riscv64-lp64d.so.1
fi
echo "</dev/null; then
VERBOSE=0
fi
# Print a log message when the current verbosity is high enough.
# $1        minimum VERBOSE level required for the message to be shown
# ${@:2}    the message itself
# Fix: the threshold was hard-coded as 1, ignoring $1 entirely — so Error's
# level-0 messages were suppressed at VERBOSE=0 and Debug's level-2 messages
# leaked at VERBOSE=1.  Compare against the caller-supplied level instead.
Log()
{
    if [ $VERBOSE -ge $1 ]; then
        echo ${@:2}
    fi
}
# Informational message (blue), logged at level 1.
Inform()
{
Log 1 -e "\x1B[0;34m$@\x1B[m"
}
# Debug message (green), logged at level 2.
Debug()
{
Log 2 -e "\x1B[0;32m$@\x1B[m"
}
# Error message (red), logged at level 0 and redirected to stderr.
Error()
{
>&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
}
# Download a URL to a file with curl.
# $1  URL to fetch
# $2  destination file path
# $3  when non-empty (and VERBOSE >= 1), show curl's progress bar
# NOTE(review): $URL and $FILE are expanded unquoted — presumably callers
# never pass values containing whitespace; confirm before reusing elsewhere.
Fetch()
{
URL=$1
FILE=$2
PROGRESS=$3
if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
CURL_OPT="--progress-bar"
else
CURL_OPT="--silent"
fi
curl $CURL_OPT $URL > $FILE
}
# Fail fast when a required external tool is missing.
hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; }
hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; }
hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; }
# $1: working directory for downloads (created if it does not exist).
TMPDIR=$1
if [ ! -d $TMPDIR ]; then
TMPDIR=./tizen_tmp
Debug "Create temporary directory : $TMPDIR"
mkdir -p $TMPDIR
fi
# $2: Tizen RPM architecture (e.g. armv7hl, aarch64).
TIZEN_ARCH=$2
# Snapshot repository root and the metadata file names used below.
TIZEN_URL=http://download.tizen.org/snapshots/TIZEN/Tizen
BUILD_XML=build.xml
REPOMD_XML=repomd.xml
PRIMARY_XML=primary.xml
TARGET_URL="http://__not_initialized"
# Evaluate an XPath expression against an XML file.
# $1  XPath expression (intentionally unquoted below — callers rely on the
#     expression being a single shell word)
# $2  XML file path
# On success stores the result in the global XPATH_RESULT; exits the script
# when the expression matches nothing (whitespace-only results count as empty).
Xpath_get()
{
XPATH_RESULT=''
XPATH=$1
XML_FILE=$2
RESULT=$(xmllint --xpath $XPATH $XML_FILE)
if [[ -z ${RESULT// } ]]; then
Error "Can not find target from $XML_FILE"
Debug "Xpath = $XPATH"
exit 1
fi
XPATH_RESULT=$RESULT
}
# Prepare the package metadata for one Tizen profile.
# $1  build target name (looked up inside build.xml)
# $2  profile name (e.g. Tizen-Base, Tizen-Unified)
# Downloads build.xml, resolves the binary repo path for the target, then
# fetches and unpacks the repomd/primary metadata.  Leaves TARGET_URL and
# TMP_PRIMARY set for subsequent fetch_tizen_pkgs calls.
fetch_tizen_pkgs_init()
{
TARGET=$1
PROFILE=$2
Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE"
# Fresh per-profile metadata directory.
TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs
if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi
mkdir -p $TMP_PKG_DIR
PKG_URL=$TIZEN_URL/$PROFILE/latest
BUILD_XML_URL=$PKG_URL/$BUILD_XML
TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML
TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML
TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML
TMP_PRIMARYGZ=${TMP_PRIMARY}.gz
Fetch $BUILD_XML_URL $TMP_BUILD
Debug "fetch $BUILD_XML_URL to $TMP_BUILD"
# Locate the binary repository path for the requested build target.
TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
Xpath_get $TARGET_XPATH $TMP_BUILD
TARGET_PATH=$XPATH_RESULT
TARGET_URL=$PKG_URL/$TARGET_PATH
# Resolve the primary package index from repomd.xml and decompress it.
REPOMD_URL=$TARGET_URL/repodata/repomd.xml
PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)'
Fetch $REPOMD_URL $TMP_REPOMD
Debug "fetch $REPOMD_URL to $TMP_REPOMD"
Xpath_get $PRIMARY_XPATH $TMP_REPOMD
PRIMARY_XML_PATH=$XPATH_RESULT
PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH
Fetch $PRIMARY_URL $TMP_PRIMARYGZ
Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ"
gunzip $TMP_PRIMARYGZ
Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY"
}
# fetch_tizen_pkgs <arch> <pkg>...: download each named package for <arch>
# from the repository prepared by fetch_tizen_pkgs_init, verifying the
# SHA-256 checksum recorded in primary.xml. Exits on any mismatch.
fetch_tizen_pkgs()
{
ARCH=$1
# XPath templates; _PKG_ and _ARCH_ are substituted per package below.
PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)'
PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())'
for pkg in ${@:2}
do
Inform "Fetching... $pkg"
# Resolve the package's repo-relative path...
XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg}
XPATH=${XPATH/_ARCH_/$ARCH}
Xpath_get $XPATH $TMP_PRIMARY
PKG_PATH=$XPATH_RESULT
# ...and its expected checksum.
XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg}
XPATH=${XPATH/_ARCH_/$ARCH}
Xpath_get $XPATH $TMP_PRIMARY
CHECKSUM=$XPATH_RESULT
PKG_URL=$TARGET_URL/$PKG_PATH
PKG_FILE=$(basename $PKG_PATH)
PKG_PATH=$TMPDIR/$PKG_FILE
Debug "Download $PKG_URL to $PKG_PATH"
Fetch $PKG_URL $PKG_PATH true
# Verify the download; abort rather than build a rootfs from corrupt files.
echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null
if [ $? -ne 0 ]; then
Error "Fail to fetch $PKG_URL to $PKG_PATH"
Debug "Checksum = $CHECKSUM"
exit 1
fi
done
}
# Driver: pull the toolchain/runtime packages from the two Tizen profiles.
BASE="Tizen-Base"
UNIFIED="Tizen-Unified"
Inform "Initialize ${TIZEN_ARCH} base"
fetch_tizen_pkgs_init standard $BASE
Inform "fetch common packages"
fetch_tizen_pkgs ${TIZEN_ARCH} gcc gcc-devel-static glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils
Inform "fetch coreclr packages"
fetch_tizen_pkgs ${TIZEN_ARCH} libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
# lldb packages are not available for riscv64.
if [ "$TIZEN_ARCH" != "riscv64" ]; then
fetch_tizen_pkgs ${TIZEN_ARCH} lldb lldb-devel
fi
Inform "fetch corefx packages"
fetch_tizen_pkgs ${TIZEN_ARCH} libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel
Inform "Initialize standard unified"
fetch_tizen_pkgs_init standard $UNIFIED
Inform "fetch corefx packages"
fetch_tizen_pkgs ${TIZEN_ARCH} gssdp gssdp-devel tizen-release
================================================
FILE: eng/common/cross/toolchain.cmake
================================================
# Cross-compilation toolchain file. The target rootfs comes from ROOTFS_DIR
# and the target architecture from TARGET_BUILD_ARCH.
set(CROSS_ROOTFS $ENV{ROOTFS_DIR})
# reset platform variables (e.g. cmake 3.25 sets LINUX=1)
unset(LINUX)
unset(FREEBSD)
unset(ILLUMOS)
unset(ANDROID)
unset(TIZEN)
unset(HAIKU)
set(TARGET_ARCH_NAME $ENV{TARGET_BUILD_ARCH})
# Detect the target OS by probing for files characteristic of each rootfs.
if(EXISTS ${CROSS_ROOTFS}/bin/freebsd-version)
set(CMAKE_SYSTEM_NAME FreeBSD)
set(FREEBSD 1)
elseif(EXISTS ${CROSS_ROOTFS}/usr/platform/i86pc)
set(CMAKE_SYSTEM_NAME SunOS)
set(ILLUMOS 1)
elseif(EXISTS ${CROSS_ROOTFS}/boot/system/develop/headers/config/HaikuConfig.h)
set(CMAKE_SYSTEM_NAME Haiku)
set(HAIKU 1)
else()
set(CMAKE_SYSTEM_NAME Linux)
set(LINUX 1)
endif()
set(CMAKE_SYSTEM_VERSION 1)
# Tizen and Android are Linux variants identified by rootfs marker files.
if(EXISTS ${CROSS_ROOTFS}/etc/tizen-release)
set(TIZEN 1)
elseif(EXISTS ${CROSS_ROOTFS}/android_platform)
set(ANDROID 1)
endif()
# Map TARGET_ARCH_NAME to CMAKE_SYSTEM_PROCESSOR and a toolchain triple.
# Alpine (musl) rootfs are detected by their gcc directory; Tizen gets an
# additional TIZEN_TOOLCHAIN triple used for include/link paths below.
if(TARGET_ARCH_NAME STREQUAL "arm")
set(CMAKE_SYSTEM_PROCESSOR armv7l)
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv7-alpine-linux-musleabihf)
set(TOOLCHAIN "armv7-alpine-linux-musleabihf")
elseif(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf)
set(TOOLCHAIN "armv6-alpine-linux-musleabihf")
else()
set(TOOLCHAIN "arm-linux-gnueabihf")
endif()
if(TIZEN)
set(TIZEN_TOOLCHAIN "armv7hl-tizen-linux-gnueabihf")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "arm64")
set(CMAKE_SYSTEM_PROCESSOR aarch64)
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/aarch64-alpine-linux-musl)
set(TOOLCHAIN "aarch64-alpine-linux-musl")
elseif(LINUX)
set(TOOLCHAIN "aarch64-linux-gnu")
if(TIZEN)
set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu")
endif()
elseif(FREEBSD)
# FreeBSD cross-builds drive clang via a target triple instead.
set(triple "aarch64-unknown-freebsd12")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "armel")
set(CMAKE_SYSTEM_PROCESSOR armv7l)
set(TOOLCHAIN "arm-linux-gnueabi")
if(TIZEN)
set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "armv6")
set(CMAKE_SYSTEM_PROCESSOR armv6l)
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf)
set(TOOLCHAIN "armv6-alpine-linux-musleabihf")
else()
set(TOOLCHAIN "arm-linux-gnueabihf")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "loongarch64")
set(CMAKE_SYSTEM_PROCESSOR "loongarch64")
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/loongarch64-alpine-linux-musl)
set(TOOLCHAIN "loongarch64-alpine-linux-musl")
else()
set(TOOLCHAIN "loongarch64-linux-gnu")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "ppc64le")
set(CMAKE_SYSTEM_PROCESSOR ppc64le)
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/powerpc64le-alpine-linux-musl)
set(TOOLCHAIN "powerpc64le-alpine-linux-musl")
else()
set(TOOLCHAIN "powerpc64le-linux-gnu")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "riscv64")
set(CMAKE_SYSTEM_PROCESSOR riscv64)
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/riscv64-alpine-linux-musl)
set(TOOLCHAIN "riscv64-alpine-linux-musl")
else()
set(TOOLCHAIN "riscv64-linux-gnu")
if(TIZEN)
set(TIZEN_TOOLCHAIN "riscv64-tizen-linux-gnu")
endif()
endif()
elseif(TARGET_ARCH_NAME STREQUAL "s390x")
set(CMAKE_SYSTEM_PROCESSOR s390x)
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/s390x-alpine-linux-musl)
set(TOOLCHAIN "s390x-alpine-linux-musl")
else()
set(TOOLCHAIN "s390x-linux-gnu")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "x64")
set(CMAKE_SYSTEM_PROCESSOR x86_64)
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/x86_64-alpine-linux-musl)
set(TOOLCHAIN "x86_64-alpine-linux-musl")
elseif(LINUX)
set(TOOLCHAIN "x86_64-linux-gnu")
if(TIZEN)
set(TIZEN_TOOLCHAIN "x86_64-tizen-linux-gnu")
endif()
elseif(FREEBSD)
set(triple "x86_64-unknown-freebsd12")
elseif(ILLUMOS)
set(TOOLCHAIN "x86_64-illumos")
elseif(HAIKU)
set(TOOLCHAIN "x86_64-unknown-haiku")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "x86")
set(CMAKE_SYSTEM_PROCESSOR i686)
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/i586-alpine-linux-musl)
set(TOOLCHAIN "i586-alpine-linux-musl")
else()
set(TOOLCHAIN "i686-linux-gnu")
endif()
if(TIZEN)
set(TIZEN_TOOLCHAIN "i586-tizen-linux-gnu")
endif()
else()
message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only arm, arm64, armel, armv6, loongarch64, ppc64le, riscv64, s390x, x64 and x86 are supported!")
endif()
# Allow the environment to override the detected triple.
if(DEFINED ENV{TOOLCHAIN})
set(TOOLCHAIN $ENV{TOOLCHAIN})
endif()
# Specify include paths
if(TIZEN)
# Resolves the gcc version subdirectory under the Tizen toolchain prefix
# (the version number varies per rootfs) and exports TIZEN_TOOLCHAIN_PATH.
# NOTE(review): only the first glob match is used — assumes a single
# version directory exists; verify for multi-version rootfs.
function(find_toolchain_dir prefix)
# Dynamically find the version subdirectory
file(GLOB DIRECTORIES "${prefix}/*")
list(GET DIRECTORIES 0 FIRST_MATCH)
get_filename_component(TOOLCHAIN_VERSION ${FIRST_MATCH} NAME)
set(TIZEN_TOOLCHAIN_PATH "${prefix}/${TOOLCHAIN_VERSION}" PARENT_SCOPE)
endfunction()
# 32-bit targets keep gcc under lib/, 64-bit under lib64/.
if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$")
find_toolchain_dir("${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
else()
find_toolchain_dir("${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
endif()
message(STATUS "TIZEN_TOOLCHAIN_PATH set to: ${TIZEN_TOOLCHAIN_PATH}")
include_directories(SYSTEM ${TIZEN_TOOLCHAIN_PATH}/include/c++)
include_directories(SYSTEM ${TIZEN_TOOLCHAIN_PATH}/include/c++/${TIZEN_TOOLCHAIN})
endif()
# locate_toolchain_exec(<exec> <var>): find a toolchain executable (e.g. gcc)
# and store its path in <var>. An environment override CLR_<EXEC> (uppercased)
# wins over PATH lookup; a versioned name is preferred when
# CLR_CMAKE_COMPILER_FILE_NAME_VERSION is set.
function(locate_toolchain_exec exec var)
set(TOOLSET_PREFIX ${TOOLCHAIN}-)
string(TOUPPER ${exec} EXEC_UPPERCASE)
if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "")
set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE)
return()
endif()
find_program(EXEC_LOCATION_${exec}
NAMES
"${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}"
"${TOOLSET_PREFIX}${exec}")
if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND")
message(FATAL_ERROR "Unable to find toolchain executable. Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.")
endif()
set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE)
endfunction()
# Per-OS compiler/sysroot configuration.
if(ANDROID)
if(TARGET_ARCH_NAME STREQUAL "arm")
set(ANDROID_ABI armeabi-v7a)
elseif(TARGET_ARCH_NAME STREQUAL "arm64")
set(ANDROID_ABI arm64-v8a)
endif()
# extract platform number required by the NDK's toolchain
file(READ "${CROSS_ROOTFS}/android_platform" RID_FILE_CONTENTS)
string(REPLACE "RID=" "" ANDROID_RID "${RID_FILE_CONTENTS}")
string(REGEX REPLACE ".*\\.([0-9]+)-.*" "\\1" ANDROID_PLATFORM "${ANDROID_RID}")
set(ANDROID_TOOLCHAIN clang)
set(FEATURE_EVENT_TRACE 0) # disable event trace as there is no lttng-ust package in termux repository
set(CMAKE_SYSTEM_LIBRARY_PATH "${CROSS_ROOTFS}/usr/lib")
set(CMAKE_SYSTEM_INCLUDE_PATH "${CROSS_ROOTFS}/usr/include")
# include official NDK toolchain script
include(${CROSS_ROOTFS}/../build/cmake/android.toolchain.cmake)
elseif(FREEBSD)
# we cross-compile by instructing clang
set(CMAKE_C_COMPILER_TARGET ${triple})
set(CMAKE_CXX_COMPILER_TARGET ${triple})
set(CMAKE_ASM_COMPILER_TARGET ${triple})
set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fuse-ld=lld")
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fuse-ld=lld")
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -fuse-ld=lld")
elseif(ILLUMOS)
set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}")
set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp")
set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp")
include_directories(SYSTEM ${CROSS_ROOTFS}/include)
locate_toolchain_exec(gcc CMAKE_C_COMPILER)
locate_toolchain_exec(g++ CMAKE_CXX_COMPILER)
elseif(HAIKU)
set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
set(CMAKE_PROGRAM_PATH "${CMAKE_PROGRAM_PATH};${CROSS_ROOTFS}/cross-tools-x86_64/bin")
set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}")
set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp")
set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp")
locate_toolchain_exec(gcc CMAKE_C_COMPILER)
locate_toolchain_exec(g++ CMAKE_CXX_COMPILER)
# let CMake set up the correct search paths
include(Platform/Haiku)
else()
# Plain Linux: sysroot plus external binutils/toolchain under /usr.
set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
set(CMAKE_C_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr")
set(CMAKE_CXX_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr")
set(CMAKE_ASM_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr")
endif()
# Specify link flags
# add_toolchain_linker_flag(<flag> [config]): append <flag> to the
# CMAKE_{EXE,SHARED}_LINKER_FLAGS*_INIT variables. When the optional second
# argument names a configuration (e.g. DEBUG) the flag is scoped to that
# configuration's variables only.
function(add_toolchain_linker_flag Flag)
  if("${ARGV1}" STREQUAL "")
    set(cfg_suffix "")
  else()
    set(cfg_suffix "_${ARGV1}")
  endif()
  set("CMAKE_EXE_LINKER_FLAGS${cfg_suffix}_INIT" "${CMAKE_EXE_LINKER_FLAGS${cfg_suffix}_INIT} ${Flag}" PARENT_SCOPE)
  set("CMAKE_SHARED_LINKER_FLAGS${cfg_suffix}_INIT" "${CMAKE_SHARED_LINKER_FLAGS${cfg_suffix}_INIT} ${Flag}" PARENT_SCOPE)
endfunction()
# Apply linker search paths per OS/arch; Tizen targets also need the gcc
# toolchain directory passed via -B/-L.
if(LINUX)
add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/lib/${TOOLCHAIN}")
add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib/${TOOLCHAIN}")
endif()
if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$")
if(TIZEN)
add_toolchain_linker_flag("-B${TIZEN_TOOLCHAIN_PATH}")
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib")
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib")
add_toolchain_linker_flag("-L${TIZEN_TOOLCHAIN_PATH}")
endif()
elseif(TARGET_ARCH_NAME MATCHES "^(arm64|x64|riscv64)$")
if(TIZEN)
# 64-bit Tizen uses lib64 directories.
add_toolchain_linker_flag("-B${TIZEN_TOOLCHAIN_PATH}")
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib64")
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib64")
add_toolchain_linker_flag("-L${TIZEN_TOOLCHAIN_PATH}")
add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/lib64")
add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib64")
add_toolchain_linker_flag("-Wl,--rpath-link=${TIZEN_TOOLCHAIN_PATH}")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "s390x")
add_toolchain_linker_flag("--target=${TOOLCHAIN}")
elseif(TARGET_ARCH_NAME STREQUAL "x86")
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/i586-alpine-linux-musl)
add_toolchain_linker_flag("--target=${TOOLCHAIN}")
add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}")
endif()
add_toolchain_linker_flag(-m32)
if(TIZEN)
add_toolchain_linker_flag("-B${TIZEN_TOOLCHAIN_PATH}")
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib")
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib")
add_toolchain_linker_flag("-L${TIZEN_TOOLCHAIN_PATH}")
endif()
elseif(ILLUMOS)
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib/amd64")
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/amd64/lib")
elseif(HAIKU)
add_toolchain_linker_flag("-lnetwork")
add_toolchain_linker_flag("-lroot")
endif()
# Specify compile options
# Drive clang via --target for all cross targets except Android (NDK handles
# it) and FreeBSD (handled above via CMAKE_*_COMPILER_TARGET and ${triple}).
if((TARGET_ARCH_NAME MATCHES "^(arm|arm64|armel|armv6|loongarch64|ppc64le|riscv64|s390x|x64|x86)$" AND NOT ANDROID AND NOT FREEBSD) OR ILLUMOS OR HAIKU)
set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN})
set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN})
set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN})
endif()
if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$")
add_compile_options(-mthumb)
if (NOT DEFINED CLR_ARM_FPU_TYPE)
set (CLR_ARM_FPU_TYPE vfpv3)
endif (NOT DEFINED CLR_ARM_FPU_TYPE)
add_compile_options (-mfpu=${CLR_ARM_FPU_TYPE})
if (NOT DEFINED CLR_ARM_FPU_CAPABILITY)
set (CLR_ARM_FPU_CAPABILITY 0x7)
endif (NOT DEFINED CLR_ARM_FPU_CAPABILITY)
add_definitions (-DCLR_ARM_FPU_CAPABILITY=${CLR_ARM_FPU_CAPABILITY})
# persist variables across multiple try_compile passes
list(APPEND CMAKE_TRY_COMPILE_PLATFORM_VARIABLES CLR_ARM_FPU_TYPE CLR_ARM_FPU_CAPABILITY)
if(TARGET_ARCH_NAME STREQUAL "armel")
add_compile_options(-mfloat-abi=softfp)
endif()
elseif(TARGET_ARCH_NAME STREQUAL "s390x")
add_compile_options("--target=${TOOLCHAIN}")
elseif(TARGET_ARCH_NAME STREQUAL "x86")
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/i586-alpine-linux-musl)
add_compile_options(--target=${TOOLCHAIN})
endif()
add_compile_options(-m32)
add_compile_options(-Wno-error=unused-command-line-argument)
endif()
if(TIZEN)
if(TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64|x86)$")
add_compile_options(-Wno-deprecated-declarations) # compile-time option
add_compile_options(-D__extern_always_inline=inline) # compile-time option
endif()
endif()
# Set LLDB include and library paths for builds that need lldb.
if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$")
# An LLVM cross home directory, when provided, overrides rootfs locations.
if(TARGET_ARCH_NAME STREQUAL "x86")
set(LLVM_CROSS_DIR "$ENV{LLVM_CROSS_HOME}")
else() # arm/armel case
set(LLVM_CROSS_DIR "$ENV{LLVM_ARM_HOME}")
endif()
if(LLVM_CROSS_DIR)
set(WITH_LLDB_LIBS "${LLVM_CROSS_DIR}/lib/" CACHE STRING "")
set(WITH_LLDB_INCLUDES "${LLVM_CROSS_DIR}/include" CACHE STRING "")
set(LLDB_H "${WITH_LLDB_INCLUDES}" CACHE STRING "")
set(LLDB "${LLVM_CROSS_DIR}/lib/liblldb.so" CACHE STRING "")
else()
if(TARGET_ARCH_NAME STREQUAL "x86")
set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/i386-linux-gnu" CACHE STRING "")
# Prefer llvm-3.8 headers when present, falling back to llvm-3.6.
set(CHECK_LLVM_DIR "${CROSS_ROOTFS}/usr/lib/llvm-3.8/include")
if(EXISTS "${CHECK_LLVM_DIR}" AND IS_DIRECTORY "${CHECK_LLVM_DIR}")
set(WITH_LLDB_INCLUDES "${CHECK_LLVM_DIR}")
else()
set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include")
endif()
else() # arm/armel case
set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/${TOOLCHAIN}" CACHE STRING "")
set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include" CACHE STRING "")
endif()
endif()
endif()
# Set C++ standard library options if specified
set(CLR_CMAKE_CXX_STANDARD_LIBRARY "" CACHE STRING "Standard library flavor to link against. Only supported with the Clang compiler.")
if (CLR_CMAKE_CXX_STANDARD_LIBRARY)
  # NOTE(review): the generator expressions here were mangled to "$<$:...>"
  # (an empty condition, which CMake rejects). Restored so --stdlib is only
  # passed when compiling/linking C++ with Clang, per the cache description.
  add_compile_options($<$<COMPILE_LANG_AND_ID:CXX,Clang>:--stdlib=${CLR_CMAKE_CXX_STANDARD_LIBRARY}>)
  add_link_options($<$<COMPILE_LANG_AND_ID:CXX,Clang>:--stdlib=${CLR_CMAKE_CXX_STANDARD_LIBRARY}>)
endif()
option(CLR_CMAKE_CXX_STANDARD_LIBRARY_STATIC "Statically link against the C++ standard library" OFF)
if(CLR_CMAKE_CXX_STANDARD_LIBRARY_STATIC)
  # Same mangling as above; scope static linking of the standard library to
  # C++ link steps.
  add_link_options($<$<LINK_LANGUAGE:CXX>:-static-libstdc++>)
endif()
set(CLR_CMAKE_CXX_ABI_LIBRARY "" CACHE STRING "C++ ABI implementation library to link against. Only supported with the Clang compiler.")
if (CLR_CMAKE_CXX_ABI_LIBRARY)
# The user may specify the ABI library with the 'lib' prefix, like 'libstdc++'. Strip the prefix here so the linker finds the right library.
string(REGEX REPLACE "^lib(.+)" "\\1" CLR_CMAKE_CXX_ABI_LIBRARY ${CLR_CMAKE_CXX_ABI_LIBRARY})
# We need to specify this as a linker-backend option as Clang will filter this option out when linking to libc++.
add_link_options("LINKER:-l${CLR_CMAKE_CXX_ABI_LIBRARY}")
endif()
# Search programs on the host, but libraries/headers/packages only inside the
# target rootfs.
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
================================================
FILE: eng/common/darc-init.ps1
================================================
# Installs the darc CLI dotnet tool.
param (
$darcVersion = $null, # explicit version; when omitted, Maestro is queried
$versionEndpoint = 'https://maestro.dot.net/api/assets/darc-version?api-version=2020-02-20',
$verbosity = 'minimal', # dotnet tool install verbosity
$toolpath = $null # install location; global install when omitted
)
. $PSScriptRoot\tools.ps1
# Installs the darc CLI as a dotnet tool, either globally or into $toolpath.
# Reads $versionEndpoint and $verbosity from script scope.
function InstallDarcCli ($darcVersion, $toolpath) {
  $darcCliPackageName = 'microsoft.dotnet.darc'
  $dotnetRoot = InitializeDotNetCli -install:$true
  $dotnet = "$dotnetRoot\dotnet.exe"

  # Remove any previously installed global copy so the requested version wins.
  if ((& "$dotnet" tool list -g) -like "*$darcCliPackageName*") {
    & "$dotnet" tool uninstall $darcCliPackageName -g
  }

  # If the user didn't explicitly specify the darc version,
  # query the Maestro API for the correct version of darc to install.
  if (-not $darcVersion) {
    $darcVersion = (Invoke-WebRequest -Uri $versionEndpoint -UseBasicParsing).Content
  }

  $arcadeServicesSource = 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'

  Write-Host "Installing Darc CLI version $darcVersion..."
  Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.'

  if ($toolpath) {
    Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --add-source '$arcadeServicesSource' -v $verbosity --tool-path '$toolpath'"
    & "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath"
  }
  else {
    Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --add-source '$arcadeServicesSource' -v $verbosity -g"
    & "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g
  }
}
# Entry point: install darc, reporting failures through pipeline telemetry.
try {
InstallDarcCli $darcVersion $toolpath
}
catch {
Write-Host $_.ScriptStackTrace
Write-PipelineTelemetryError -Category 'Darc' -Message $_
ExitWithExitCode 1
}
================================================
FILE: eng/common/darc-init.sh
================================================
#!/usr/bin/env bash
# Installs the darc CLI dotnet tool (bash counterpart of darc-init.ps1).
source="${BASH_SOURCE[0]}"
darcVersion=''
versionEndpoint='https://maestro.dot.net/api/assets/darc-version?api-version=2020-02-20'
verbosity='minimal'
# Parse arguments (case-insensitive option names).
while [[ $# > 0 ]]; do
opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
case "$opt" in
--darcversion)
darcVersion=$2
shift
;;
--versionendpoint)
versionEndpoint=$2
shift
;;
--verbosity)
verbosity=$2
shift
;;
--toolpath)
toolpath=$2
shift
;;
*)
echo "Invalid argument: $1"
# NOTE(review): `usage` is not defined anywhere in this file — presumably
# expected from a sourced script; verify before relying on this path.
usage
exit 1
;;
esac
shift
done
# resolve $source until the file is no longer a symlink
while [[ -h "$source" ]]; do
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
source="$(readlink "$source")"
# if $source was a relative symlink, we need to resolve it relative to the path where the
# symlink file was located
[[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
. "$scriptroot/tools.sh"
# Without an explicit version, ask the Maestro endpoint which darc to install.
if [ -z "$darcVersion" ]; then
darcVersion=$(curl -X GET "$versionEndpoint" -H "accept: text/plain")
fi
# Install the darc CLI as a dotnet tool (globally, or into $toolpath when set).
# Fix: the original wrapped each dotnet invocation in `echo $(...)`, which
# collapses the command's output whitespace and — because echo always
# succeeds — masks the command's exit status, so failed installs went
# unnoticed (ShellCheck SC2005). The commands now run directly.
function InstallDarcCli {
  local darc_cli_package_name="microsoft.dotnet.darc"
  InitializeDotNetCli true
  local dotnet_root=$_InitializeDotNetCli

  # Uninstall any existing copy first so the requested version is installed.
  if [ -z "$toolpath" ]; then
    local tool_list=$($dotnet_root/dotnet tool list -g)
    if [[ $tool_list = *$darc_cli_package_name* ]]; then
      $dotnet_root/dotnet tool uninstall $darc_cli_package_name -g
    fi
  else
    local tool_list=$($dotnet_root/dotnet tool list --tool-path "$toolpath")
    if [[ $tool_list = *$darc_cli_package_name* ]]; then
      $dotnet_root/dotnet tool uninstall $darc_cli_package_name --tool-path "$toolpath"
    fi
  fi

  local arcadeServicesSource="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json"

  echo "Installing Darc CLI version $darcVersion..."
  echo "You may need to restart your command shell if this is the first dotnet tool you have installed."
  if [ -z "$toolpath" ]; then
    $dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g
  else
    $dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath"
  fi
}

InstallDarcCli
================================================
FILE: eng/common/dotnet-install.cmd
================================================
@echo off
:: Thin wrapper: forwards all arguments to dotnet-install.ps1 next to this script.
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0dotnet-install.ps1""" %*"
================================================
FILE: eng/common/dotnet-install.ps1
================================================
# Installs a .NET runtime or SDK into the repo-local .dotnet directory.
[CmdletBinding(PositionalBinding=$false)]
Param(
[string] $verbosity = 'minimal',
[string] $architecture = '',
[string] $version = 'Latest',
[string] $runtime = 'dotnet',
[string] $RuntimeSourceFeed = '',
[string] $RuntimeSourceFeedKey = ''
)
. $PSScriptRoot\tools.ps1
$dotnetRoot = Join-Path $RepoRoot '.dotnet'
$installdir = $dotnetRoot
try {
# x86 installs go into an 'x86' subdirectory to avoid clobbering the
# default-architecture install.
if ($architecture -and $architecture.Trim() -eq 'x86') {
$installdir = Join-Path $installdir 'x86'
}
InstallDotNet $installdir $version $architecture $runtime $true -RuntimeSourceFeed $RuntimeSourceFeed -RuntimeSourceFeedKey $RuntimeSourceFeedKey
}
catch {
Write-Host $_.ScriptStackTrace
Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
ExitWithExitCode 1
}
ExitWithExitCode 0
================================================
FILE: eng/common/dotnet-install.sh
================================================
#!/usr/bin/env bash
# Installs a .NET runtime or SDK into the repo-local .dotnet directory.
source="${BASH_SOURCE[0]}"
# resolve $source until the file is no longer a symlink
while [[ -h "$source" ]]; do
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
source="$(readlink "$source")"
# if $source was a relative symlink, we need to resolve it relative to the path where the
# symlink file was located
[[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
. "$scriptroot/tools.sh"
# Defaults; overridable via the options parsed below.
version='Latest'
architecture=''
runtime='dotnet'
runtimeSourceFeed=''
runtimeSourceFeedKey=''
# Parse arguments (case-insensitive option names).
while [[ $# > 0 ]]; do
opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
case "$opt" in
-version|-v)
shift
version="$1"
;;
-architecture|-a)
shift
architecture="$1"
;;
-runtime|-r)
shift
runtime="$1"
;;
-runtimesourcefeed)
shift
runtimeSourceFeed="$1"
;;
-runtimesourcefeedkey)
shift
runtimeSourceFeedKey="$1"
;;
*)
# Write-PipelineTelemetryError is a shell function provided by tools.sh.
Write-PipelineTelemetryError -Category 'Build' -Message "Invalid argument: $1"
exit 1
;;
esac
shift
done
# Use uname to determine what the CPU is, see https://en.wikipedia.org/wiki/Uname#Examples
cpuname=$(uname -m)
case $cpuname in
arm64|aarch64)
buildarch=arm64
if [ "$(getconf LONG_BIT)" -lt 64 ]; then
# This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS)
buildarch=arm
fi
;;
loongarch64)
buildarch=loongarch64
;;
amd64|x86_64)
buildarch=x64
;;
armv*l)
buildarch=arm
;;
i[3-6]86)
buildarch=x86
;;
riscv64)
buildarch=riscv64
;;
*)
# Unrecognized machines fall back to x64 rather than failing.
echo "Unknown CPU $cpuname detected, treating it as x64"
buildarch=x64
;;
esac
# Install under <repo root>/.dotnet; cross-architecture installs go into an
# architecture-named subdirectory so they don't collide with the native one.
dotnetRoot="${repo_root}.dotnet"
if [[ $architecture != "" ]] && [[ $architecture != $buildarch ]]; then
  dotnetRoot="$dotnetRoot/$architecture"
fi

InstallDotNet "$dotnetRoot" $version "$architecture" $runtime true $runtimeSourceFeed $runtimeSourceFeedKey || {
  # Fix: `local` is only valid inside a function; in this top-level brace
  # group it errored out and left exit_code unset, losing the real exit code.
  exit_code=$?
  Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "dotnet-install.sh failed (exit code '$exit_code')." >&2
  ExitWithExitCode $exit_code
}

ExitWithExitCode 0
================================================
FILE: eng/common/dotnet.cmd
================================================
@echo off
:: This script is used to install the .NET SDK.
:: It will also invoke the SDK with any provided arguments.
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0dotnet.ps1""" %*"
:: Propagate the PowerShell script's exit code to the caller.
exit /b %ErrorLevel%
================================================
FILE: eng/common/dotnet.ps1
================================================
# This script is used to install the .NET SDK.
# It will also invoke the SDK with any provided arguments.
. $PSScriptRoot\tools.ps1
$dotnetRoot = InitializeDotNetCli -install:$true
# Invoke acquired SDK with args if they are provided
if ($args.count -gt 0) {
# Suppress the first-run banner so output stays clean.
$env:DOTNET_NOLOGO=1
& "$dotnetRoot\dotnet.exe" $args
}
================================================
FILE: eng/common/dotnet.sh
================================================
#!/usr/bin/env bash
# This script is used to install the .NET SDK.
# It will also invoke the SDK with any provided arguments.
source="${BASH_SOURCE[0]}"
# resolve $SOURCE until the file is no longer a symlink
while [[ -h $source ]]; do
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
source="$(readlink "$source")"
# if $source was a relative symlink, we need to resolve it relative to the path where the
# symlink file was located
[[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
source $scriptroot/tools.sh
InitializeDotNetCli true # install
# Invoke acquired SDK with args if they are provided
if [[ $# > 0 ]]; then
# InitializeDotNetCli exports the install root via _InitializeDotNetCli.
__dotnetDir=${_InitializeDotNetCli}
dotnetPath=${__dotnetDir}/dotnet
${dotnetPath} "$@"
fi
================================================
FILE: eng/common/enable-cross-org-publishing.ps1
================================================
# Exposes the given token to NuGet credential providers for the listed
# Azure DevOps organizations by setting two pipeline variables.
param(
[string] $token
)
. $PSScriptRoot\pipeline-logging-functions.ps1
# Write-PipelineSetVariable will no-op if a variable named $ci is not defined
# Since this script is only ever called in AzDO builds, just universally set it
$ci = $true
Write-PipelineSetVariable -Name 'VSS_NUGET_ACCESSTOKEN' -Value $token -IsMultiJobVariable $false
Write-PipelineSetVariable -Name 'VSS_NUGET_URI_PREFIXES' -Value 'https://dnceng.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/dnceng/;https://devdiv.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/devdiv/' -IsMultiJobVariable $false
================================================
FILE: eng/common/generate-locproject.ps1
================================================
Param(
[Parameter(Mandatory=$true)][string] $SourcesDirectory, # Directory where source files live; if using a Localize directory it should live in here
[string] $LanguageSet = 'VS_Main_Languages', # Language set to be used in the LocProject.json
[switch] $UseCheckedInLocProjectJson, # When set, generates a LocProject.json and compares it to one that already exists in the repo; otherwise just generates one
[switch] $CreateNeutralXlfs # Creates neutral xlf files. Only set to false when running locally
)
# Generates LocProject.json files for the OneLocBuild task. OneLocBuildTask is described here:
# https://ceapex.visualstudio.com/CEINTL/_wiki/wikis/CEINTL.wiki/107/Localization-with-OneLocBuild-Task
Set-StrictMode -Version 2.0
$ErrorActionPreference = "Stop"
. $PSScriptRoot\pipeline-logging-functions.ps1
# Optional exclusion list: paths containing any entry are skipped below.
$exclusionsFilePath = "$SourcesDirectory\eng\Localize\LocExclusions.json"
$exclusions = @{ Exclusions = @() }
if (Test-Path -Path $exclusionsFilePath)
{
$exclusions = Get-Content "$exclusionsFilePath" | ConvertFrom-Json
}
Push-Location "$SourcesDirectory" # push location for Resolve-Path -Relative to work
# Template files
# Gather each category of localizable file; for every localized source the
# language-neutral copy is created next to (or above) it so OneLocBuild can
# use it as the translation source.
$jsonFiles = @()
$jsonTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\.template\.config\\localize\\.+\.en\.json" } # .NET templating pattern
$jsonTemplateFiles | ForEach-Object {
$null = $_.Name -Match "(.+)\.[\w-]+\.json" # matches '[filename].[langcode].json
$destinationFile = "$($_.Directory.FullName)\$($Matches.1).json"
$jsonFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru
}
$jsonWinformsTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\\strings\.json" } # current winforms pattern
$wxlFilesV3 = @()
$wxlFilesV5 = @()
$wxlFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\.+\.wxl" -And -Not( $_.Directory.Name -Match "\d{4}" ) } # localized files live in four digit lang ID directories; this excludes them
if (-not $wxlFiles) {
$wxlEnFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\1033\\.+\.wxl" } # pick up en files (1033 = en) specifically so we can copy them to use as the neutral xlf files
if ($wxlEnFiles) {
$wxlFiles = @()
$wxlEnFiles | ForEach-Object {
$destinationFile = "$($_.Directory.Parent.FullName)\$($_.Name)"
$content = Get-Content $_.FullName -Raw
# Split files on schema to select different parser settings in the generated project.
if ($content -like "*http://wixtoolset.org/schemas/v4/wxl*")
{
$wxlFilesV5 += Copy-Item $_.FullName -Destination $destinationFile -PassThru
}
elseif ($content -like "*http://schemas.microsoft.com/wix/2006/localization*")
{
$wxlFilesV3 += Copy-Item $_.FullName -Destination $destinationFile -PassThru
}
}
}
}
$macosHtmlEnFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\.lproj\\.+\.html$" } # add installer HTML files
$macosHtmlFiles = @()
if ($macosHtmlEnFiles) {
$macosHtmlEnFiles | ForEach-Object {
$destinationFile = "$($_.Directory.Parent.FullName)\$($_.Name)"
$macosHtmlFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru
}
}
$xlfFiles = @()
$allXlfFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory\*\*.xlf"
$langXlfFiles = @()
if ($allXlfFiles) {
# Use the first file's language code to find one xlf per source file.
$null = $allXlfFiles[0].FullName -Match "\.([\w-]+)\.xlf" # matches '[langcode].xlf'
$firstLangCode = $Matches.1
$langXlfFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory\*\*.$firstLangCode.xlf"
}
$langXlfFiles | ForEach-Object {
$null = $_.Name -Match "(.+)\.[\w-]+\.xlf" # matches '[filename].[langcode].xlf
$destinationFile = "$($_.Directory.FullName)\$($Matches.1).xlf"
$xlfFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru
}
# Assemble the LocProject.json payload consumed by the OneLocBuild task.
# $jsonFiles, $jsonWinformsTemplateFiles, $wxlFilesV3, $wxlFilesV5,
# $macosHtmlFiles and $exclusions are populated earlier in this script.
$locFiles = $jsonFiles + $jsonWinformsTemplateFiles + $xlfFiles

$locJson = @{
    Projects = @(
        @{
            # Project 1: json/xlf resources for the configured language set.
            LanguageSet = $LanguageSet
            LocItems = @(
                $locFiles | ForEach-Object {
                    $outputPath = "$(($_.DirectoryName | Resolve-Path -Relative) + "\")"
                    # Drop any file whose full path matches an exclusion entry.
                    $continue = $true
                    foreach ($exclusion in $exclusions.Exclusions) {
                        if ($_.FullName.Contains($exclusion))
                        {
                            $continue = $false
                        }
                    }
                    $sourceFile = ($_.FullName | Resolve-Path -Relative)
                    # Neutral xlf copies are temporary unless the caller asked to keep them.
                    if (!$CreateNeutralXlfs -and $_.Extension -eq '.xlf') {
                        Remove-Item -Path $sourceFile
                    }
                    if ($continue)
                    {
                        # Files under an 'en' directory get the language id as a
                        # path segment; everything else gets it appended to the name.
                        if ($_.Directory.Name -eq 'en' -and $_.Extension -eq '.json') {
                            return @{
                                SourceFile = $sourceFile
                                CopyOption = "LangIDOnPath"
                                OutputPath = "$($_.Directory.Parent.FullName | Resolve-Path -Relative)\"
                            }
                        } else {
                            return @{
                                SourceFile = $sourceFile
                                CopyOption = "LangIDOnName"
                                OutputPath = $outputPath
                            }
                        }
                    }
                }
            )
        },
        @{
            # Project 2: WiX v3 .wxl localization files.
            LanguageSet = $LanguageSet
            CloneLanguageSet = "WiX_CloneLanguages"
            LssFiles = @( "wxl_loc.lss" )
            LocItems = @(
                $wxlFilesV3 | ForEach-Object {
                    $outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\"
                    $continue = $true
                    foreach ($exclusion in $exclusions.Exclusions) {
                        if ($_.FullName.Contains($exclusion)) {
                            $continue = $false
                        }
                    }
                    $sourceFile = ($_.FullName | Resolve-Path -Relative)
                    if ($continue)
                    {
                        return @{
                            SourceFile = $sourceFile
                            CopyOption = "LangIDOnPath"
                            OutputPath = $outputPath
                        }
                    }
                }
            )
        },
        @{
            # Project 3: WiX v4+ .wxl files (different .lss schema).
            LanguageSet = $LanguageSet
            CloneLanguageSet = "WiX_CloneLanguages"
            LssFiles = @( "P210WxlSchemaV4.lss" )
            LocItems = @(
                $wxlFilesV5 | ForEach-Object {
                    $outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\"
                    $continue = $true
                    foreach ($exclusion in $exclusions.Exclusions) {
                        if ($_.FullName.Contains($exclusion)) {
                            $continue = $false
                        }
                    }
                    $sourceFile = ($_.FullName | Resolve-Path -Relative)
                    if ($continue)
                    {
                        return @{
                            SourceFile = $sourceFile
                            CopyOption = "LangIDOnPath"
                            OutputPath = $outputPath
                        }
                    }
                }
            )
        },
        @{
            # Project 4: macOS HTML resources, with optional .lci sidecar files.
            LanguageSet = $LanguageSet
            CloneLanguageSet = "VS_macOS_CloneLanguages"
            LssFiles = @( ".\eng\common\loc\P22DotNetHtmlLocalization.lss" )
            LocItems = @(
                $macosHtmlFiles | ForEach-Object {
                    $outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\"
                    $continue = $true
                    foreach ($exclusion in $exclusions.Exclusions) {
                        if ($_.FullName.Contains($exclusion)) {
                            $continue = $false
                        }
                    }
                    $sourceFile = ($_.FullName | Resolve-Path -Relative)
                    $lciFile = $sourceFile + ".lci"
                    if ($continue) {
                        $result = @{
                            SourceFile = $sourceFile
                            CopyOption = "LangIDOnPath"
                            OutputPath = $outputPath
                        }
                        # Attach the .lci instruction file only when it exists on disk.
                        if (Test-Path $lciFile -PathType Leaf) {
                            $result["LciFile"] = $lciFile
                        }
                        return $result
                    }
                }
            )
        }
    )
}
# Serialize and either write LocProject.json (generated mode) or compare the
# generated file against the checked-in one (validation mode).
$json = ConvertTo-Json $locJson -Depth 5
Write-Host "LocProject.json generated:`n`n$json`n`n"
Pop-Location

if (!$UseCheckedInLocProjectJson) {
    New-Item "$SourcesDirectory\eng\Localize\LocProject.json" -Force # Need this to make sure the Localize directory is created
    Set-Content "$SourcesDirectory\eng\Localize\LocProject.json" $json
}
else {
    # Validation: the checked-in LocProject.json must match what we would generate.
    New-Item "$SourcesDirectory\eng\Localize\LocProject-generated.json" -Force # Need this to make sure the Localize directory is created
    Set-Content "$SourcesDirectory\eng\Localize\LocProject-generated.json" $json

    if ((Get-FileHash "$SourcesDirectory\eng\Localize\LocProject-generated.json").Hash -ne (Get-FileHash "$SourcesDirectory\eng\Localize\LocProject.json").Hash) {
        Write-PipelineTelemetryError -Category "OneLocBuild" -Message "Existing LocProject.json differs from generated LocProject.json. Download LocProject-generated.json and compare them."
        exit 1
    }
    else {
        Write-Host "Generated LocProject.json and current LocProject.json are identical."
    }
}
================================================
FILE: eng/common/helixpublish.proj
================================================
msbuild%(Identity)$(WorkItemDirectory)$(WorkItemCommand)$(WorkItemTimeout)
================================================
FILE: eng/common/init-tools-native.cmd
================================================
@echo off
REM Thin wrapper: forwards all arguments to init-tools-native.ps1 next to this
REM script and propagates the PowerShell exit code back to the caller.
powershell -NoProfile -NoLogo -ExecutionPolicy ByPass -command "& """%~dp0init-tools-native.ps1""" %*"
exit /b %ErrorLevel%
================================================
FILE: eng/common/init-tools-native.ps1
================================================
<#
.SYNOPSIS
Entry point script for installing native tools
.DESCRIPTION
Reads $RepoRoot\global.json file to determine native assets to install
and executes installers for those tools
.PARAMETER BaseUri
Base file directory or Url from which to acquire tool archives
.PARAMETER InstallDirectory
Directory to install native toolset. This is a command-line override for the default
Install directory precedence order:
- InstallDirectory command-line override
- NETCOREENG_INSTALL_DIRECTORY environment variable
- (default) %USERPROFILE%/.netcoreeng/native
.PARAMETER Clean
Switch specifying to not install anything, but cleanup native asset folders
.PARAMETER Force
Clean and then install tools
.PARAMETER DownloadRetries
Total number of retry attempts
.PARAMETER RetryWaitTimeInSeconds
Wait time between retry attempts in seconds
.PARAMETER GlobalJsonFile
File path to global.json file
.PARAMETER PathPromotion
Optional switch to enable either promote native tools specified in the global.json to the path (in Azure Pipelines)
or break the build if a native tool is not found on the path (on a local dev machine)
.NOTES
#>
[CmdletBinding(PositionalBinding=$false)]
Param (
    [string] $BaseUri = 'https://netcorenativeassets.blob.core.windows.net/resource-packages/external',
    [string] $InstallDirectory,
    [switch] $Clean = $False,
    [switch] $Force = $False,
    [int] $DownloadRetries = 5,
    [int] $RetryWaitTimeInSeconds = 30,
    [string] $GlobalJsonFile,
    [switch] $PathPromotion
)

# Default to the global.json two levels above this script (the repo root).
if (!$GlobalJsonFile) {
    $GlobalJsonFile = Join-Path (Get-Item $PSScriptRoot).Parent.Parent.FullName 'global.json'
}

Set-StrictMode -version 2.0
$ErrorActionPreference='Stop'

# Bring in Write-PipelineTelemetryError / ExitWithExitCode and the
# CommonLibrary helpers (Get-NativeInstallDirectory, etc.).
. $PSScriptRoot\pipeline-logging-functions.ps1
Import-Module -Name (Join-Path $PSScriptRoot 'native\CommonLibrary.psm1')
try {
    # Define verbose switch if undefined
    $Verbose = $VerbosePreference -Eq 'Continue'

    $EngCommonBaseDir = Join-Path $PSScriptRoot 'native\'
    # Install directory precedence: -InstallDirectory override first, otherwise
    # whatever Get-NativeInstallDirectory resolves (env var or user default).
    $NativeBaseDir = $InstallDirectory
    if (!$NativeBaseDir) {
        $NativeBaseDir = CommonLibrary\Get-NativeInstallDirectory
    }
    $Env:CommonLibrary_NativeInstallDir = $NativeBaseDir
    $InstallBin = Join-Path $NativeBaseDir 'bin'
    $InstallerPath = Join-Path $EngCommonBaseDir 'install-tool.ps1'

    # Process tools list
    Write-Host "Processing $GlobalJsonFile"
    If (-Not (Test-Path $GlobalJsonFile)) {
        Write-Host "Unable to find '$GlobalJsonFile'"
        exit 0
    }
    # 'native-tools' in global.json maps tool name -> requested version.
    $NativeTools = Get-Content($GlobalJsonFile) -Raw |
        ConvertFrom-Json |
        Select-Object -Expand 'native-tools' -ErrorAction SilentlyContinue

    if ($NativeTools) {
        if ($PathPromotion -eq $True) {
            # Path-promotion mode: tools are expected pre-installed on the build
            # image under %SYSTEMDRIVE%\arcade-tools; just put them on PATH.
            $ArcadeToolsDirectory = "$env:SYSTEMDRIVE\arcade-tools"
            if (Test-Path $ArcadeToolsDirectory) { # if this directory exists, we should use native tools on machine
                $NativeTools.PSObject.Properties | ForEach-Object {
                    $ToolName = $_.Name
                    $ToolVersion = $_.Value
                    $InstalledTools = @{}

                    # Only promote tools that are not already resolvable on PATH.
                    if ((Get-Command "$ToolName" -ErrorAction SilentlyContinue) -eq $null) {
                        if ($ToolVersion -eq "latest") {
                            $ToolVersion = ""
                        }
                        # Pick the highest matching '<tool>-<version>*' directory.
                        $ToolDirectories = (Get-ChildItem -Path "$ArcadeToolsDirectory" -Filter "$ToolName-$ToolVersion*" | Sort-Object -Descending)
                        if ($ToolDirectories -eq $null) {
                            Write-Error "Unable to find directory for $ToolName $ToolVersion; please make sure the tool is installed on this image."
                            exit 1
                        }
                        $ToolDirectory = $ToolDirectories[0]
                        # binpath.txt marks a bootstrappable tool and points at its bin directory.
                        $BinPathFile = "$($ToolDirectory.FullName)\binpath.txt"
                        if (-not (Test-Path -Path "$BinPathFile")) {
                            Write-Error "Unable to find binpath.txt in '$($ToolDirectory.FullName)' ($ToolName $ToolVersion); artifact is either installed incorrectly or is not a bootstrappable tool."
                            exit 1
                        }
                        $BinPath = Get-Content "$BinPathFile"
                        $ToolPath = Convert-Path -Path $BinPath
                        Write-Host "Adding $ToolName to the path ($ToolPath)..."
                        # Prepend both for the AzDO agent (logging command) and this process.
                        Write-Host "##vso[task.prependpath]$ToolPath"
                        $env:PATH = "$ToolPath;$env:PATH"
                        $InstalledTools += @{ $ToolName = $ToolDirectory.FullName }
                    }
                }
                return $InstalledTools
            } else {
                # No arcade-tools directory (e.g. local dev machine): each tool
                # must already be on PATH; report any that are missing.
                $NativeTools.PSObject.Properties | ForEach-Object {
                    $ToolName = $_.Name
                    $ToolVersion = $_.Value

                    if ((Get-Command "$ToolName" -ErrorAction SilentlyContinue) -eq $null) {
                        Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message "$ToolName not found on path. Please install $ToolName $ToolVersion before proceeding."
                        Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message "If this is running on a build machine, the arcade-tools directory was not found, which means there's an error with the image."
                    }
                }
                exit 0
            }
        } else {
            # Download/install mode: invoke install-tool.ps1 once per tool.
            $NativeTools.PSObject.Properties | ForEach-Object {
                $ToolName = $_.Name
                $ToolVersion = $_.Value
                $LocalInstallerArguments = @{ ToolName = "$ToolName" }
                $LocalInstallerArguments += @{ InstallPath = "$InstallBin" }
                $LocalInstallerArguments += @{ BaseUri = "$BaseUri" }
                $LocalInstallerArguments += @{ CommonLibraryDirectory = "$EngCommonBaseDir" }
                $LocalInstallerArguments += @{ Version = "$ToolVersion" }

                if ($Verbose) {
                    $LocalInstallerArguments += @{ Verbose = $True }
                }
                if (Get-Variable 'Force' -ErrorAction 'SilentlyContinue') {
                    if($Force) {
                        $LocalInstallerArguments += @{ Force = $True }
                    }
                }
                if ($Clean) {
                    $LocalInstallerArguments += @{ Clean = $True }
                }

                Write-Verbose "Installing $ToolName version $ToolVersion"
                Write-Verbose "Executing '$InstallerPath $($LocalInstallerArguments.Keys.ForEach({"-$_ '$($LocalInstallerArguments.$_)'"}) -join ' ')'"
                & $InstallerPath @LocalInstallerArguments
                if ($LASTEXITCODE -Ne "0") {
                    $errMsg = "$ToolName installation failed"
                    # These two variables are optional and, if present, come from
                    # the calling scope -- TODO confirm which callers set them.
                    if ((Get-Variable 'DoNotAbortNativeToolsInstallationOnFailure' -ErrorAction 'SilentlyContinue') -and $DoNotAbortNativeToolsInstallationOnFailure) {
                        $showNativeToolsWarning = $true
                        if ((Get-Variable 'DoNotDisplayNativeToolsInstallationWarnings' -ErrorAction 'SilentlyContinue') -and $DoNotDisplayNativeToolsInstallationWarnings) {
                            $showNativeToolsWarning = $false
                        }
                        if ($showNativeToolsWarning) {
                            Write-Warning $errMsg
                        }
                        $toolInstallationFailure = $true
                    } else {
                        # We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482
                        Write-Host $errMsg
                        exit 1
                    }
                }
            }

            if ((Get-Variable 'toolInstallationFailure' -ErrorAction 'SilentlyContinue') -and $toolInstallationFailure) {
                # We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482
                Write-Host 'Native tools bootstrap failed'
                exit 1
            }
        }
    }
    else {
        Write-Host 'No native tools defined in global.json'
        exit 0
    }

    if ($Clean) {
        exit 0
    }
    if (Test-Path $InstallBin) {
        Write-Host 'Native tools are available from ' (Convert-Path -Path $InstallBin)
        Write-Host "##vso[task.prependpath]$(Convert-Path -Path $InstallBin)"
        return $InstallBin
    }
    elseif (-not ($PathPromotion)) {
        # Tools were requested but the bin directory never materialized.
        Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message 'Native tools install directory does not exist, installation failed'
        exit 1
    }
    exit 0
}
catch {
    Write-Host $_.ScriptStackTrace
    Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message $_
    ExitWithExitCode 1
}
================================================
FILE: eng/common/init-tools-native.sh
================================================
#!/usr/bin/env bash
source="${BASH_SOURCE[0]}"
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
base_uri='https://netcorenativeassets.blob.core.windows.net/resource-packages/external'
install_directory=''
clean=false
force=false
download_retries=5
retry_wait_time_seconds=30
global_json_file="$(dirname "$(dirname "${scriptroot}")")/global.json"
declare -a native_assets
. $scriptroot/pipeline-logging-functions.sh
. $scriptroot/native/common-library.sh
# Parse command-line options. Every recognized option shifts what it consumed;
# the default case guarantees forward progress for anything unrecognized.
while (($# > 0)); do
  lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
  case $lowerI in
    --baseuri)
      base_uri=$2
      shift 2
      ;;
    --installdirectory)
      install_directory=$2
      shift 2
      ;;
    --clean)
      clean=true
      shift 1
      ;;
    --force)
      force=true
      shift 1
      ;;
    --donotabortonfailure)
      donotabortonfailure=true
      shift 1
      ;;
    --donotdisplaywarnings)
      donotdisplaywarnings=true
      shift 1
      ;;
    --downloadretries)
      download_retries=$2
      shift 2
      ;;
    --retrywaittimeseconds)
      retry_wait_time_seconds=$2
      shift 2
      ;;
    --help)
      echo "Common settings:"
      echo "  --installdirectory                  Directory to install native toolset."
      echo "                                      This is a command-line override for the default"
      echo "                                      Install directory precedence order:"
      echo "                                      - InstallDirectory command-line override"
      echo "                                      - NETCOREENG_INSTALL_DIRECTORY environment variable"
      echo "                                      - (default) %USERPROFILE%/.netcoreeng/native"
      echo ""
      echo "  --clean                             Switch specifying not to install anything, but cleanup native asset folders"
      echo "  --donotabortonfailure               Switch specifiying whether to abort native tools installation on failure"
      echo "  --donotdisplaywarnings              Switch specifiying whether to display warnings during native tools installation on failure"
      echo "  --force                             Clean and then install tools"
      echo "  --help                              Print help and exit"
      echo ""
      echo "Advanced settings:"
      echo "  --baseuri <value>                   Base URI for where to download native tools from"
      echo "  --downloadretries <value>           Number of times a download should be attempted"
      echo "  --retrywaittimeseconds <value>      Wait time between download attempts"
      echo ""
      exit 0
      ;;
    *)
      # Fix: an unrecognized argument previously matched no case and nothing
      # was shifted, so this loop spun forever. Fail fast instead.
      echo "Unrecognized argument: $1"
      exit 1
      ;;
  esac
done
# Populate the global 'native_assets' array with one "KEY='name' VALUE='version'"
# line per entry under global.json's "native-tools" section (for later eval).
function ReadGlobalJsonNativeTools {
  # happy path: we have a proper JSON parsing tool `jq(1)` in PATH!
  if command -v jq &> /dev/null; then

    # jq: read each key/value pair under "native-tools" entry and emit:
    #   KEY='<entry name>' VALUE='<entry value>'
    # followed by a null byte.
    #
    # bash: read line with null byte delimeter and push to array (for later `eval`uation).

    while IFS= read -rd '' line; do
      native_assets+=("$line")
    done < <(jq -r '. |
        select(has("native-tools")) |
        ."native-tools" |
        keys[] as $k |
        @sh "KEY=\($k) VALUE=\(.[$k])\u0000"' "$global_json_file")

    return
  fi

  # Warning: falling back to manually parsing JSON, which is not recommended.

  # Following routine matches the output and escaping logic of jq(1)'s @sh formatter used above.
  # It has been tested with several weird strings with escaped characters in entries (key and value)
  # and results were compared with the output of jq(1) in binary representation using xxd(1);
  # just before the assignment to 'native_assets' array (above and below).

  # try to capture the section under "native-tools".
  if [[ ! "$(cat "$global_json_file")" =~ \"native-tools\"[[:space:]\:\{]*([^\}]+) ]]; then
    return
  fi

  section="${BASH_REMATCH[1]}"
  parseStarted=0
  possibleEnd=0
  escaping=0
  escaped=0
  isKey=1

  # Character-by-character scan: double-quoted tokens alternate key/value;
  # single quotes and backslash escapes are re-quoted to stay eval-safe.
  for (( i=0; i<${#section}; i++ )); do
    char="${section:$i:1}"
    if ! ((parseStarted)) && [[ "$char" =~ [[:space:],:] ]]; then continue; fi

    if ! ((escaping)) && [[ "$char" == "\\" ]]; then
      escaping=1
    elif ((escaping)) && ! ((escaped)); then
      escaped=1
    fi

    if ! ((parseStarted)) && [[ "$char" == "\"" ]]; then
      parseStarted=1
      possibleEnd=0
    elif [[ "$char" == "'" ]]; then
      token="$token'\\\''"
      possibleEnd=0
    elif ((escaping)) || [[ "$char" != "\"" ]]; then
      token="$token$char"
      possibleEnd=1
    fi

    if ((possibleEnd)) && ! ((escaping)) && [[ "$char" == "\"" ]]; then
      # Use printf to unescape token to match jq(1)'s @sh formatting rules.
      # do not use 'token="$(printf "$token")"' syntax, as $() eats the trailing linefeed.
      printf -v token "'$token'"

      if ((isKey)); then
        KEY="$token"
        isKey=0
      else
        line="KEY=$KEY VALUE=$token"
        native_assets+=("$line")
        isKey=1
      fi

      # reset for next token
      parseStarted=0
      token=
    elif ((escaping)) && ((escaped)); then
      escaping=0
      escaped=0
    fi
  done
}
# Resolve the install location: explicit --installdirectory wins, otherwise
# GetNativeInstallDirectory (from native/common-library.sh) decides.
native_base_dir=$install_directory
if [[ -z $install_directory ]]; then
  native_base_dir=$(GetNativeInstallDirectory)
fi

install_bin="${native_base_dir}/bin"
installed_any=false

ReadGlobalJsonNativeTools

if [[ ${#native_assets[@]} -eq 0 ]]; then
  echo "No native tools defined in global.json"
  exit 0;
else
  native_installer_dir="$scriptroot/native"
  for index in "${!native_assets[@]}"; do
    # Each entry is an eval-safe "KEY='tool' VALUE='version'" line.
    eval "${native_assets["$index"]}"

    installer_path="$native_installer_dir/install-$KEY.sh"
    installer_command="$installer_path"
    installer_command+=" --baseuri $base_uri"
    installer_command+=" --installpath $install_bin"
    installer_command+=" --version $VALUE"
    echo $installer_command

    if [[ $force = true ]]; then
      installer_command+=" --force"
    fi

    if [[ $clean = true ]]; then
      installer_command+=" --clean"
    fi

    if [[ -a $installer_path ]]; then
      $installer_command
      if [[ $? != 0 ]]; then
        if [[ $donotabortonfailure = true ]]; then
          if [[ $donotdisplaywarnings != true ]]; then
            Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed"
          fi
        else
          Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed"
          exit 1
        fi
      else
        # Fix: this was '$installed_any = true', which bash parsed as running
        # the command 'false' with arguments '= true' -- the flag was never set.
        installed_any=true
      fi
    else
      if [[ $donotabortonfailure == true ]]; then
        if [[ $donotdisplaywarnings != true ]]; then
          Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed: no install script"
        fi
      else
        Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed: no install script"
        exit 1
      fi
    fi
  done
fi

if [[ $clean = true ]]; then
  exit 0
fi

if [[ -d $install_bin ]]; then
  echo "Native tools are available from $install_bin"
  echo "##vso[task.prependpath]$install_bin"
else
  # Something claimed to install successfully yet the bin dir is missing.
  if [[ $installed_any = true ]]; then
    Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Native tools install directory does not exist, installation failed"
    exit 1
  fi
fi
exit 0
================================================
FILE: eng/common/internal/Directory.Build.props
================================================
falsefalse
================================================
FILE: eng/common/internal/NuGet.config
================================================
================================================
FILE: eng/common/internal/Tools.csproj
================================================
net472falsefalse
================================================
FILE: eng/common/internal-feed-operations.ps1
================================================
# $Operation selects 'setup' (CredProvider) or 'install-restore' (SDK restore);
# $CommitSha/$RepoName/$IsFeedPrivate are accepted for pipeline compatibility.
param(
    [Parameter(Mandatory=$true)][string] $Operation,
    [string] $AuthToken,
    [string] $CommitSha,
    [string] $RepoName,
    [switch] $IsFeedPrivate
)

$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0

# Provides $RepoRoot, Write-PipelineTelemetryError, ExitWithExitCode, InstallDotNetSdk.
. $PSScriptRoot\tools.ps1
# Sets VSS_NUGET_EXTERNAL_FEED_ENDPOINTS based on the "darc-int-*" feeds defined in NuGet.config. This is needed
# in build agents by CredProvider to authenticate the restore requests to internal feeds as specified in
# https://github.com/microsoft/artifacts-credprovider/blob/0f53327cd12fd893d8627d7b08a2171bf5852a41/README.md#environment-variables. This should ONLY be called from identified
# internal builds
# Installs the artifacts-credprovider NuGet plugin and exports
# VSS_NUGET_EXTERNAL_FEED_ENDPOINTS for every 'darc-int-*' feed in NuGet.config
# so restores against internal feeds can authenticate.
function SetupCredProvider {
  param(
    [string] $AuthToken
  )

  # Install the Cred Provider NuGet plugin
  Write-Host 'Setting up Cred Provider NuGet plugin in the agent...'
  Write-Host "Getting 'installcredprovider.ps1' from 'https://github.com/microsoft/artifacts-credprovider'..."

  $url = 'https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.ps1'

  Write-Host "Writing the contents of 'installcredprovider.ps1' locally..."
  Invoke-WebRequest $url -UseBasicParsing -OutFile installcredprovider.ps1

  Write-Host 'Installing plugin...'
  .\installcredprovider.ps1 -Force

  Write-Host "Deleting local copy of 'installcredprovider.ps1'..."
  Remove-Item .\installcredprovider.ps1

  # Fix: this previously evaluated '-Not("<literal string>")', which negates a
  # non-empty string and is therefore always false -- the failure branch was
  # unreachable. Actually probe the plugin directory on disk.
  if (-Not (Test-Path "$env:USERPROFILE\.nuget\plugins\netcore")) {
    Write-PipelineTelemetryError -Category 'Arcade' -Message 'CredProvider plugin was not installed correctly!'
    ExitWithExitCode 1
  }
  else {
    Write-Host 'CredProvider plugin was installed correctly!'
  }

  # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable
  # feeds successfully

  $nugetConfigPath = Join-Path $RepoRoot "NuGet.config"

  if (-Not (Test-Path -Path $nugetConfigPath)) {
    Write-PipelineTelemetryError -Category 'Build' -Message 'NuGet.config file not found in repo root!'
    ExitWithExitCode 1
  }

  # Collect every darc-int-* package source URL declared in NuGet.config.
  $endpoints = New-Object System.Collections.ArrayList
  $nugetConfigPackageSources = Select-Xml -Path $nugetConfigPath -XPath "//packageSources/add[contains(@key, 'darc-int-')]/@value" | foreach{$_.Node.Value}

  if (($nugetConfigPackageSources | Measure-Object).Count -gt 0 ) {
    foreach ($stableRestoreResource in $nugetConfigPackageSources) {
      $trimmedResource = ([string]$stableRestoreResource).Trim()
      [void]$endpoints.Add(@{endpoint="$trimmedResource"; password="$AuthToken"})
    }
  }

  if (($endpoints | Measure-Object).Count -gt 0) {
    $endpointCredentials = @{endpointCredentials=$endpoints} | ConvertTo-Json -Compress

    # Create the environment variables the AzDo way
    Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $endpointCredentials -Properties @{
      'variable' = 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS'
      'issecret' = 'false'
    }

    # We don't want sessions cached since we will be updating the endpoints quite frequently
    Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data 'False' -Properties @{
      'variable' = 'NUGET_CREDENTIALPROVIDER_SESSIONTOKENCACHE_ENABLED'
      'issecret' = 'false'
    }
  }
  else
  {
    Write-Host 'No internal endpoints found in NuGet.config'
  }
}
#Workaround for https://github.com/microsoft/msbuild/issues/4430
# Installs a pinned .NET SDK into a temp dir and runs a restore against an
# empty project to warm the Arcade SDK package cache, then cleans up.
function InstallDotNetSdkAndRestoreArcade {
  $dotnetTempDir = Join-Path $RepoRoot "dotnet"
  $dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*)
  $dotnet = "$dotnetTempDir\dotnet.exe"
  $restoreProjPath = "$PSScriptRoot\restore.proj"

  Write-Host "Installing dotnet SDK version $dotnetSdkVersion to restore Arcade SDK..."
  InstallDotNetSdk "$dotnetTempDir" "$dotnetSdkVersion"

  # An empty restore.proj is enough to trigger the SDK/Arcade restore.
  '' | Out-File "$restoreProjPath"

  & $dotnet restore $restoreProjPath
  Write-Host 'Arcade SDK restored!'

  if (Test-Path -Path $restoreProjPath) {
    Remove-Item $restoreProjPath
  }
  if (Test-Path -Path $dotnetTempDir) {
    Remove-Item $dotnetTempDir -Recurse
  }
}
# Dispatch on -Operation; any failure funnels through the pipeline telemetry
# error path with a non-zero exit code.
try {
  Push-Location $PSScriptRoot

  if ($Operation -like 'setup') {
    SetupCredProvider $AuthToken
  }
  elseif ($Operation -like 'install-restore') {
    InstallDotNetSdkAndRestoreArcade
  }
  else {
    Write-PipelineTelemetryError -Category 'Arcade' -Message "Unknown operation '$Operation'!"
    ExitWithExitCode 1
  }
}
catch {
  Write-Host $_.ScriptStackTrace
  Write-PipelineTelemetryError -Category 'Arcade' -Message $_
  ExitWithExitCode 1
}
finally {
  Pop-Location
}
================================================
FILE: eng/common/internal-feed-operations.sh
================================================
#!/usr/bin/env bash
set -e
# Sets VSS_NUGET_EXTERNAL_FEED_ENDPOINTS based on the "darc-int-*" feeds defined in NuGet.config. This is needed
# in build agents by CredProvider to authenticate the restore requests to internal feeds as specified in
# https://github.com/microsoft/artifacts-credprovider/blob/0f53327cd12fd893d8627d7b08a2171bf5852a41/README.md#environment-variables.
# This should ONLY be called from identified internal builds
# Installs the artifacts-credprovider plugin and exports
# VSS_NUGET_EXTERNAL_FEED_ENDPOINTS for every 'darc-int-*' feed in NuGet.config.
function SetupCredProvider {
  local authToken=$1

  # Install the Cred Provider NuGet plugin
  # Fix: removed a stray '...' token that was passed to echo as an extra
  # argument, and corrected the messages (this path downloads the .sh helper,
  # not installcredprovider.ps1).
  echo "Setting up Cred Provider NuGet plugin in the agent..."
  echo "Getting 'installcredprovider.sh' from 'https://github.com/microsoft/artifacts-credprovider'..."

  local url="https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.sh"

  echo "Writing the contents of 'installcredprovider.sh' locally..."
  local installcredproviderPath="installcredprovider.sh"
  if command -v curl > /dev/null; then
    curl $url > "$installcredproviderPath"
  else
    wget -q -O "$installcredproviderPath" "$url"
  fi

  echo "Installing plugin..."
  . "$installcredproviderPath"

  echo "Deleting local copy of 'installcredprovider.sh'..."
  rm installcredprovider.sh

  if [ ! -d "$HOME/.nuget/plugins" ]; then
    Write-PipelineTelemetryError -category 'Build' 'CredProvider plugin was not installed correctly!'
    ExitWithExitCode 1
  else
    echo "CredProvider plugin was installed correctly!"
  fi

  # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable
  # feeds successfully

  # Fix: was "{$repo_root}NuGet.config", which kept the braces literally in the
  # path. $repo_root comes from tools.sh; presumably it ends with a trailing
  # slash -- TODO confirm against tools.sh.
  local nugetConfigPath="${repo_root}NuGet.config"

  # Fix: was '[ ! "$nugetConfigPath" ]', a non-empty-string test that can never
  # fire; test for the file's existence instead.
  if [ ! -f "$nugetConfigPath" ]; then
    Write-PipelineTelemetryError -category 'Build' "NuGet.config file not found in repo's root!"
    ExitWithExitCode 1
  fi

  local endpoints='['
  local nugetConfigPackageValues=`cat "$nugetConfigPath" | grep "key=\"darc-int-"`
  local pattern="value=\"(.*)\""

  for value in $nugetConfigPackageValues
  do
    if [[ $value =~ $pattern ]]; then
      local endpoint="${BASH_REMATCH[1]}"
      endpoints+="{\"endpoint\": \"$endpoint\", \"password\": \"$authToken\"},"
    fi
  done

  # Drop the trailing comma and close the JSON array.
  endpoints=${endpoints%?}
  endpoints+=']'

  if [ ${#endpoints} -gt 2 ]; then
    local endpointCredentials="{\"endpointCredentials\": "$endpoints"}"

    echo "##vso[task.setvariable variable=VSS_NUGET_EXTERNAL_FEED_ENDPOINTS]$endpointCredentials"
    echo "##vso[task.setvariable variable=NUGET_CREDENTIALPROVIDER_SESSIONTOKENCACHE_ENABLED]False"
  else
    echo "No internal endpoints found in NuGet.config"
  fi
}
# Workaround for https://github.com/microsoft/msbuild/issues/4430
# Installs a pinned .NET SDK into a temp dir, restores an empty project to warm
# the Arcade SDK cache, then removes the temp artifacts.
function InstallDotNetSdkAndRestoreArcade {
  local dotnetTempDir="$repo_root/dotnet"
  local dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*)
  local restoreProjPath="$repo_root/eng/common/restore.proj"

  echo "Installing dotnet SDK version $dotnetSdkVersion to restore Arcade SDK..."
  echo "" > "$restoreProjPath"
  InstallDotNetSdk "$dotnetTempDir" "$dotnetSdkVersion"

  local res=`$dotnetTempDir/dotnet restore $restoreProjPath`
  echo "Arcade SDK restored!"

  # Cleanup
  # Fix: the guards previously tested that the path *strings* were non-empty
  # ('[ "$restoreProjPath" ]'), which is always true; test that the file and
  # directory actually exist so rm cannot fail on a missing path.
  if [ -f "$restoreProjPath" ]; then
    rm "$restoreProjPath"
  fi

  if [ -d "$dotnetTempDir" ]; then
    rm -r $dotnetTempDir
  fi
}
source="${BASH_SOURCE[0]}"
operation=''
authToken=''
repoName=''

# Parse command-line options (each branch shifts its value; the loop footer
# shifts the option name itself).
while [[ $# > 0 ]]; do
  opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
  case "$opt" in
    --operation)
      operation=$2
      shift
      ;;
    --authtoken)
      authToken=$2
      shift
      ;;
    *)
      # Fix: this branch previously invoked 'usage', a function that is not
      # defined anywhere in this script (tools.sh is only sourced later), so
      # the error path itself failed with 'command not found'.
      echo "Invalid argument: $1"
      exit 1
      ;;
  esac

  shift
done

# Resolve symlinks so tools.sh is sourced from the real script location.
while [[ -h "$source" ]]; do
  scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
  source="$(readlink "$source")"
  # if $source was a relative symlink, we need to resolve it relative to the path where the
  # symlink file was located
  [[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"

. "$scriptroot/tools.sh"

if [ "$operation" = "setup" ]; then
  SetupCredProvider $authToken
elif [ "$operation" = "install-restore" ]; then
  InstallDotNetSdkAndRestoreArcade
else
  echo "Unknown operation '$operation'!"
fi
================================================
FILE: eng/common/loc/P22DotNetHtmlLocalization.lss
================================================
================================================
FILE: eng/common/msbuild.ps1
================================================
# Wrapper for invoking MSBuild with repo conventions. The named parameters
# ($verbosity, $warnAsError, $nodeReuse, ...) are read by the MSBuild helper in
# tools.ps1 from script scope -- TODO confirm against tools.ps1.
[CmdletBinding(PositionalBinding=$false)]
Param(
  [string] $verbosity = 'minimal',
  [bool] $warnAsError = $true,
  [bool] $nodeReuse = $true,
  [switch] $ci,
  [switch] $prepareMachine,
  [switch] $excludePrereleaseVS,
  [string] $msbuildEngine = $null,
  [Parameter(ValueFromRemainingArguments=$true)][String[]]$extraArgs
)

. $PSScriptRoot\tools.ps1

try {
  if ($ci) {
    # CI agents must not leave MSBuild node processes running between jobs.
    $nodeReuse = $false
  }

  MSBuild @extraArgs
}
catch {
  Write-Host $_.ScriptStackTrace
  Write-PipelineTelemetryError -Category 'Build' -Message $_
  ExitWithExitCode 1
}

ExitWithExitCode 0
================================================
FILE: eng/common/msbuild.sh
================================================
#!/usr/bin/env bash

# Wrapper for invoking MSBuild with repo conventions; known flags are parsed
# into variables consumed by tools.sh, everything else is forwarded verbatim.
source="${BASH_SOURCE[0]}"

# resolve $source until the file is no longer a symlink
while [[ -h "$source" ]]; do
  scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
  source="$(readlink "$source")"
  # if $source was a relative symlink, we need to resolve it relative to the path where the
  # symlink file was located
  [[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"

verbosity='minimal'
warn_as_error=true
node_reuse=true
prepare_machine=false
extra_args=''

while (($# > 0)); do
  lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
  case $lowerI in
    --verbosity)
      verbosity=$2
      shift 2
      ;;
    --warnaserror)
      warn_as_error=$2
      shift 2
      ;;
    --nodereuse)
      node_reuse=$2
      shift 2
      ;;
    --ci)
      ci=true
      shift 1
      ;;
    --preparemachine)
      prepare_machine=true
      shift 1
      ;;
    *)
      # NOTE(review): unquoted accumulation word-splits arguments containing
      # spaces when expanded below -- matches the existing convention here.
      extra_args="$extra_args $1"
      shift 1
      ;;
  esac
done

. "$scriptroot/tools.sh"

if [[ "$ci" == true ]]; then
  # CI agents must not leave MSBuild node processes running between jobs.
  node_reuse=false
fi

MSBuild $extra_args
ExitWithExitCode 0
================================================
FILE: eng/common/native/CommonLibrary.psm1
================================================
<#
.SYNOPSIS
Helper module to install an archive to a directory
.DESCRIPTION
Helper module to download and extract an archive to a specified directory
.PARAMETER Uri
Uri of artifact to download
.PARAMETER InstallDirectory
Directory to extract artifact contents to
.PARAMETER Force
Force download / extraction if file or contents already exist. Default = False
.PARAMETER DownloadRetries
Total number of retry attempts. Default = 5
.PARAMETER RetryWaitTimeInSeconds
Wait time between retry attempts in seconds. Default = 30
.NOTES
Returns False if download or extraction fail, True otherwise
#>
# Downloads an archive via Get-File and unzips it to $InstallDirectory via
# Expand-Zip; on unzip failure, retries once more with -Force for both steps.
# Returns $True on success, $False otherwise (see module help comment above).
function DownloadAndExtract {
  [CmdletBinding(PositionalBinding=$false)]
  Param (
    [Parameter(Mandatory=$True)]
    [string] $Uri,
    [Parameter(Mandatory=$True)]
    [string] $InstallDirectory,
    [switch] $Force = $False,
    [int] $DownloadRetries = 5,
    [int] $RetryWaitTimeInSeconds = 30
  )
  # Define verbose switch if undefined
  $Verbose = $VerbosePreference -Eq "Continue"

  $TempToolPath = CommonLibrary\Get-TempPathFilename -Path $Uri

  # Download native tool
  $DownloadStatus = CommonLibrary\Get-File -Uri $Uri `
    -Path $TempToolPath `
    -DownloadRetries $DownloadRetries `
    -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds `
    -Force:$Force `
    -Verbose:$Verbose

  if ($DownloadStatus -Eq $False) {
    Write-Error "Download failed from $Uri"
    return $False
  }

  # Extract native tool
  $UnzipStatus = CommonLibrary\Expand-Zip -ZipPath $TempToolPath `
    -OutputDirectory $InstallDirectory `
    -Force:$Force `
    -Verbose:$Verbose

  if ($UnzipStatus -Eq $False) {
    # Retry Download one more time with Force=true
    $DownloadRetryStatus = CommonLibrary\Get-File -Uri $Uri `
      -Path $TempToolPath `
      -DownloadRetries 1 `
      -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds `
      -Force:$True `
      -Verbose:$Verbose

    if ($DownloadRetryStatus -Eq $False) {
      Write-Error "Last attempt of download failed as well"
      return $False
    }

    # Retry unzip again one more time with Force=true
    $UnzipRetryStatus = CommonLibrary\Expand-Zip -ZipPath $TempToolPath `
      -OutputDirectory $InstallDirectory `
      -Force:$True `
      -Verbose:$Verbose
    if ($UnzipRetryStatus -Eq $False)
    {
      Write-Error "Last attempt of unzip failed as well"
      # Clean up partial zips and extracts
      if (Test-Path $TempToolPath) {
        Remove-Item $TempToolPath -Force
      }
      if (Test-Path $InstallDirectory) {
        Remove-Item $InstallDirectory -Force -Recurse
      }
      return $False
    }
  }

  return $True
}
<#
.SYNOPSIS
Download a file, retry on failure
.DESCRIPTION
Download specified file and retry if attempt fails
.PARAMETER Uri
Uri of file to download. If Uri is a local path, the file will be copied instead of downloaded
.PARAMETER Path
Path to download or copy uri file to
.PARAMETER Force
Overwrite existing file if present. Default = False
.PARAMETER DownloadRetries
Total number of retry attempts. Default = 5
.PARAMETER RetryWaitTimeInSeconds
Wait time between retry attempts in seconds Default = 30
#>
# Fetches $Uri to $Path: local paths are copied, URLs are downloaded with
# retries. Returns $True on success, $False after retries are exhausted
# (see module help comment above).
function Get-File {
  [CmdletBinding(PositionalBinding=$false)]
  Param (
    [Parameter(Mandatory=$True)]
    [string] $Uri,
    [Parameter(Mandatory=$True)]
    [string] $Path,
    [int] $DownloadRetries = 5,
    [int] $RetryWaitTimeInSeconds = 30,
    [switch] $Force = $False
  )
  $Attempt = 0

  if ($Force) {
    if (Test-Path $Path) {
      Remove-Item $Path -Force
    }
  }
  if (Test-Path $Path) {
    Write-Host "File '$Path' already exists, skipping download"
    return $True
  }

  $DownloadDirectory = Split-Path -ErrorAction Ignore -Path "$Path" -Parent
  if (-Not (Test-Path $DownloadDirectory)) {
    New-Item -path $DownloadDirectory -force -itemType "Directory" | Out-Null
  }

  # Write to a .tmp sibling first so a partial transfer never lands at $Path.
  $TempPath = "$Path.tmp"
  # Test-Path -IsValid distinguishes filesystem-path syntax from URLs.
  if (Test-Path -IsValid -Path $Uri) {
    Write-Verbose "'$Uri' is a file path, copying temporarily to '$TempPath'"
    Copy-Item -Path $Uri -Destination $TempPath
    Write-Verbose "Moving temporary file to '$Path'"
    Move-Item -Path $TempPath -Destination $Path
    return $?
  }
  else {
    Write-Verbose "Downloading $Uri"
    # Don't display the console progress UI - it's a huge perf hit
    $ProgressPreference = 'SilentlyContinue'
    while($Attempt -Lt $DownloadRetries)
    {
      try {
        Invoke-WebRequest -UseBasicParsing -Uri $Uri -OutFile $TempPath
        Write-Verbose "Downloaded to temporary location '$TempPath'"
        Move-Item -Path $TempPath -Destination $Path
        Write-Verbose "Moved temporary file to '$Path'"
        return $True
      }
      catch {
        $Attempt++
        if ($Attempt -Lt $DownloadRetries) {
          $AttemptsLeft = $DownloadRetries - $Attempt
          Write-Warning "Download failed, $AttemptsLeft attempts remaining, will retry in $RetryWaitTimeInSeconds seconds"
          Start-Sleep -Seconds $RetryWaitTimeInSeconds
        }
        else {
          Write-Error $_
          Write-Error $_.Exception
        }
      }
    }
  }

  return $False
}
<#
.SYNOPSIS
Generate a shim for a native tool

.DESCRIPTION
Creates a wrapper script (shim) that passes arguments forward to native tool assembly

.PARAMETER ShimName
The name of the shim

.PARAMETER ShimDirectory
The directory where shims are stored

.PARAMETER ToolFilePath
Path to file that shim forwards to

.PARAMETER BaseUri
Base uri from which the WinShimmer helper archive is downloaded when not already present

.PARAMETER Force
Replace shim if already present. Default = False

.NOTES
Returns $True if generating shim succeeds, $False otherwise
#>
function New-ScriptShim {
  [CmdletBinding(PositionalBinding=$false)]
  Param (
    [Parameter(Mandatory=$True)]
    [string] $ShimName,
    [Parameter(Mandatory=$True)]
    [string] $ShimDirectory,
    [Parameter(Mandatory=$True)]
    [string] $ToolFilePath,
    [Parameter(Mandatory=$True)]
    [string] $BaseUri,
    [switch] $Force
  )
  try {
    Write-Verbose "Generating '$ShimName' shim"

    if (-Not (Test-Path $ToolFilePath)){
      Write-Error "Specified tool file path '$ToolFilePath' does not exist"
      return $False
    }

    # WinShimmer is a small .NET Framework program that creates .exe shims to bootstrapped programs
    # Many of the checks for installed programs expect a .exe extension for Windows tools, rather
    # than a .bat or .cmd file.
    # Source: https://github.com/dotnet/arcade/tree/master/src/WinShimmer
    if (-Not (Test-Path "$ShimDirectory\WinShimmer\winshimmer.exe")) {
      # NOTE(review): $InstallStatus is never checked; a failed download only
      # surfaces below when winshimmer.exe fails to launch. $Verbose is assumed
      # to be defined in the caller's scope (install-tool.ps1 sets it) - confirm.
      $InstallStatus = DownloadAndExtract -Uri "$BaseUri/windows/winshimmer/WinShimmer.zip" `
        -InstallDirectory $ShimDirectory\WinShimmer `
        -Force:$Force `
        -DownloadRetries 2 `
        -RetryWaitTimeInSeconds 5 `
        -Verbose:$Verbose
    }

    # Remove any stale shim so the new one points at the expected tool version.
    if ((Test-Path (Join-Path $ShimDirectory "$ShimName.exe"))) {
      Write-Host "$ShimName.exe already exists; replacing..."
      Remove-Item (Join-Path $ShimDirectory "$ShimName.exe")
    }

    & "$ShimDirectory\WinShimmer\winshimmer.exe" $ShimName $ToolFilePath $ShimDirectory
    return $True
  }
  catch {
    Write-Host $_
    Write-Host $_.Exception
    return $False
  }
}
<#
.SYNOPSIS
Returns the machine architecture of the host machine
.NOTES
Returns 'x64' on 64 bit machines
Returns 'x86' on 32 bit machines
#>
function Get-MachineArchitecture {
  $Architecture = $Env:PROCESSOR_ARCHITECTURE
  $WowArchitecture = $Env:PROCESSOR_ARCHITEW6432

  # A 32-bit process on a 64-bit OS reports X86; PROCESSOR_ARCHITEW6432 then
  # carries the real machine architecture.
  if ($Architecture -Eq "X86") {
    if (($WowArchitecture -Eq "") -Or ($WowArchitecture -Eq "X86")) {
      return "x86"
    }
    $Architecture = $WowArchitecture
  }

  # All 64-bit architectures map to the single "x64" moniker used by tool archives.
  if ($Architecture -In @("AMD64", "IA64", "ARM64", "LOONGARCH64", "RISCV64")) {
    return "x64"
  }
  return "x86"
}
<#
.SYNOPSIS
Get the name of a temporary folder under the native install directory
#>
function Get-TempDirectory {
  # Temp files live directly under the native tool install root.
  $InstallRoot = Get-NativeInstallDirectory
  return Join-Path $InstallRoot "temp/"
}
function Get-TempPathFilename {
  [CmdletBinding(PositionalBinding=$false)]
  Param (
    [Parameter(Mandatory=$True)]
    [string] $Path
  )
  # Map the source path's file name into the module's temp directory.
  $LeafName = Split-Path $Path -Leaf
  return Join-Path (CommonLibrary\Get-TempDirectory) $LeafName
}
<#
.SYNOPSIS
Returns the base directory to use for native tool installation
.NOTES
Returns the value of the NETCOREENG_INSTALL_DIRECTORY if that environment variable
is set, or otherwise returns an install directory under the %USERPROFILE%
#>
function Get-NativeInstallDirectory {
  # An explicit environment override wins over the default per-user location.
  if ($Env:NETCOREENG_INSTALL_DIRECTORY) {
    return $Env:NETCOREENG_INSTALL_DIRECTORY
  }
  return Join-Path $Env:USERPROFILE ".netcoreeng/native/"
}
<#
.SYNOPSIS
Unzip an archive
.DESCRIPTION
Powershell module to unzip an archive to a specified directory
.PARAMETER ZipPath (Required)
Path to archive to unzip
.PARAMETER OutputDirectory (Required)
Output directory for archive contents
.PARAMETER Force
Overwrite output directory contents if they already exist
.NOTES
- Returns True and does not perform an extraction if output directory already exists but Overwrite is not True.
- Returns True if unzip operation is successful
- Returns False if Overwrite is True and it is unable to remove contents of OutputDirectory
- Returns False if unable to extract zip archive
#>
function Expand-Zip {
  [CmdletBinding(PositionalBinding=$false)]
  Param (
    [Parameter(Mandatory=$True)]
    [string] $ZipPath,
    [Parameter(Mandatory=$True)]
    [string] $OutputDirectory,
    [switch] $Force
  )
  Write-Verbose "Extracting '$ZipPath' to '$OutputDirectory'"
  try {
    # Fast path: destination exists and caller did not ask for overwrite.
    if ((Test-Path $OutputDirectory) -And (-Not $Force)) {
      Write-Host "Directory '$OutputDirectory' already exists, skipping extract"
      return $True
    }
    if (Test-Path $OutputDirectory) {
      Write-Verbose "'Force' is 'True', but '$OutputDirectory' exists, removing directory"
      Remove-Item $OutputDirectory -Force -Recurse
      if ($? -Eq $False) {
        Write-Error "Unable to remove '$OutputDirectory'"
        return $False
      }
    }

    # Extract into a sibling ".tmp" directory first, then move into place, so a
    # failed extraction never leaves a half-populated output directory behind.
    $TempOutputDirectory = Join-Path "$(Split-Path -Parent $OutputDirectory)" "$(Split-Path -Leaf $OutputDirectory).tmp"
    if (Test-Path $TempOutputDirectory) {
      Remove-Item $TempOutputDirectory -Force -Recurse
    }
    New-Item -Path $TempOutputDirectory -Force -ItemType "Directory" | Out-Null

    Add-Type -assembly "system.io.compression.filesystem"
    [io.compression.zipfile]::ExtractToDirectory("$ZipPath", "$TempOutputDirectory")
    # NOTE(review): ExtractToDirectory throws on failure, so $? is expected to
    # remain $True here; the catch block below is the effective failure path.
    if ($? -Eq $False) {
      Write-Error "Unable to extract '$ZipPath'"
      return $False
    }

    Move-Item -Path $TempOutputDirectory -Destination $OutputDirectory
  }
  catch {
    Write-Host $_
    Write-Host $_.Exception
    return $False
  }
  return $True
}
# Public surface of the CommonLibrary module.
Export-ModuleMember -Function DownloadAndExtract, Expand-Zip, Get-File, Get-MachineArchitecture, Get-NativeInstallDirectory, Get-TempDirectory, Get-TempPathFilename, New-ScriptShim
================================================
FILE: eng/common/native/common-library.sh
================================================
#!/usr/bin/env bash
function GetNativeInstallDirectory {
  # Base directory for native tool installs: NETCOREENG_INSTALL_DIRECTORY when
  # set (and non-empty), otherwise a per-user default under $HOME.
  local install_dir=${NETCOREENG_INSTALL_DIRECTORY:-$HOME/.netcoreeng/native/}
  echo $install_dir
  return 0
}
function GetTempDirectory {
  # The temp directory is "temp/" appended to the native install root
  # (the root already ends with a slash).
  local base
  base=$(GetNativeInstallDirectory)
  echo ${base}temp/
  return 0
}
# ExpandZip <archive> <output-directory> [force]
# Extract a (tar-compatible, despite the name) archive into the output
# directory. Skips extraction when the directory already exists unless force
# is "true". Returns 0 on success or skip, 1 on removal/extraction failure.
function ExpandZip {
  local zip_path=$1
  local output_directory=$2
  local force=${3:-false}

  echo "Extracting $zip_path to $output_directory"
  if [[ -d $output_directory ]] && [[ $force = false ]]; then
    echo "Directory '$output_directory' already exists, skipping extract"
    return 0
  fi

  if [[ -d $output_directory ]]; then
    # Fix: message previously began with a stray quote ("'Force flag...").
    echo "Force flag enabled, but '$output_directory' exists. Removing directory"
    # Quoted so paths containing spaces are removed as a single argument.
    rm -rf "$output_directory"
    if [[ $? != 0 ]]; then
      Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Unable to remove '$output_directory'"
      return 1
    fi
  fi

  echo "Creating directory: '$output_directory'"
  mkdir -p "$output_directory"

  echo "Extracting archive"
  tar -xf "$zip_path" -C "$output_directory"
  if [[ $? != 0 ]]; then
    Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Unable to extract '$zip_path'"
    return 1
  fi

  return 0
}
function GetCurrentOS {
  # Map the uname kernel name onto the OS folder names used by tool archives.
  case "$(uname -s)" in
    Linux*)  echo "Linux" ;;
    Darwin*) echo "MacOS" ;;
  esac
  return 0
}
# GetFile <uri-or-path> <destination> [force] [retries] [retry-wait-seconds]
# Copy a local file or download a remote one to <destination>.
#   force:              overwrite an existing destination (default false)
#   retries:            download retry count (default 5)
#   retry-wait-seconds: delay between retries (default 30; curl only - wget
#                       honors the retry count but has no delay option here)
function GetFile {
  local uri=$1
  local path=$2
  local force=${3:-false}
  local download_retries=${4:-5}
  local retry_wait_time_seconds=${5:-30}

  if [[ -f $path ]]; then
    if [[ $force = false ]]; then
      echo "File '$path' already exists. Skipping download"
      return 0
    else
      # Quoted so destinations containing spaces are removed correctly.
      rm -rf "$path"
    fi
  fi

  # A local source file is copied rather than downloaded.
  if [[ -f $uri ]]; then
    echo "'$uri' is a file path, copying file to '$path'"
    cp "$uri" "$path"
    return $?
  fi

  echo "Downloading $uri"
  # Use curl if available, otherwise use wget
  if command -v curl > /dev/null; then
    curl "$uri" -sSL --retry $download_retries --retry-delay $retry_wait_time_seconds --create-dirs -o "$path" --fail
  else
    wget -q -O "$path" "$uri" --tries="$download_retries"
  fi

  return $?
}
function GetTempPathFileName {
  # Compose <temp dir><basename of $1>; the temp dir ends with a slash.
  local source_path=$1
  local staging_dir staging_name
  staging_dir=$(GetTempDirectory)
  staging_name=$(basename $source_path)
  echo $staging_dir$staging_name
  return 0
}
# DownloadAndExtract <uri> <install-dir> [force] [retries] [retry-wait-seconds]
# Download an archive into the temp directory, then unpack it into the
# install directory. Returns 1 (after logging telemetry) on either failure.
function DownloadAndExtract {
  local archive_uri=$1
  local target_directory=$2
  local overwrite=${3:-false}
  local retries=${4:-5}
  local retry_wait=${5:-30}

  local staged_archive
  staged_archive=$(GetTempPathFileName $archive_uri)
  echo "downloading to: $staged_archive"

  # Download file
  if ! GetFile "$archive_uri" "$staged_archive" $overwrite $retries $retry_wait; then
    Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Failed to download '$archive_uri' to '$staged_archive'."
    return 1
  fi

  # Extract File
  echo "extracting from $staged_archive to $target_directory"
  if ! ExpandZip "$staged_archive" "$target_directory" $overwrite $retries $retry_wait; then
    Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Failed to extract '$staged_archive' to '$target_directory'."
    return 1
  fi

  return 0
}
# NewScriptShim <shim-path> <tool-file-path> [force]
# Write a small executable bash wrapper at <shim-path> that forwards to
# <tool-file-path>. Fails when the shim exists (unless force) or when the
# tool path cannot be resolved (after also trying a lower-cased variant).
function NewScriptShim {
  local shimpath=$1
  local tool_file_path=$2
  local force=${3:-false}

  echo "Generating '$shimpath' shim"

  if [[ -f $shimpath ]]; then
    if [[ $force = false ]]; then
      echo "File '$shimpath' already exists." >&2
      return 1
    else
      rm -rf $shimpath
    fi
  fi

  if [[ ! -f $tool_file_path ]]; then
    # try to see if the path is lower cased
    tool_file_path="$(echo $tool_file_path | tr "[:upper:]" "[:lower:]")"
    if [[ ! -f $tool_file_path ]]; then
      Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Specified tool file path:'$tool_file_path' does not exist"
      return 1
    fi
  fi

  # The generated shim forwards only its first argument to the tool.
  # NOTE(review): arguments beyond $1 are dropped; "$@" would forward all - confirm intended.
  local shim_contents=$'#!/usr/bin/env bash\n'
  shim_contents+="SHIMARGS="$'$1\n'
  shim_contents+="$tool_file_path"$' $SHIMARGS\n'

  # Write shim file
  echo "$shim_contents" > $shimpath

  chmod +x $shimpath

  echo "Finished generating shim '$shimpath'"

  return $?
}
================================================
FILE: eng/common/native/init-compiler.sh
================================================
#!/bin/sh
#
# This file detects the C/C++ compiler and exports it to the CC/CXX environment variables
#
# NOTE: some scripts source this file and rely on stdout being empty, make sure
# to not output *anything* here, unless it is an error message that fails the
# build.
# Both inputs are required; fail fast with usage text otherwise.
if [ -z "$build_arch" ] || [ -z "$compiler" ]; then
  echo "Usage..."
  echo "build_arch= compiler= init-compiler.sh"
  echo "Specify the target architecture."
  echo "Specify the name of compiler (clang or gcc)."
  exit 1
fi

# Accept "clangX.Y"/"clang-X.Y" (and gcc equivalents): strip letters/dashes to
# recover the requested version, then normalize $compiler to the bare name.
# maxVersion estimates the newest plausible major version from today's date.
case "$compiler" in
  clang*|-clang*|--clang*)
    # clangx.y or clang-x.y
    version="$(echo "$compiler" | tr -d '[:alpha:]-=')"
    majorVersion="${version%%.*}"

    # LLVM based on v18 released in early 2024, with two releases per year
    maxVersion="$((18 + ((($(date +%Y) - 2024) * 12 + $(date +%-m) - 3) / 6)))"
    compiler=clang
    ;;

  gcc*|-gcc*|--gcc*)
    # gccx.y or gcc-x.y
    version="$(echo "$compiler" | tr -d '[:alpha:]-=')"
    majorVersion="${version%%.*}"

    # GCC based on v14 released in early 2024, with one release per year
    maxVersion="$((14 + ((($(date +%Y) - 2024) * 12 + $(date +%-m) - 3) / 12)))"
    compiler=gcc
    ;;
esac

cxxCompiler="$compiler++"

# clear the existing CC and CXX from environment
CC=
CXX=
LDFLAGS=

# gcc's C++ driver is named "g++", not "gcc++".
if [ "$compiler" = "gcc" ]; then cxxCompiler="g++"; fi
# check_version_exists <major-version>
# Echo the suffix needed to invoke "$compiler" at the requested version:
# "-N" when "$compiler-N" is on PATH, "N" when "$compilerN" is, or -1 when
# neither spelling exists. Also assigns the (non-local) desired_version
# variable; callers use command substitution, so that stays in a subshell.
check_version_exists() {
  desired_version=-1

  # Set up the environment to be used for building with the desired compiler.
  if command -v "$compiler-$1" > /dev/null; then
    desired_version="-$1"
  elif command -v "$compiler$1" > /dev/null; then
    desired_version="$1"
  fi

  echo "$desired_version"
}
__baseOS="$(uname)"
set_compiler_version_from_CC() {
if [ "$__baseOS" = "Darwin" ]; then
# On Darwin, the versions from -version/-dumpversion refer to Xcode
# versions, not llvm versions, so we can't rely on them.
return
fi
version="$("$CC" -dumpversion)"
if [ -z "$version" ]; then
echo "Error: $CC -dumpversion didn't provide a version"
exit 1
fi
# gcc and clang often display 3 part versions. However, gcc can show only 1 part in some environments.
IFS=. read -r majorVersion _ < /dev/null; then
echo "Error: No compatible version of $compiler was found within the range of $minVersion to $maxVersion. Please upgrade your toolchain or specify the compiler explicitly using CLR_CC and CLR_CXX environment variables."
exit 1
fi
CC="$(command -v "$compiler" 2> /dev/null)"
CXX="$(command -v "$cxxCompiler" 2> /dev/null)"
set_compiler_version_from_CC
fi
else
desired_version="$(check_version_exists "$majorVersion")"
if [ "$desired_version" = "-1" ]; then
echo "Error: Could not find specific version of $compiler: $majorVersion."
exit 1
fi
fi
if [ -z "$CC" ]; then
CC="$(command -v "$compiler$desired_version" 2> /dev/null)"
CXX="$(command -v "$cxxCompiler$desired_version" 2> /dev/null)"
if [ -z "$CXX" ]; then CXX="$(command -v "$cxxCompiler" 2> /dev/null)"; fi
set_compiler_version_from_CC
fi
else
if [ ! -f "$CLR_CC" ]; then
echo "Error: CLR_CC is set but path '$CLR_CC' does not exist"
exit 1
fi
CC="$CLR_CC"
CXX="$CLR_CXX"
set_compiler_version_from_CC
fi
if [ -z "$CC" ]; then
echo "Error: Unable to find $compiler."
exit 1
fi
if [ "$__baseOS" != "Darwin" ]; then
# On Darwin, we always want to use the Apple linker.
# Only lld version >= 9 can be considered stable. lld supports s390x starting from 18.0.
if [ "$compiler" = "clang" ] && [ -n "$majorVersion" ] && [ "$majorVersion" -ge 9 ] && { [ "$build_arch" != "s390x" ] || [ "$majorVersion" -ge 18 ]; }; then
if "$CC" -fuse-ld=lld -Wl,--version >/dev/null 2>&1; then
LDFLAGS="-fuse-ld=lld"
fi
fi
fi
SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version" 2> /dev/null)"
export CC CXX LDFLAGS SCAN_BUILD_COMMAND
================================================
FILE: eng/common/native/init-distro-rid.sh
================================================
#!/bin/sh
# getNonPortableDistroRid
#
# Input:
# targetOs: (str)
# targetArch: (str)
# rootfsDir: (str)
#
# Return:
# non-portable rid
getNonPortableDistroRid()
{
    targetOs="$1"
    targetArch="$2"
    rootfsDir="$3"
    nonPortableRid=""

    if [ "$targetOs" = "linux" ]; then
        # shellcheck disable=SC1091
        if [ -e "${rootfsDir}/etc/os-release" ]; then
            # Pull ID/VERSION_ID from the (possibly cross-build rootfs) os-release.
            . "${rootfsDir}/etc/os-release"
            # Only append VERSION_ID when it actually looks like a version number.
            if echo "${VERSION_ID:-}" | grep -qE '^([[:digit:]]|\.)+$'; then
                nonPortableRid="${ID}.${VERSION_ID}-${targetArch}"
            else
                # Rolling release distros either do not set VERSION_ID, set it as blank or
                # set it to non-version looking string (such as TEMPLATE_VERSION_ID on ArchLinux);
                # so omit it here to be consistent with everything else.
                nonPortableRid="${ID}-${targetArch}"
            fi
        elif [ -e "${rootfsDir}/android_platform" ]; then
            # shellcheck disable=SC1091
            . "${rootfsDir}/android_platform"
            nonPortableRid="$RID"
        fi
    fi

    if [ "$targetOs" = "freebsd" ]; then
        # $rootfsDir can be empty. freebsd-version is a shell script and should always work.
        __freebsd_major_version=$("$rootfsDir"/bin/freebsd-version | cut -d'.' -f1)
        nonPortableRid="freebsd.$__freebsd_major_version-${targetArch}"
    elif command -v getprop >/dev/null && getprop ro.product.system.model | grep -qi android; then
        # Running on an Android host: derive the rid from the SDK level.
        __android_sdk_version=$(getprop ro.build.version.sdk)
        nonPortableRid="android.$__android_sdk_version-${targetArch}"
    elif [ "$targetOs" = "illumos" ]; then
        __uname_version=$(uname -v)
        nonPortableRid="illumos-${targetArch}"
    elif [ "$targetOs" = "solaris" ]; then
        __uname_version=$(uname -v)
        __solaris_major_version=$(echo "$__uname_version" | cut -d'.' -f1)
        nonPortableRid="solaris.$__solaris_major_version-${targetArch}"
    elif [ "$targetOs" = "haiku" ]; then
        __uname_release="$(uname -r)"
        nonPortableRid=haiku.r"$__uname_release"-"$targetArch"
    fi

    # Rids are lowercase by convention.
    echo "$nonPortableRid" | tr '[:upper:]' '[:lower:]'
}
# initDistroRidGlobal
#
# Input:
# os: (str)
# arch: (str)
# rootfsDir?: (nullable:string)
#
# Return:
# None
#
# Notes:
# It is important to note that the function does not return anything, but it
# exports the following variables on success:
# __DistroRid : Non-portable rid of the target platform.
# __PortableTargetOS : OS-part of the portable rid that corresponds to the target platform.
initDistroRidGlobal()
{
    targetOs="$1"
    targetArch="$2"
    rootfsDir=""
    # Optional third argument: rootfs of a cross-compilation target.
    if [ $# -ge 3 ]; then
        rootfsDir="$3"
    fi

    if [ -n "${rootfsDir}" ]; then
        # We may have a cross build. Check for the existence of the rootfsDir
        if [ ! -e "${rootfsDir}" ]; then
            echo "Error: rootfsDir has been passed, but the location is not valid."
            exit 1
        fi
    fi

    __DistroRid=$(getNonPortableDistroRid "${targetOs}" "${targetArch}" "${rootfsDir}")

    # Respect a caller-provided __PortableTargetOS; otherwise derive it.
    if [ -z "${__PortableTargetOS:-}" ]; then
        __PortableTargetOS="$targetOs"

        STRINGS="$(command -v strings || true)"
        if [ -z "$STRINGS" ]; then
            STRINGS="$(command -v llvm-strings || true)"
        fi

        # Check for musl-based distros (e.g. Alpine Linux, Void Linux).
        # ldd --version mentions musl on musl systems; when the rootfs ldd cannot
        # be executed on this host, fall back to scanning its binary with strings.
        if "${rootfsDir}/usr/bin/ldd" --version 2>&1 | grep -q musl ||
                ( [ -n "$STRINGS" ] && "$STRINGS" "${rootfsDir}/usr/bin/ldd" 2>&1 | grep -q musl ); then
            __PortableTargetOS="linux-musl"
        fi
    fi

    export __DistroRid __PortableTargetOS
}
================================================
FILE: eng/common/native/init-os-and-arch.sh
================================================
#!/bin/sh

# Detect the host OS and CPU architecture and expose them in the "os" and
# "arch" variables. Intended to be sourced by other scripts.

# Use uname to determine what the OS is.
OSName=$(uname -s | tr '[:upper:]' '[:lower:]')

# Android reports "linux" via uname; detect it through getprop instead.
# Fix: redirect 'command -v' output so sourcing this script stays silent
# (previously the getprop path leaked to stdout on Android hosts).
if command -v getprop >/dev/null && getprop ro.product.system.model 2>&1 | grep -qi android; then
    OSName="android"
fi

case "$OSName" in
freebsd|linux|netbsd|openbsd|sunos|android|haiku)
    os="$OSName" ;;
darwin)
    os=osx ;;
*)
    echo "Unsupported OS $OSName detected!"
    exit 1 ;;
esac

# On Solaris, `uname -m` is discouraged, see https://docs.oracle.com/cd/E36784_01/html/E36870/uname-1.html
# and `uname -p` returns processor type (e.g. i386 on amd64).
# The appropriate tool to determine CPU is isainfo(1) https://docs.oracle.com/cd/E36784_01/html/E36870/isainfo-1.html.
if [ "$os" = "sunos" ]; then
    if uname -o 2>&1 | grep -q illumos; then
        os="illumos"
    else
        os="solaris"
    fi
    CPUName=$(isainfo -n)
else
    # For the rest of the operating systems, use uname(1) to determine what the CPU is.
    CPUName=$(uname -m)
fi

case "$CPUName" in
    arm64|aarch64)
        arch=arm64
        if [ "$(getconf LONG_BIT)" -lt 64 ]; then
            # This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS)
            arch=arm
        fi
        ;;

    loongarch64)
        arch=loongarch64
        ;;

    riscv64)
        arch=riscv64
        ;;

    amd64|x86_64)
        arch=x64
        ;;

    armv7l|armv8l)
        # shellcheck disable=SC1091
        if (NAME=""; . /etc/os-release; test "$NAME" = "Tizen"); then
            arch=armel
        else
            arch=arm
        fi
        ;;

    armv6l)
        arch=armv6
        ;;

    i[3-6]86)
        echo "Unsupported CPU $CPUName detected, build might not succeed!"
        arch=x86
        ;;

    s390x)
        arch=s390x
        ;;

    ppc64le)
        arch=ppc64le
        ;;

    *)
        echo "Unknown CPU $CPUName detected!"
        exit 1
        ;;
esac
================================================
FILE: eng/common/native/install-cmake-test.sh
================================================
#!/usr/bin/env bash

# Installs the "cmake-test" native tool: downloads the archive for the current
# OS from --baseuri, extracts it under --installpath, and generates a shim.

source="${BASH_SOURCE[0]}"
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"

. $scriptroot/common-library.sh

base_uri=
install_path=
version=
clean=false
force=false
download_retries=5
retry_wait_time_seconds=30

while (($# > 0)); do
  lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
  case $lowerI in
    --baseuri)
      base_uri=$2
      shift 2
      ;;
    --installpath)
      install_path=$2
      shift 2
      ;;
    --version)
      version=$2
      shift 2
      ;;
    --clean)
      clean=true
      shift 1
      ;;
    --force)
      force=true
      shift 1
      ;;
    --downloadretries)
      download_retries=$2
      shift 2
      ;;
    --retrywaittimeseconds)
      retry_wait_time_seconds=$2
      shift 2
      ;;
    --help)
      echo "Common settings:"
      echo "  --baseuri                Base file directory or Url from which to acquire tool archives"
      echo "  --installpath            Base directory to install native tool to"
      echo "  --clean                  Don't install the tool, just clean up the current install of the tool"
      echo "  --force                  Force install of tools even if they previously exist"
      echo "  --help                   Print help and exit"
      echo ""
      echo "Advanced settings:"
      echo "  --downloadretries        Total number of retry attempts"
      echo "  --retrywaittimeseconds   Wait time between retry attempts in seconds"
      echo ""
      exit 0
      ;;
    *)
      # Fix: without a default case an unrecognized argument never shifted,
      # causing this loop to spin forever.
      echo "Unrecognized argument '$1'"
      exit 1
      ;;
  esac
done

tool_name="cmake-test"
tool_os=$(GetCurrentOS)
tool_folder="$(echo $tool_os | tr "[:upper:]" "[:lower:]")"
tool_arch="x86_64"
tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch"
tool_install_directory="$install_path/$tool_name/$version"
tool_file_path="$tool_install_directory/$tool_name_moniker/bin/$tool_name"
shim_path="$install_path/$tool_name.sh"
uri="${base_uri}/$tool_folder/$tool_name/$tool_name_moniker.tar.gz"

# Clean up tool and installers
if [[ $clean = true ]]; then
  echo "Cleaning $tool_install_directory"
  if [[ -d $tool_install_directory ]]; then
    rm -rf $tool_install_directory
  fi

  echo "Cleaning $shim_path"
  if [[ -f $shim_path ]]; then
    rm -rf $shim_path
  fi

  tool_temp_path=$(GetTempPathFileName $uri)
  echo "Cleaning $tool_temp_path"
  if [[ -f $tool_temp_path ]]; then
    rm -rf $tool_temp_path
  fi

  exit 0
fi

# Install tool
if [[ -f $tool_file_path ]] && [[ $force = false ]]; then
  echo "$tool_name ($version) already exists, skipping install"
  exit 0
fi

DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds

if [[ $? != 0 ]]; then
  Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Installation failed'
  exit 1
fi

# Generate Shim
# Always rewrite shims so that we are referencing the expected version
NewScriptShim $shim_path $tool_file_path true

if [[ $? != 0 ]]; then
  Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Shim generation failed'
  exit 1
fi

exit 0
================================================
FILE: eng/common/native/install-cmake.sh
================================================
#!/usr/bin/env bash

# Installs the "cmake" native tool: downloads the archive for the current OS
# from --baseuri, extracts it under --installpath, and generates a shim.

source="${BASH_SOURCE[0]}"
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"

. $scriptroot/common-library.sh

base_uri=
install_path=
version=
clean=false
force=false
download_retries=5
retry_wait_time_seconds=30

while (($# > 0)); do
  lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
  case $lowerI in
    --baseuri)
      base_uri=$2
      shift 2
      ;;
    --installpath)
      install_path=$2
      shift 2
      ;;
    --version)
      version=$2
      shift 2
      ;;
    --clean)
      clean=true
      shift 1
      ;;
    --force)
      force=true
      shift 1
      ;;
    --downloadretries)
      download_retries=$2
      shift 2
      ;;
    --retrywaittimeseconds)
      retry_wait_time_seconds=$2
      shift 2
      ;;
    --help)
      echo "Common settings:"
      echo "  --baseuri                Base file directory or Url from which to acquire tool archives"
      echo "  --installpath            Base directory to install native tool to"
      echo "  --clean                  Don't install the tool, just clean up the current install of the tool"
      echo "  --force                  Force install of tools even if they previously exist"
      echo "  --help                   Print help and exit"
      echo ""
      echo "Advanced settings:"
      echo "  --downloadretries        Total number of retry attempts"
      echo "  --retrywaittimeseconds   Wait time between retry attempts in seconds"
      echo ""
      exit 0
      ;;
    *)
      # Fix: without a default case an unrecognized argument never shifted,
      # causing this loop to spin forever.
      echo "Unrecognized argument '$1'"
      exit 1
      ;;
  esac
done

tool_name="cmake"
tool_os=$(GetCurrentOS)
tool_folder="$(echo $tool_os | tr "[:upper:]" "[:lower:]")"
tool_arch="x86_64"
tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch"
tool_install_directory="$install_path/$tool_name/$version"
tool_file_path="$tool_install_directory/$tool_name_moniker/bin/$tool_name"
shim_path="$install_path/$tool_name.sh"
uri="${base_uri}/$tool_folder/$tool_name/$tool_name_moniker.tar.gz"

# Clean up tool and installers
if [[ $clean = true ]]; then
  echo "Cleaning $tool_install_directory"
  if [[ -d $tool_install_directory ]]; then
    rm -rf $tool_install_directory
  fi

  echo "Cleaning $shim_path"
  if [[ -f $shim_path ]]; then
    rm -rf $shim_path
  fi

  tool_temp_path=$(GetTempPathFileName $uri)
  echo "Cleaning $tool_temp_path"
  if [[ -f $tool_temp_path ]]; then
    rm -rf $tool_temp_path
  fi

  exit 0
fi

# Install tool
if [[ -f $tool_file_path ]] && [[ $force = false ]]; then
  echo "$tool_name ($version) already exists, skipping install"
  exit 0
fi

DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds

if [[ $? != 0 ]]; then
  Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Installation failed'
  exit 1
fi

# Generate Shim
# Always rewrite shims so that we are referencing the expected version
NewScriptShim $shim_path $tool_file_path true

if [[ $? != 0 ]]; then
  Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Shim generation failed'
  exit 1
fi

exit 0
================================================
FILE: eng/common/native/install-dependencies.sh
================================================
#!/bin/sh
set -e
# This is a simple script primarily used for CI to install necessary dependencies
#
# Usage:
#
# ./install-dependencies.sh
os="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
if [ -z "$os" ]; then
. "$(dirname "$0")"/init-os-and-arch.sh
fi
case "$os" in
linux)
if [ -e /etc/os-release ]; then
. /etc/os-release
fi
if [ "$ID" = "debian" ] || [ "$ID_LIKE" = "debian" ]; then
apt update
apt install -y build-essential gettext locales cmake llvm clang lld lldb liblldb-dev libunwind8-dev libicu-dev liblttng-ust-dev \
libssl-dev libkrb5-dev pigz cpio
localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8
elif [ "$ID" = "fedora" ] || [ "$ID" = "rhel" ] || [ "$ID" = "azurelinux" ]; then
pkg_mgr="$(command -v tdnf 2>/dev/null || command -v dnf)"
$pkg_mgr install -y cmake llvm lld lldb clang python curl libicu-devel openssl-devel krb5-devel lttng-ust-devel pigz cpio
elif [ "$ID" = "alpine" ]; then
apk add build-base cmake bash curl clang llvm-dev lld lldb krb5-dev lttng-ust-dev icu-dev openssl-dev pigz cpio
else
echo "Unsupported distro. distro: $ID"
exit 1
fi
;;
osx|maccatalyst|ios|iossimulator|tvos|tvossimulator)
echo "Installed xcode version: $(xcode-select -p)"
export HOMEBREW_NO_INSTALL_CLEANUP=1
export HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=1
# Skip brew update for now, see https://github.com/actions/setup-python/issues/577
# brew update --preinstall
brew bundle --no-upgrade --file=- <
[CmdletBinding(PositionalBinding=$false)]
Param (
  [Parameter(Mandatory=$True)]
  [string] $ToolName,
  [Parameter(Mandatory=$True)]
  [string] $InstallPath,
  [Parameter(Mandatory=$True)]
  [string] $BaseUri,
  [Parameter(Mandatory=$True)]
  [string] $Version,
  [string] $CommonLibraryDirectory = $PSScriptRoot,
  [switch] $Force = $False,
  [switch] $Clean = $False,
  [int] $DownloadRetries = 5,
  [int] $RetryWaitTimeInSeconds = 30
)

# Pipeline logging helpers (Write-PipelineTelemetryError et al.).
. $PSScriptRoot\..\pipeline-logging-functions.ps1

# Import common library modules
Import-Module -Name (Join-Path $CommonLibraryDirectory "CommonLibrary.psm1")

try {
  # Define verbose switch if undefined
  $Verbose = $VerbosePreference -Eq "Continue"

  $Arch = CommonLibrary\Get-MachineArchitecture
  # NOTE(review): Get-MachineArchitecture returns "x86"/"x64", never "x32", so
  # the win32 branch below appears unreachable - confirm before relying on it.
  $ToolOs = "win64"
  if($Arch -Eq "x32") {
    $ToolOs = "win32"
  }
  $ToolNameMoniker = "$ToolName-$Version-$ToolOs-$Arch"
  $ToolInstallDirectory = Join-Path $InstallPath "$ToolName\$Version\"
  $Uri = "$BaseUri/windows/$ToolName/$ToolNameMoniker.zip"
  $ShimPath = Join-Path $InstallPath "$ToolName.exe"

  # -Clean: remove the tool, its shim, and any cached download, then exit.
  if ($Clean) {
    Write-Host "Cleaning $ToolInstallDirectory"
    if (Test-Path $ToolInstallDirectory) {
      Remove-Item $ToolInstallDirectory -Force -Recurse
    }

    Write-Host "Cleaning $ShimPath"
    if (Test-Path $ShimPath) {
      Remove-Item $ShimPath -Force
    }

    $ToolTempPath = CommonLibrary\Get-TempPathFilename -Path $Uri
    Write-Host "Cleaning $ToolTempPath"
    if (Test-Path $ToolTempPath) {
      Remove-Item $ToolTempPath -Force
    }

    exit 0
  }

  # Install tool
  if ((Test-Path $ToolInstallDirectory) -And (-Not $Force)) {
    Write-Verbose "$ToolName ($Version) already exists, skipping install"
  }
  else {
    $InstallStatus = CommonLibrary\DownloadAndExtract -Uri $Uri `
      -InstallDirectory $ToolInstallDirectory `
      -Force:$Force `
      -DownloadRetries $DownloadRetries `
      -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds `
      -Verbose:$Verbose

    if ($InstallStatus -Eq $False) {
      Write-PipelineTelemetryError "Installation failed" -Category "NativeToolsetBootstrapping"
      exit 1
    }
  }

  # Locate the installed executable; exactly one copy must exist.
  $ToolFilePath = Get-ChildItem $ToolInstallDirectory -Recurse -Filter "$ToolName.exe" | % { $_.FullName }
  if (@($ToolFilePath).Length -Gt 1) {
    Write-Error "There are multiple copies of $ToolName in $($ToolInstallDirectory): `n$(@($ToolFilePath | out-string))"
    exit 1
  } elseif (@($ToolFilePath).Length -Lt 1) {
    Write-Host "$ToolName was not found in $ToolInstallDirectory."
    exit 1
  }

  # Generate shim
  # Always rewrite shims so that we are referencing the expected version
  $GenerateShimStatus = CommonLibrary\New-ScriptShim -ShimName $ToolName `
    -ShimDirectory $InstallPath `
    -ToolFilePath "$ToolFilePath" `
    -BaseUri $BaseUri `
    -Force:$Force `
    -Verbose:$Verbose

  if ($GenerateShimStatus -Eq $False) {
    Write-PipelineTelemetryError "Generate shim failed" -Category "NativeToolsetBootstrapping"
    # Fix: was 'return 1', which emits a value to the pipeline instead of
    # setting a failing exit code; 'exit 1' matches the other failure paths.
    exit 1
  }

  exit 0
}
catch {
  Write-Host $_.ScriptStackTrace
  Write-PipelineTelemetryError -Category "NativeToolsetBootstrapping" -Message $_
  exit 1
}
================================================
FILE: eng/common/pipeline-logging-functions.ps1
================================================
# Source for this file was taken from https://github.com/microsoft/azure-pipelines-task-lib/blob/11c9439d4af17e6475d9fe058e6b2e03914d17e6/powershell/VstsTaskSdk/LoggingCommandFunctions.ps1 and modified.

# NOTE: You should not be calling these method directly as they are likely to change.  Instead you should be calling the Write-Pipeline* functions defined in tools.ps1

# Prefix every logging command so Azure Pipelines recognizes it ("##vso[...]").
$script:loggingCommandPrefix = '##vso['

# Characters with special meaning inside a VSO logging command and their
# percent-encoded escapes (applied by Format-LoggingCommandData).
$script:loggingCommandEscapeMappings = @( # TODO: WHAT ABOUT "="? WHAT ABOUT "%"?
  New-Object psobject -Property @{ Token = ';' ; Replacement = '%3B' }
  New-Object psobject -Property @{ Token = "`r" ; Replacement = '%0D' }
  New-Object psobject -Property @{ Token = "`n" ; Replacement = '%0A' }
  New-Object psobject -Property @{ Token = "]" ; Replacement = '%5D' }
)
# TODO: BUG: Escape % ???
# TODO: Add test to verify don't need to escape "=".
# Specify "-Force" to force pipeline formatted output even if "$ci" is false or not set
function Write-PipelineTelemetryError {
  [CmdletBinding()]
  param(
    [Parameter(Mandatory = $true)]
    [string]$Category,
    [Parameter(Mandatory = $true)]
    [string]$Message,
    [Parameter(Mandatory = $false)]
    [string]$Type = 'error',
    [string]$ErrCode,
    [string]$SourcePath,
    [string]$LineNumber,
    [string]$ColumnNumber,
    [switch]$AsOutput,
    [switch]$Force)

  # Category is telemetry-only; strip it before splatting the remaining
  # bound parameters through to Write-PipelineTaskError.
  $PSBoundParameters.Remove('Category') | Out-Null

  if ($Force -Or ((Test-Path variable:ci) -And $ci)) {
    # Prefix the message with the marker that engineering telemetry scrapes.
    $Message = "(NETCORE_ENGINEERING_TELEMETRY=$Category) $Message"
  }
  # Replace the bound Message with the (possibly prefixed) value before splatting.
  $PSBoundParameters.Remove('Message') | Out-Null
  $PSBoundParameters.Add('Message', $Message)

  Write-PipelineTaskError @PSBoundParameters
}
# Specify "-Force" to force pipeline formatted output even if "$ci" is false or not set
function Write-PipelineTaskError {
  [CmdletBinding()]
  param(
    [Parameter(Mandatory = $true)]
    [string]$Message,
    [Parameter(Mandatory = $false)]
    [string]$Type = 'error',
    [string]$ErrCode,
    [string]$SourcePath,
    [string]$LineNumber,
    [string]$ColumnNumber,
    [switch]$AsOutput,
    [switch]$Force
  )

  # Outside CI (and without -Force), degrade to plain colored console output.
  if (!$Force -And (-Not (Test-Path variable:ci) -Or !$ci)) {
    if ($Type -eq 'error') {
      Write-Host $Message -ForegroundColor Red
      return
    }
    elseif ($Type -eq 'warning') {
      Write-Host $Message -ForegroundColor Yellow
      return
    }
  }

  # Only error/warning map to the "logissue" logging command; anything else
  # is written as-is.
  if (($Type -ne 'error') -and ($Type -ne 'warning')) {
    Write-Host $Message
    return
  }
  # Force is consumed here; the rest of the bound parameters are splatted on.
  $PSBoundParameters.Remove('Force') | Out-Null
  if (-not $PSBoundParameters.ContainsKey('Type')) {
    $PSBoundParameters.Add('Type', 'error')
  }
  Write-LogIssue @PSBoundParameters
}
function Write-PipelineSetVariable {
  [CmdletBinding()]
  param(
    [Parameter(Mandatory = $true)]
    [string]$Name,
    [string]$Value,
    [switch]$Secret,
    [switch]$AsOutput,
    [bool]$IsMultiJobVariable = $true)

  # Setting pipeline variables is a no-op outside CI builds.
  if (-Not (Test-Path variable:ci) -Or -Not $ci) { return }

  $VariableProperties = @{
    'variable' = $Name
    'isSecret' = $Secret
    'isOutput' = $IsMultiJobVariable
  }
  Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $Value -Properties $VariableProperties -AsOutput:$AsOutput
}
function Write-PipelinePrependPath {
  [CmdletBinding()]
  param(
    [Parameter(Mandatory = $true)]
    [string]$Path,
    [switch]$AsOutput)

  # Prepending to the agent PATH only makes sense inside CI builds.
  if (-Not (Test-Path variable:ci) -Or -Not $ci) { return }
  Write-LoggingCommand -Area 'task' -Event 'prependpath' -Data $Path -AsOutput:$AsOutput
}
function Write-PipelineSetResult {
  [CmdletBinding()]
  param(
    [ValidateSet("Succeeded", "SucceededWithIssues", "Failed", "Cancelled", "Skipped")]
    [Parameter(Mandatory = $true)]
    [string]$Result,
    [string]$Message)

  # Reporting a task result only applies when running under Azure Pipelines.
  if (-Not (Test-Path variable:ci) -Or -Not $ci) { return }

  $ResultProperties = @{ 'result' = $Result }
  Write-LoggingCommand -Area 'task' -Event 'complete' -Data $Message -Properties $ResultProperties
}
<########################################
# Private functions.
########################################>
function Format-LoggingCommandData {
  [CmdletBinding()]
  param([string]$Value, [switch]$Reverse)

  if (!$Value) {
    return ''
  }

  $Mappings = $script:loggingCommandEscapeMappings
  if ($Reverse) {
    # Unescape: apply replacements in reverse declaration order so compound
    # escapes round-trip correctly.
    for ($Index = $Mappings.Length - 1; $Index -ge 0; $Index--) {
      $Mapping = $Mappings[$Index]
      $Value = $Value.Replace($Mapping.Replacement, $Mapping.Token)
    }
  }
  else {
    # Escape: replace each reserved token with its percent-encoded form.
    foreach ($Mapping in $Mappings) {
      $Value = $Value.Replace($Mapping.Token, $Mapping.Replacement)
    }
  }

  return $Value
}
function Format-LoggingCommand {
# Builds the text of a VSO logging command:
#   <prefix><Area>.<Event> <key1>=<value1>;<key2>=<value2>]<Data>
# Property values and the data payload are escaped via Format-LoggingCommandData.
[CmdletBinding()]
param(
[Parameter(Mandatory = $true)]
[string]$Area,
[Parameter(Mandatory = $true)]
[string]$Event,
[string]$Data,
[hashtable]$Properties)
# Append the preamble.
[System.Text.StringBuilder]$sb = New-Object -TypeName System.Text.StringBuilder
$null = $sb.Append($script:loggingCommandPrefix).Append($Area).Append('.').Append($Event)
# Append the properties.
if ($Properties) {
$first = $true
foreach ($key in $Properties.Keys) {
[string]$value = Format-LoggingCommandData $Properties[$key]
# Properties whose (escaped) value is empty are omitted entirely.
if ($value) {
if ($first) {
# First property is separated from the command name by a space...
$null = $sb.Append(' ')
$first = $false
}
else {
# ...subsequent properties are ';'-delimited.
$null = $sb.Append(';')
}
$null = $sb.Append("$key=$value")
}
}
}
# Append the tail and output the value.
$Data = Format-LoggingCommandData $Data
$sb.Append(']').Append($Data).ToString()
}
function Write-LoggingCommand {
# Formats a VSO logging command and either writes it to the host (default) or
# returns it as pipeline output (-AsOutput). Two parameter sets: 'Parameters'
# takes the pieces individually; 'Object' takes a single $Command object with
# Area/Event/Data/Properties members and recurses into the 'Parameters' set.
[CmdletBinding(DefaultParameterSetName = 'Parameters')]
param(
[Parameter(Mandatory = $true, ParameterSetName = 'Parameters')]
[string]$Area,
[Parameter(Mandatory = $true, ParameterSetName = 'Parameters')]
[string]$Event,
[Parameter(ParameterSetName = 'Parameters')]
[string]$Data,
[Parameter(ParameterSetName = 'Parameters')]
[hashtable]$Properties,
[Parameter(Mandatory = $true, ParameterSetName = 'Object')]
$Command,
[switch]$AsOutput)
if ($PSCmdlet.ParameterSetName -eq 'Object') {
Write-LoggingCommand -Area $Command.Area -Event $Command.Event -Data $Command.Data -Properties $Command.Properties -AsOutput:$AsOutput
return
}
# NOTE: PowerShell variables are case-insensitive, so this assignment reuses
# the $Command parameter slot; harmless here because the 'Parameters' set
# never binds $Command.
$command = Format-LoggingCommand -Area $Area -Event $Event -Data $Data -Properties $Properties
if ($AsOutput) {
$command
}
else {
Write-Host $command
}
}
function Write-LogIssue {
# Formats a 'task.logissue' VSO command for an error or warning.
# With -AsOutput the command text is returned; otherwise it is written to the
# host with error/warning coloring taken from the host's PrivateData, falling
# back to Red/Yellow on black when the host does not expose ConsoleColor values.
[CmdletBinding()]
param(
[ValidateSet('warning', 'error')]
[Parameter(Mandatory = $true)]
[string]$Type,
[string]$Message,
[string]$ErrCode,
[string]$SourcePath,
[string]$LineNumber,
[string]$ColumnNumber,
[switch]$AsOutput)
$command = Format-LoggingCommand -Area 'task' -Event 'logissue' -Data $Message -Properties @{
'type' = $Type
'code' = $ErrCode
'sourcepath' = $SourcePath
'linenumber' = $LineNumber
'columnnumber' = $ColumnNumber
}
if ($AsOutput) {
return $command
}
if ($Type -eq 'error') {
# Prefer the host's configured error colors when they are real ConsoleColors.
$foregroundColor = $host.PrivateData.ErrorForegroundColor
$backgroundColor = $host.PrivateData.ErrorBackgroundColor
if ($foregroundColor -isnot [System.ConsoleColor] -or $backgroundColor -isnot [System.ConsoleColor]) {
$foregroundColor = [System.ConsoleColor]::Red
$backgroundColor = [System.ConsoleColor]::Black
}
}
else {
# Same fallback logic for warnings.
$foregroundColor = $host.PrivateData.WarningForegroundColor
$backgroundColor = $host.PrivateData.WarningBackgroundColor
if ($foregroundColor -isnot [System.ConsoleColor] -or $backgroundColor -isnot [System.ConsoleColor]) {
$foregroundColor = [System.ConsoleColor]::Yellow
$backgroundColor = [System.ConsoleColor]::Black
}
}
Write-Host $command -ForegroundColor $foregroundColor -BackgroundColor $backgroundColor
}
================================================
FILE: eng/common/pipeline-logging-functions.sh
================================================
#!/usr/bin/env bash
function Write-PipelineTelemetryError {
# Prefixes the message with "(NETCORE_ENGINEERING_TELEMETRY=<category>)" and
# forwards it, plus any unrecognized options, to Write-PipelineTaskError.
# Outside of CI (and without -force) the raw message goes to stderr instead.
local telemetry_category=''
local force=false
local function_args=()
local message=''
while [[ $# -gt 0 ]]; do
# Normalize a leading '--' to '-' and lowercase the option for matching.
opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
case "$opt" in
-category|-c)
telemetry_category=$2
shift
;;
-force|-f)
force=true
;;
-*)
# Pass-through option: stored as a single "-opt value" word; the unquoted
# expansion at the call below re-splits it on whitespace.
# NOTE(review): this loses option values containing spaces — presumably
# none are passed; verify against callers.
function_args+=("$1 $2")
shift
;;
*)
# First positional argument and everything after it become the message.
message=$*
;;
esac
shift
done
if [[ $force != true ]] && [[ "$ci" != true ]]; then
echo "$message" >&2
return
fi
if [[ $force == true ]]; then
function_args+=("-force")
fi
message="(NETCORE_ENGINEERING_TELEMETRY=$telemetry_category) $message"
function_args+=("$message")
# Intentionally unquoted so the "-opt value" words split back into arguments.
Write-PipelineTaskError ${function_args[@]}
}
function Write-PipelineTaskError {
  # Emits an Azure DevOps '##vso[task.logissue]' command for an error/warning.
  # Options: -type|-t, -sourcepath|-s, -linenumber|-ln, -columnnumber|-cn,
  # -errcode|-e, -force|-f; the remaining arguments form the message text.
  # Outside of CI (and without -force) the message is written to stderr as-is.
  local issue_type="error"
  local issue_sourcepath=''
  local issue_linenumber=''
  local issue_columnnumber=''
  local issue_code=''
  local always_emit=false
  while [[ $# -gt 0 ]]; do
    # Normalize a leading '--' to '-' and lowercase the option for matching.
    opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
    case "$opt" in
      -type|-t)
        issue_type=$2
        shift
        ;;
      -sourcepath|-s)
        issue_sourcepath=$2
        shift
        ;;
      -linenumber|-ln)
        issue_linenumber=$2
        shift
        ;;
      -columnnumber|-cn)
        issue_columnnumber=$2
        shift
        ;;
      -errcode|-e)
        issue_code=$2
        shift
        ;;
      -force|-f)
        always_emit=true
        ;;
      *)
        # First non-option argument: everything from here on is the message.
        break
        ;;
    esac
    shift
  done
  if [[ $always_emit != true && "$ci" != true ]]; then
    echo "$@" >&2
    return
  fi
  # Assemble the command; optional properties are ';'-separated.
  local properties="type=$issue_type"
  if [ -n "$issue_sourcepath" ]; then
    properties="$properties;sourcepath=$issue_sourcepath"
  fi
  if [ -n "$issue_linenumber" ]; then
    properties="$properties;linenumber=$issue_linenumber"
  fi
  if [ -n "$issue_columnnumber" ]; then
    properties="$properties;columnnumber=$issue_columnnumber"
  fi
  if [ -n "$issue_code" ]; then
    properties="$properties;code=$issue_code"
  fi
  echo "##vso[task.logissue $properties]$*"
}
function Write-PipelineSetVariable {
  # Emits an Azure DevOps '##vso[task.setvariable]' command.
  # Options: -name|-n, -value|-v, -secret|-s, -as_output|-a,
  # -is_multi_job_variable|-i <bool>. Returns immediately outside of CI.
  if [[ "$ci" != true ]]; then
    return
  fi
  local name=''
  local value=''
  local secret=false
  local as_output=false
  local is_multi_job_variable=true
  while [[ $# -gt 0 ]]; do
    opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
    case "$opt" in
      -name|-n)
        name=$2
        shift
        ;;
      -value|-v)
        value=$2
        shift
        ;;
      -secret|-s)
        secret=true
        ;;
      -as_output|-a)
        as_output=true
        ;;
      -is_multi_job_variable|-i)
        is_multi_job_variable=$2
        shift
        ;;
    esac
    shift
  done
  # Escape characters significant to logging-command syntax. Bug fix: use '//'
  # so EVERY occurrence is replaced (the single-'/' form only replaced the
  # first one, unlike the PowerShell counterpart which escapes all).
  value=${value//;/%3B}
  value=${value//\\r/%0D}
  value=${value//\\n/%0A}
  value=${value//]/%5D}
  local message="##vso[task.setvariable variable=$name;isSecret=$secret;isOutput=$is_multi_job_variable]$value"
  # Bug fix: the -as_output branch previously ran `$message` — i.e. tried to
  # EXECUTE the command text as a program — instead of printing it. In shell
  # both modes must emit the text on stdout, so a single echo suffices
  # ($as_output is still parsed above for interface compatibility).
  echo "$message"
}
function Write-PipelinePrependPath {
  # Prepends a directory (-path|-p) to PATH for the current shell and, when
  # running in CI, also emits '##vso[task.prependpath]' so later pipeline
  # steps see the change.
  local dir_to_prepend=''
  while [[ $# -gt 0 ]]; do
    opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
    case "$opt" in
      -path|-p)
        dir_to_prepend=$2
        shift
        ;;
    esac
    shift
  done
  # Take effect immediately in this process...
  export PATH="$dir_to_prepend:$PATH"
  # ...and in subsequent pipeline steps when on a CI agent.
  if [[ "$ci" == true ]]; then
    echo "##vso[task.prependpath]$dir_to_prepend"
  fi
}
function Write-PipelineSetResult {
  # Reports the overall task result (-result|-r) with an optional message
  # (-message|-m) via '##vso[task.complete]'. No-op outside of CI.
  local task_result=''
  local task_message=''
  while [[ $# -gt 0 ]]; do
    opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
    case "$opt" in
      -result|-r)
        task_result=$2
        shift
        ;;
      -message|-m)
        task_message=$2
        shift
        ;;
    esac
    shift
  done
  if [[ "$ci" == true ]]; then
    echo "##vso[task.complete result=$task_result;]$task_message"
  fi
}
================================================
FILE: eng/common/post-build/check-channel-consistency.ps1
================================================
# Verifies that every Maestro channel the build should be promoted to is also
# declared in the post-build YAML; exits 1 if any channel is missing.
param(
[Parameter(Mandatory=$true)][string] $PromoteToChannels, # List of channels that the build should be promoted to
[Parameter(Mandatory=$true)][array] $AvailableChannelIds # List of channel IDs available in the YAML implementation
)
try {
$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
# `tools.ps1` checks $ci to perform some actions. Since the post-build
# scripts don't necessarily execute in the same agent that run the
# build.ps1/sh script this variable isn't automatically set.
$ci = $true
$disableConfigureToolsetImport = $true
. $PSScriptRoot\..\tools.ps1
if ($PromoteToChannels -eq "") {
Write-PipelineTaskError -Type 'warning' -Message "This build won't publish assets as it's not configured to any Maestro channel. If that wasn't intended use Darc to configure a default channel using add-default-channel for this branch or to promote it to a channel using add-build-to-channel. See https://github.com/dotnet/arcade/blob/main/Documentation/Darc.md#assigning-an-individual-build-to-a-channel for more info."
ExitWithExitCode 0
}
# Check that every channel that Maestro told to promote the build to
# is available in YAML
# Splitting on \D extracts the numeric IDs regardless of the delimiter used.
$PromoteToChannelsIds = $PromoteToChannels -split "\D" | Where-Object { $_ }
$hasErrors = $false
foreach ($id in $PromoteToChannelsIds) {
if (($id -ne 0) -and ($id -notin $AvailableChannelIds)) {
Write-PipelineTaskError -Message "Channel $id is not present in the post-build YAML configuration! This is an error scenario. Please contact @dnceng."
$hasErrors = $true
}
}
# The `Write-PipelineTaskError` doesn't error the script and we might report several errors
# in the previous lines. The check below makes sure that we return an error state from the
# script if we reported any validation error
if ($hasErrors) {
ExitWithExitCode 1
}
Write-Host 'done.'
}
catch {
Write-Host $_
Write-PipelineTelemetryError -Category 'CheckChannelConsistency' -Message "There was an error while trying to check consistency of Maestro default channels for the build and post-build YAML configuration."
ExitWithExitCode 1
}
================================================
FILE: eng/common/post-build/nuget-validation.ps1
================================================
# This script validates NuGet package metadata information using this
# tool: https://github.com/NuGet/NuGetGallery/tree/jver-verify/src/VerifyMicrosoftPackage
param(
[Parameter(Mandatory=$true)][string] $PackagesPath # Path to where the packages to be validated are
)
# `tools.ps1` checks $ci to perform some actions. Since the post-build
# scripts don't necessarily execute in the same agent that run the
# build.ps1/sh script this variable isn't automatically set.
$ci = $true
$disableConfigureToolsetImport = $true
. $PSScriptRoot\..\tools.ps1
try {
# Delegate the actual verification to the sibling script; the wildcard covers
# every .nupkg directly under $PackagesPath.
& $PSScriptRoot\nuget-verification.ps1 ${PackagesPath}\*.nupkg
}
catch {
# On failure, surface the stack trace and report telemetry before exiting 1.
Write-Host $_.ScriptStackTrace
Write-PipelineTelemetryError -Category 'NuGetValidation' -Message $_
ExitWithExitCode 1
}
================================================
FILE: eng/common/post-build/nuget-verification.ps1
================================================
<#
.SYNOPSIS
Verifies that Microsoft NuGet packages have proper metadata.
.DESCRIPTION
Downloads a verification tool and runs metadata validation on the provided NuGet packages. This script writes an
error if any of the provided packages fail validation. All arguments provided to this PowerShell script that do not
match PowerShell parameters are passed on to the verification tool downloaded during the execution of this script.
.PARAMETER NuGetExePath
The path to the nuget.exe binary to use. If not provided, nuget.exe will be downloaded into the -DownloadPath
directory.
.PARAMETER PackageSource
The package source to use to download the verification tool. If not provided, nuget.org will be used.
.PARAMETER DownloadPath
The directory path to download the verification tool and nuget.exe to. If not provided,
%TEMP%\NuGet.VerifyNuGetPackage will be used.
.PARAMETER args
Arguments that will be passed to the verification tool.
.EXAMPLE
PS> .\verify.ps1 *.nupkg
Verifies the metadata of all .nupkg files in the current working directory.
.EXAMPLE
PS> .\verify.ps1 --help
Displays the help text of the downloaded verification tool.
.LINK
https://github.com/NuGet/NuGetGallery/blob/master/src/VerifyMicrosoftPackage/README.md
#>
# This script was copied from https://github.com/NuGet/NuGetGallery/blob/3e25ad135146676bcab0050a516939d9958bfa5d/src/VerifyMicrosoftPackage/verify.ps1
[CmdletBinding(PositionalBinding = $false)]
param(
[string]$NuGetExePath,
[string]$PackageSource = "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json",
[string]$DownloadPath,
[Parameter(ValueFromRemainingArguments = $true)]
[string[]]$args
)
# The URL to download nuget.exe.
$nugetExeUrl = "https://dist.nuget.org/win-x86-commandline/v4.9.4/nuget.exe"
# The package ID of the verification tool.
$packageId = "NuGet.VerifyMicrosoftPackage"
# The location that nuget.exe and the verification tool will be downloaded to.
if (!$DownloadPath) {
$DownloadPath = (Join-Path $env:TEMP "NuGet.VerifyMicrosoftPackage")
}
# 80-char '=' separator used to visually fence tool output.
$fence = New-Object -TypeName string -ArgumentList '=', 80
# Create the download directory, if it doesn't already exist.
if (!(Test-Path $DownloadPath)) {
New-Item -ItemType Directory $DownloadPath | Out-Null
}
Write-Host "Using download path: $DownloadPath"
if ($NuGetExePath) {
$nuget = $NuGetExePath
} else {
$downloadedNuGetExe = Join-Path $DownloadPath "nuget.exe"
# Download nuget.exe, if it doesn't already exist.
if (!(Test-Path $downloadedNuGetExe)) {
Write-Host "Downloading nuget.exe from $nugetExeUrl..."
# Suppress the progress bar: it slows Invoke-WebRequest down considerably.
$ProgressPreference = 'SilentlyContinue'
try {
Invoke-WebRequest $nugetExeUrl -UseBasicParsing -OutFile $downloadedNuGetExe
$ProgressPreference = 'Continue'
} catch {
$ProgressPreference = 'Continue'
Write-Error $_
Write-Error "nuget.exe failed to download."
# NOTE(review): bare 'exit' exits with code 0 — callers presumably rely on
# the Write-Error output rather than the exit code; verify.
exit
}
}
$nuget = $downloadedNuGetExe
}
Write-Host "Using nuget.exe path: $nuget"
Write-Host " "
# Download the latest version of the verification tool.
# ($packageSource resolves to the $PackageSource parameter — PowerShell
# variable names are case-insensitive.)
Write-Host "Downloading the latest version of $packageId from $packageSource..."
Write-Host $fence
& $nuget install $packageId `
-Prerelease `
-OutputDirectory $DownloadPath `
-Source $PackageSource
Write-Host $fence
Write-Host " "
if ($LASTEXITCODE -ne 0) {
Write-Error "nuget.exe failed to fetch the verify tool."
exit
}
# Find the most recently downloaded tool
Write-Host "Finding the most recently downloaded verification tool."
$verifyProbePath = Join-Path $DownloadPath "$packageId.*"
$verifyPath = Get-ChildItem -Path $verifyProbePath -Directory `
| Sort-Object -Property LastWriteTime -Descending `
| Select-Object -First 1
$verify = Join-Path $verifyPath "tools\NuGet.VerifyMicrosoftPackage.exe"
Write-Host "Using verification tool: $verify"
Write-Host " "
# Execute the verification tool.
# $args holds everything not bound to a named parameter (package globs, flags).
Write-Host "Executing the verify tool..."
Write-Host $fence
& $verify $args
Write-Host $fence
Write-Host " "
# Respond to the exit code.
if ($LASTEXITCODE -ne 0) {
Write-Error "The verify tool found some problems."
} else {
Write-Output "The verify tool succeeded."
}
================================================
FILE: eng/common/post-build/publish-using-darc.ps1
================================================
# Promotes a BAR build to its default Maestro channels using the darc CLI.
param(
[Parameter(Mandatory=$true)][int] $BuildId,
[Parameter(Mandatory=$true)][int] $PublishingInfraVersion,
[Parameter(Mandatory=$true)][string] $AzdoToken,
[Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net',
[Parameter(Mandatory=$true)][string] $WaitPublishingFinish,
[Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters,
[Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters,
[Parameter(Mandatory=$false)][string] $RequireDefaultChannels,
[Parameter(Mandatory=$false)][string] $SkipAssetsPublishing,
[Parameter(Mandatory=$false)][string] $runtimeSourceFeed,
[Parameter(Mandatory=$false)][string] $runtimeSourceFeedKey
)
try {
# `tools.ps1` checks $ci to perform some actions. Since the post-build
# scripts don't necessarily execute in the same agent that run the
# build.ps1/sh script this variable isn't automatically set.
$ci = $true
$disableConfigureToolsetImport = $true
. $PSScriptRoot\..\tools.ps1
$darc = Get-Darc
# Translate the optional string parameters into darc CLI switches.
# (The boolean-ish parameters arrive as strings from YAML, hence the
# literal "true"/"false" comparisons.)
$optionalParams = [System.Collections.ArrayList]::new()
if ("" -ne $ArtifactsPublishingAdditionalParameters) {
$optionalParams.Add("--artifact-publishing-parameters") | Out-Null
$optionalParams.Add($ArtifactsPublishingAdditionalParameters) | Out-Null
}
if ("" -ne $SymbolPublishingAdditionalParameters) {
$optionalParams.Add("--symbol-publishing-parameters") | Out-Null
$optionalParams.Add($SymbolPublishingAdditionalParameters) | Out-Null
}
if ("false" -eq $WaitPublishingFinish) {
$optionalParams.Add("--no-wait") | Out-Null
}
if ("true" -eq $RequireDefaultChannels) {
$optionalParams.Add("--default-channels-required") | Out-Null
}
if ("true" -eq $SkipAssetsPublishing) {
$optionalParams.Add("--skip-assets-publishing") | Out-Null
}
# ($buildId resolves to the $BuildId parameter — PowerShell variable names
# are case-insensitive.)
& $darc add-build-to-channel `
--id $buildId `
--publishing-infra-version $PublishingInfraVersion `
--default-channels `
--source-branch main `
--azdev-pat "$AzdoToken" `
--bar-uri "$MaestroApiEndPoint" `
--ci `
--verbose `
@optionalParams
if ($LastExitCode -ne 0) {
Write-Host "Problems using Darc to promote build ${buildId} to default channels. Stopping execution..."
exit 1
}
Write-Host 'done.'
}
catch {
Write-Host $_
Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "There was an error while trying to publish build '$BuildId' to default channels."
ExitWithExitCode 1
}
================================================
FILE: eng/common/post-build/redact-logs.ps1
================================================
# Redacts sensitive strings from MSBuild binlogs under $InputPath, in place,
# using the 'binlogtool' dotnet tool.
[CmdletBinding(PositionalBinding=$False)]
param(
[Parameter(Mandatory=$true, Position=0)][string] $InputPath,
[Parameter(Mandatory=$true)][string] $BinlogToolVersion,
[Parameter(Mandatory=$false)][string] $DotnetPath,
[Parameter(Mandatory=$false)][string] $PackageFeed = 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json',
# File with strings to redact - separated by newlines.
# For comments start the line with '# ' - such lines are ignored
[Parameter(Mandatory=$false)][string] $TokensFilePath,
[Parameter(ValueFromRemainingArguments=$true)][String[]]$TokensToRedact,
[Parameter(Mandatory=$false)][string] $runtimeSourceFeed,
[Parameter(Mandatory=$false)][string] $runtimeSourceFeedKey)
try {
$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
# `tools.ps1` checks $ci to perform some actions. Since the post-build
# scripts don't necessarily execute in the same agent that run the
# build.ps1/sh script this variable isn't automatically set.
$ci = $true
$disableConfigureToolsetImport = $true
. $PSScriptRoot\..\tools.ps1
$packageName = 'binlogtool'
$dotnet = $DotnetPath
if (!$dotnet) {
# No explicit dotnet given: fall back to the repo-local SDK.
$dotnetRoot = InitializeDotNetCli -install:$true
$dotnet = "$dotnetRoot\dotnet.exe"
}
# Remove any stale global install of the tool so the local install below wins.
$toolList = & "$dotnet" tool list -g
if ($toolList -like "*$packageName*") {
& "$dotnet" tool uninstall $packageName -g
}
$toolPath = "$PSScriptRoot\..\..\..\.tools"
$verbosity = 'minimal'
New-Item -ItemType Directory -Force -Path $toolPath
Push-Location -Path $toolPath
try {
Write-Host "Installing Binlog redactor CLI..."
Write-Host "'$dotnet' new tool-manifest"
& "$dotnet" new tool-manifest
Write-Host "'$dotnet' tool install $packageName --local --add-source '$PackageFeed' -v $verbosity --version $BinlogToolVersion"
& "$dotnet" tool install $packageName --local --add-source "$PackageFeed" -v $verbosity --version $BinlogToolVersion
# Bug fix: guard against an empty -TokensFilePath. Test-Path throws a
# binding error on an empty string, which under
# $ErrorActionPreference = 'Stop' aborts the whole script whenever the
# optional parameter is omitted. (Matches the guard pattern used for
# $SymbolExclusionFile in symbols-validation.ps1.)
if ($TokensFilePath -and (Test-Path $TokensFilePath)) {
Write-Host "Adding additional sensitive data for redaction from file: " $TokensFilePath
$TokensToRedact += Get-Content -Path $TokensFilePath | Foreach {$_.Trim()} | Where { $_ -notmatch "^# " }
}
$optionalParams = [System.Collections.ArrayList]::new()
Foreach ($p in $TokensToRedact)
{
if($p -match '^\$\(.*\)$')
{
# An unexpanded AzDO macro like $(SECRET) is not a real secret value.
Write-Host ("Ignoring token {0} as it is probably unexpanded AzDO variable" -f $p)
}
elseif($p)
{
$optionalParams.Add("-p:" + $p) | Out-Null
}
}
& $dotnet binlogtool redact --input:$InputPath --recurse --in-place `
@optionalParams
if ($LastExitCode -ne 0) {
# Redaction failures are reported as warnings only; they don't fail the build.
Write-PipelineTelemetryError -Category 'Redactor' -Type 'warning' -Message "Problems using Redactor tool (exit code: $LastExitCode). But ignoring them now."
}
}
finally {
Pop-Location
}
Write-Host 'done.'
}
catch {
Write-Host $_
Write-PipelineTelemetryError -Category 'Redactor' -Message "There was an error while trying to redact logs. Error: $_"
ExitWithExitCode 1
}
================================================
FILE: eng/common/post-build/sourcelink-validation.ps1
================================================
# Validates that SourceLink URLs embedded in the symbol packages under
# $InputPath resolve against the GitHub repo/commit that produced them.
param(
[Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where Symbols.NuGet packages to be checked are stored
[Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
[Parameter(Mandatory=$false)][string] $GHRepoName, # GitHub name of the repo including the Org. E.g., dotnet/arcade
[Parameter(Mandatory=$false)][string] $GHCommit, # GitHub commit SHA used to build the packages
[Parameter(Mandatory=$true)][string] $SourcelinkCliVersion # Version of SourceLink CLI to use
)
$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
# `tools.ps1` checks $ci to perform some actions. Since the post-build
# scripts don't necessarily execute in the same agent that run the
# build.ps1/sh script this variable isn't automatically set.
$ci = $true
$disableConfigureToolsetImport = $true
. $PSScriptRoot\..\tools.ps1
# Cache/HashMap (File -> Exist flag) used to consult whether a file exist
# in the repository at a specific commit point. This is populated by inserting
# all files present in the repo at a specific commit point.
$global:RepoFiles = @{}
# Maximum number of jobs to run in parallel
$MaxParallelJobs = 16
# Max attempts per link before it is reported as broken.
$MaxRetries = 5
$RetryWaitTimeInSeconds = 30
# Wait time between check for system load
$SecondsBetweenLoadChecks = 10
# Nothing to do when the input directory is absent or not specified.
if (!$InputPath -or !(Test-Path $InputPath)){
Write-Host "No files to validate."
ExitWithExitCode 0
}
# Scriptblock run as a background job per package: extracts the package,
# runs the sourcelink CLI over each .dll/.exe/.pdb, and HEAD-checks every
# GitHub URL (with retries). Returns @{ result; packagePath } where result 0
# means all links resolved. ($using: pulls script-scope values into the job.)
$ValidatePackage = {
param(
[string] $PackagePath # Full path to a Symbols.NuGet package
)
. $using:PSScriptRoot\..\tools.ps1
# Ensure input file exist
if (!(Test-Path $PackagePath)) {
Write-Host "Input file does not exist: $PackagePath"
return [pscustomobject]@{
result = 1
packagePath = $PackagePath
}
}
# Extensions for which we'll look for SourceLink information
# For now we'll only care about Portable & Embedded PDBs
$RelevantExtensions = @('.dll', '.exe', '.pdb')
Write-Host -NoNewLine 'Validating ' ([System.IO.Path]::GetFileName($PackagePath)) '...'
$PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
$ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId
$FailedFiles = 0
Add-Type -AssemblyName System.IO.Compression.FileSystem
[System.IO.Directory]::CreateDirectory($ExtractPath) | Out-Null
try {
$zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)
$zip.Entries |
Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
ForEach-Object {
$FileName = $_.FullName
$Extension = [System.IO.Path]::GetExtension($_.Name)
# Extract under a GUID name to avoid collisions between entries.
$FakeName = -Join((New-Guid), $Extension)
$TargetFile = Join-Path -Path $ExtractPath -ChildPath $FakeName
# We ignore resource DLLs
# NOTE(review): inside ForEach-Object this 'return' acts like 'continue'
# but also emits the object into the pipeline — presumably harmless here
# since the pipeline output is discarded; verify.
if ($FileName.EndsWith('.resources.dll')) {
return [pscustomobject]@{
result = 0
packagePath = $PackagePath
}
}
[System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
# Nested scriptblock: runs sourcelink.exe on one extracted module and
# HEAD-checks each http(s) URL it reports, incrementing $FailedFiles
# (by ref) when any link in the file is broken.
$ValidateFile = {
param(
[string] $FullPath, # Full path to the module that has to be checked
[string] $RealPath,
[ref] $FailedFiles
)
$sourcelinkExe = "$env:USERPROFILE\.dotnet\tools"
$sourcelinkExe = Resolve-Path "$sourcelinkExe\sourcelink.exe"
$SourceLinkInfos = & $sourcelinkExe print-urls $FullPath | Out-String
if ($LASTEXITCODE -eq 0 -and -not ([string]::IsNullOrEmpty($SourceLinkInfos))) {
$NumFailedLinks = 0
# We only care about Http addresses
$Matches = (Select-String '(http[s]?)(:\/\/)([^\s,]+)' -Input $SourceLinkInfos -AllMatches).Matches
if ($Matches.Count -ne 0) {
$Matches.Value |
ForEach-Object {
$Link = $_
$CommitUrl = "https://raw.githubusercontent.com/${using:GHRepoName}/${using:GHCommit}/"
$FilePath = $Link.Replace($CommitUrl, "")
$Status = 200
# Links already present in the repo-file cache skip the web request.
$Cache = $using:RepoFiles
$attempts = 0
while ($attempts -lt $using:MaxRetries) {
if ( !($Cache.ContainsKey($FilePath)) ) {
try {
$Uri = $Link -as [System.URI]
if ($Link -match "submodules") {
# Skip submodule links until sourcelink properly handles submodules
$Status = 200
}
elseif ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match 'github' -or $Uri.Host -match 'githubusercontent')) {
# Only GitHub links are valid
$Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode
}
else {
# If it's not a github link, we want to break out of the loop and not retry.
$Status = 0
$attempts = $using:MaxRetries
}
}
catch {
Write-Host $_
$Status = 0
}
}
if ($Status -ne 200) {
$attempts++
if ($attempts -lt $using:MaxRetries)
{
$attemptsLeft = $using:MaxRetries - $attempts
Write-Warning "Download failed, $attemptsLeft attempts remaining, will retry in $using:RetryWaitTimeInSeconds seconds"
Start-Sleep -Seconds $using:RetryWaitTimeInSeconds
}
else {
# Retries exhausted: report the file header once, then each bad link.
if ($NumFailedLinks -eq 0) {
if ($FailedFiles.Value -eq 0) {
Write-Host
}
Write-Host "`tFile $RealPath has broken links:"
}
Write-Host "`t`tFailed to retrieve $Link"
$NumFailedLinks++
}
}
else {
break
}
}
}
}
if ($NumFailedLinks -ne 0) {
$FailedFiles.value++
$global:LASTEXITCODE = 1
}
}
}
&$ValidateFile $TargetFile $FileName ([ref]$FailedFiles)
}
}
catch {
Write-Host $_
}
finally {
$zip.Dispose()
}
if ($FailedFiles -eq 0) {
Write-Host 'Passed.'
return [pscustomobject]@{
result = 0
packagePath = $PackagePath
}
}
else {
Write-PipelineTelemetryError -Category 'SourceLink' -Message "$PackagePath has broken SourceLink links."
return [pscustomobject]@{
result = 1
packagePath = $PackagePath
}
}
}
# Folds one background-job result into the failure counter.
# A result of '0' means the package validated cleanly; anything else bumps
# $ValidationFailures and, with -LogErrors, reports telemetry for the package.
function CheckJobResult(
$result,
$packagePath,
[ref]$ValidationFailures,
[switch]$logErrors) {
if ($result -eq '0') {
return
}
if ($logErrors) {
Write-PipelineTelemetryError -Category 'SourceLink' -Message "$packagePath has broken SourceLink links."
}
$ValidationFailures.Value++
}
# Orchestrates validation: normalizes/validates $GHRepoName and $GHCommit,
# pre-populates the repo-file cache from the GitHub trees API, then runs
# $ValidatePackage as parallel background jobs over every *.symbols.nupkg.
function ValidateSourceLinkLinks {
if ($GHRepoName -ne '' -and !($GHRepoName -Match '^[^\s\/]+/[^\s\/]+$')) {
# Also accept 'org-repo' and rewrite it to 'org/repo'.
if (!($GHRepoName -Match '^[^\s-]+-[^\s]+$')) {
Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHRepoName should be in the format / or -. '$GHRepoName'"
ExitWithExitCode 1
}
else {
$GHRepoName = $GHRepoName -replace '^([^\s-]+)-([^\s]+)$', '$1/$2';
}
}
if ($GHCommit -ne '' -and !($GHCommit -Match '^[0-9a-fA-F]{40}$')) {
Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHCommit should be a 40 chars hexadecimal string. '$GHCommit'"
ExitWithExitCode 1
}
if ($GHRepoName -ne '' -and $GHCommit -ne '') {
$RepoTreeURL = -Join('http://api.github.com/repos/', $GHRepoName, '/git/trees/', $GHCommit, '?recursive=1')
$CodeExtensions = @('.cs', '.vb', '.fs', '.fsi', '.fsx', '.fsscript')
try {
# Retrieve the list of files in the repo at that particular commit point and store them in the RepoFiles hash
$Data = Invoke-WebRequest $RepoTreeURL -UseBasicParsing | ConvertFrom-Json | Select-Object -ExpandProperty tree
foreach ($file in $Data) {
$Extension = [System.IO.Path]::GetExtension($file.path)
if ($CodeExtensions.Contains($Extension)) {
$RepoFiles[$file.path] = 1
}
}
}
catch {
# Caching is an optimization only; validation proceeds without it.
Write-Host "Problems downloading the list of files from the repo. Url used: $RepoTreeURL . Execution will proceed without caching."
}
}
elseif ($GHRepoName -ne '' -or $GHCommit -ne '') {
Write-Host 'For using the http caching mechanism both GHRepoName and GHCommit should be informed.'
}
# Start from a clean extraction directory.
if (Test-Path $ExtractPath) {
Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
}
$ValidationFailures = 0
# Process each NuGet package in parallel
Get-ChildItem "$InputPath\*.symbols.nupkg" |
ForEach-Object {
Write-Host "Starting $($_.FullName)"
Start-Job -ScriptBlock $ValidatePackage -ArgumentList $_.FullName | Out-Null
$NumJobs = @(Get-Job -State 'Running').Count
# Throttle: wait until a job slot frees up before scheduling the next package.
while ($NumJobs -ge $MaxParallelJobs) {
Write-Host "There are $NumJobs validation jobs running right now. Waiting $SecondsBetweenLoadChecks seconds to check again."
sleep $SecondsBetweenLoadChecks
$NumJobs = @(Get-Job -State 'Running').Count
}
# Harvest jobs that finished while we were scheduling (errors logged here).
foreach ($Job in @(Get-Job -State 'Completed')) {
$jobResult = Wait-Job -Id $Job.Id | Receive-Job
CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures) -LogErrors
Remove-Job -Id $Job.Id
}
}
# Drain the remaining jobs; these failures were already logged above, so
# they are counted without re-reporting telemetry.
foreach ($Job in @(Get-Job)) {
$jobResult = Wait-Job -Id $Job.Id | Receive-Job
CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures)
Remove-Job -Id $Job.Id
}
if ($ValidationFailures -gt 0) {
Write-PipelineTelemetryError -Category 'SourceLink' -Message "$ValidationFailures package(s) failed validation."
ExitWithExitCode 1
}
}
# Installs the 'sourcelink' dotnet global tool, skipping the install when the
# requested version ($SourcelinkCliVersion script parameter) is already present.
function InstallSourcelinkCli {
$sourcelinkCliPackageName = 'sourcelink'
$dotnetRoot = InitializeDotNetCli -install:$true
$dotnet = "$dotnetRoot\dotnet.exe"
$toolList = & "$dotnet" tool list --global
# Both the package name and the exact version must appear in the tool list.
$alreadyInstalled = ($toolList -like "*$sourcelinkCliPackageName*") -and ($toolList -like "*$sourcelinkCliVersion*")
if ($alreadyInstalled) {
Write-Host "SourceLink CLI version $sourcelinkCliVersion is already installed."
return
}
Write-Host "Installing SourceLink CLI version $sourcelinkCliVersion..."
Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.'
& "$dotnet" tool install $sourcelinkCliPackageName --version $sourcelinkCliVersion --verbosity "minimal" --global
}
# Entry point: install the CLI, clear any leftover jobs from a previous run,
# then validate. Any unhandled error is reported to telemetry and exits 1.
try {
InstallSourcelinkCli
# Clean up jobs from anything that ran in this session before us.
foreach ($Job in @(Get-Job)) {
Remove-Job -Id $Job.Id
}
ValidateSourceLinkLinks
}
catch {
Write-Host $_.Exception
Write-Host $_.ScriptStackTrace
Write-PipelineTelemetryError -Category 'SourceLink' -Message $_
ExitWithExitCode 1
}
================================================
FILE: eng/common/post-build/symbols-validation.ps1
================================================
# Validates that symbols for the packages under $InputPath are available on
# the symbol servers (setup portion: parameters, constants, exclusion list).
param(
[Parameter(Mandatory = $true)][string] $InputPath, # Full path to directory where NuGet packages to be checked are stored
[Parameter(Mandatory = $true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
[Parameter(Mandatory = $true)][string] $DotnetSymbolVersion, # Version of dotnet symbol to use
[Parameter(Mandatory = $false)][switch] $CheckForWindowsPdbs, # If we should check for the existence of windows pdbs in addition to portable PDBs
[Parameter(Mandatory = $false)][switch] $ContinueOnError, # If we should keep checking symbols after an error
[Parameter(Mandatory = $false)][switch] $Clean, # Clean extracted symbols directory after checking symbols
[Parameter(Mandatory = $false)][string] $SymbolExclusionFile # Exclude the symbols in the file from publishing to symbol server
)
. $PSScriptRoot\..\tools.ps1
# Maximum number of jobs to run in parallel
$MaxParallelJobs = 16
# Max number of retries
$MaxRetry = 5
# Wait time between check for system load
$SecondsBetweenLoadChecks = 10
# Set error codes
Set-Variable -Name "ERROR_BADEXTRACT" -Option Constant -Value -1
Set-Variable -Name "ERROR_FILEDOESNOTEXIST" -Option Constant -Value -2
$WindowsPdbVerificationParam = ""
if ($CheckForWindowsPdbs) {
$WindowsPdbVerificationParam = "--windows-pdbs"
}
$ExclusionSet = New-Object System.Collections.Generic.HashSet[string];
if (!$InputPath -or !(Test-Path $InputPath)){
Write-Host "No symbols to validate."
ExitWithExitCode 0
}
#Check if the path exists
if ($SymbolExclusionFile -and (Test-Path $SymbolExclusionFile)){
[string[]]$Exclusions = Get-Content "$SymbolExclusionFile"
# NOTE(review): HashSet.Add returns a bool that flows to the output stream
# here — presumably ignored by the pipeline host; verify.
$Exclusions | foreach { if($_ -and $_.Trim()){$ExclusionSet.Add($_)} }
}
else{
Write-Host "Symbol Exclusion file does not exists. No symbols to exclude."
}
# Script block run as a background job per package: extracts the package and,
# for each relevant binary, queries `dotnet symbol` against both the public
# (MSDL) and internal (SymWeb) servers. Returns a [pscustomobject] with
# `result` (missing-symbol count or a negative error sentinel) and
# `packagePath`. Outer-scope values are pulled in via $using:.
$CountMissingSymbols = {
param(
[string] $PackagePath, # Path to a NuGet package
[string] $WindowsPdbVerificationParam # If we should check for the existence of windows pdbs in addition to portable PDBs
)
Add-Type -AssemblyName System.IO.Compression.FileSystem
Write-Host "Validating $PackagePath "
# Ensure input file exist
if (!(Test-Path $PackagePath)) {
Write-PipelineTaskError "Input file does not exist: $PackagePath"
return [pscustomobject]@{
result = $using:ERROR_FILEDOESNOTEXIST
packagePath = $PackagePath
}
}
# Extensions for which we'll look for symbols
$RelevantExtensions = @('.dll', '.exe', '.so', '.dylib')
# How many files are missing symbol information
$MissingSymbols = 0
$PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
# A fresh GUID per package keeps concurrent jobs from extracting into the
# same directory.
$PackageGuid = New-Guid
$ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageGuid
$SymbolsPath = Join-Path -Path $ExtractPath -ChildPath 'Symbols'
try {
[System.IO.Compression.ZipFile]::ExtractToDirectory($PackagePath, $ExtractPath)
}
catch {
# Extraction failure (e.g. duplicate entries) is reported via sentinel.
Write-Host "Something went wrong extracting $PackagePath"
Write-Host $_
return [pscustomobject]@{
result = $using:ERROR_BADEXTRACT
packagePath = $PackagePath
}
}
Get-ChildItem -Recurse $ExtractPath |
Where-Object { $RelevantExtensions -contains $_.Extension } |
ForEach-Object {
$FileName = $_.FullName
# Reference assemblies carry no symbols; skip anything under a \ref\ folder.
if ($FileName -Match '\\ref\\') {
Write-Host "`t Ignoring reference assembly file " $FileName
return
}
# Queries one symbol server (selected by $TargetServerParam) for the
# module's symbols, retrying up to $MaxRetry times. Returns a short
# description of what was found, or $null if nothing was found.
$FirstMatchingSymbolDescriptionOrDefault = {
param(
[string] $FullPath, # Full path to the module that has to be checked
[string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols
[string] $WindowsPdbVerificationParam, # Parameter to pass to potential check for windows-pdbs.
[string] $SymbolsPath
)
$FileName = [System.IO.Path]::GetFileName($FullPath)
$Extension = [System.IO.Path]::GetExtension($FullPath)
# Those below are potential symbol files that the `dotnet symbol` might
# return. Which one will be returned depend on the type of file we are
# checking and which type of file was uploaded.
# The file itself is returned
$SymbolPath = $SymbolsPath + '\' + $FileName
# PDB file for the module
$PdbPath = $SymbolPath.Replace($Extension, '.pdb')
# PDB file for R2R module (created by crossgen)
$NGenPdb = $SymbolPath.Replace($Extension, '.ni.pdb')
# DBG file for a .so library
$SODbg = $SymbolPath.Replace($Extension, '.so.dbg')
# DWARF file for a .dylib
$DylibDwarf = $SymbolPath.Replace($Extension, '.dylib.dwarf')
# dotnet-symbol was installed as a global tool by InstallDotnetSymbol.
$dotnetSymbolExe = "$env:USERPROFILE\.dotnet\tools"
$dotnetSymbolExe = Resolve-Path "$dotnetSymbolExe\dotnet-symbol.exe"
$totalRetries = 0
while ($totalRetries -lt $using:MaxRetry) {
# Save the output and get diagnostic output
$output = & $dotnetSymbolExe --symbols --modules $WindowsPdbVerificationParam $TargetServerParam $FullPath -o $SymbolsPath --diagnostics | Out-String
# Check which combination of symbol files was downloaded, most
# specific first; any hit returns immediately, otherwise retry.
if ((Test-Path $PdbPath) -and (Test-path $SymbolPath)) {
return 'Module and PDB for Module'
}
elseif ((Test-Path $NGenPdb) -and (Test-Path $PdbPath) -and (Test-Path $SymbolPath)) {
return 'Dll, PDB and NGen PDB'
}
elseif ((Test-Path $SODbg) -and (Test-Path $SymbolPath)) {
return 'So and DBG for SO'
}
elseif ((Test-Path $DylibDwarf) -and (Test-Path $SymbolPath)) {
return 'Dylib and Dwarf for Dylib'
}
elseif (Test-Path $SymbolPath) {
return 'Module'
}
else
{
$totalRetries++
}
}
return $null
}
# Path relative to the extract root is what the exclusion file lists;
# check both backslash and forward-slash forms.
$FileRelativePath = $FileName.Replace("$ExtractPath\", "")
if (($($using:ExclusionSet) -ne $null) -and ($($using:ExclusionSet).Contains($FileRelativePath) -or ($($using:ExclusionSet).Contains($FileRelativePath.Replace("\", "/"))))){
Write-Host "Skipping $FileName from symbol validation"
}
else {
# Per-file GUID so the msdl/symweb lookups don't collide on disk.
$FileGuid = New-Guid
$ExpandedSymbolsPath = Join-Path -Path $SymbolsPath -ChildPath $FileGuid
$SymbolsOnMSDL = & $FirstMatchingSymbolDescriptionOrDefault `
-FullPath $FileName `
-TargetServerParam '--microsoft-symbol-server' `
-SymbolsPath "$ExpandedSymbolsPath-msdl" `
-WindowsPdbVerificationParam $WindowsPdbVerificationParam
$SymbolsOnSymWeb = & $FirstMatchingSymbolDescriptionOrDefault `
-FullPath $FileName `
-TargetServerParam '--internal-server' `
-SymbolsPath "$ExpandedSymbolsPath-symweb" `
-WindowsPdbVerificationParam $WindowsPdbVerificationParam
Write-Host -NoNewLine "`t Checking file " $FileName "... "
# A file counts as "missing" unless BOTH servers returned symbols.
if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) {
Write-Host "Symbols found on MSDL ($SymbolsOnMSDL) and SymWeb ($SymbolsOnSymWeb)"
}
else {
$MissingSymbols++
if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) {
Write-Host 'No symbols found on MSDL or SymWeb!'
}
else {
if ($SymbolsOnMSDL -eq $null) {
Write-Host 'No symbols found on MSDL!'
}
else {
Write-Host 'No symbols found on SymWeb!'
}
}
}
}
}
if ($using:Clean) {
Remove-Item $ExtractPath -Recurse -Force
}
# NOTE(review): Pop-Location without a visible matching Push-Location in this
# script block — confirm whether it is intentional or leftover.
Pop-Location
return [pscustomobject]@{
result = $MissingSymbols
packagePath = $PackagePath
}
}
# Classify the result object produced by a $CountMissingSymbols job and bump
# the appropriate counter.
#   $result        - negative sentinel ($ERROR_BADEXTRACT / $ERROR_FILEDOESNOTEXIST)
#                    or the number of modules with missing symbols (0 = success).
#   $packagePath   - package the result belongs to (used only in messages).
#   $DupedSymbols  - [ref] counter of packages that failed to extract.
#   $TotalFailures - [ref] counter of packages that failed validation.
function CheckJobResult(
    $result,
    $packagePath,
    [ref]$DupedSymbols,
    [ref]$TotalFailures) {
  if ($result -eq $ERROR_BADEXTRACT) {
    Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$packagePath has duplicated symbol files"
    $DupedSymbols.Value++
  }
  elseif ($result -eq $ERROR_FILEDOESNOTEXIST) {
    Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$packagePath does not exist"
    $TotalFailures.Value++
  }
  # Fix: compare against the integer 0 rather than the string '0'. The original
  # relied on PowerShell coercing the right operand to the left operand's type;
  # an explicit numeric comparison states the intent directly.
  elseif ($result -gt 0) {
    Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Missing symbols for $result modules in the package $packagePath"
    $TotalFailures.Value++
  }
  else {
    Write-Host "All symbols verified for package $packagePath"
  }
}
# Runs symbol validation for every .nupkg under $InputPath, fanning out up to
# $MaxParallelJobs background jobs, then aggregates the per-package results
# and fails the script if any package had missing or duplicated symbols.
function CheckSymbolsAvailable {
if (Test-Path $ExtractPath) {
Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
}
$TotalPackages = 0
$TotalFailures = 0
$DupedSymbols = 0
Get-ChildItem "$InputPath\*.nupkg" |
ForEach-Object {
$FileName = $_.Name
$FullName = $_.FullName
# These packages from Arcade-Services include some native libraries that
# our current symbol uploader can't handle. Below is a workaround until
# we get issue: https://github.com/dotnet/arcade/issues/2457 sorted.
if ($FileName -Match 'Microsoft\.DotNet\.Darc\.') {
Write-Host "Ignoring Arcade-services file: $FileName"
Write-Host
return
}
elseif ($FileName -Match 'Microsoft\.DotNet\.Maestro\.Tasks\.') {
Write-Host "Ignoring Arcade-services file: $FileName"
Write-Host
return
}
$TotalPackages++
# Launch validation for this package in the background.
Start-Job -ScriptBlock $CountMissingSymbols -ArgumentList @($FullName,$WindowsPdbVerificationParam) | Out-Null
# Throttle: poll until the running-job count drops below the cap.
$NumJobs = @(Get-Job -State 'Running').Count
while ($NumJobs -ge $MaxParallelJobs) {
Write-Host "There are $NumJobs validation jobs running right now. Waiting $SecondsBetweenLoadChecks seconds to check again."
sleep $SecondsBetweenLoadChecks
$NumJobs = @(Get-Job -State 'Running').Count
}
# Harvest any jobs that already finished so their state is released early.
foreach ($Job in @(Get-Job -State 'Completed')) {
$jobResult = Wait-Job -Id $Job.Id | Receive-Job
CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$DupedSymbols) ([ref]$TotalFailures)
Remove-Job -Id $Job.Id
}
Write-Host
}
# Drain all remaining jobs after the last package has been queued.
foreach ($Job in @(Get-Job)) {
$jobResult = Wait-Job -Id $Job.Id | Receive-Job
CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$DupedSymbols) ([ref]$TotalFailures)
}
if ($TotalFailures -gt 0 -or $DupedSymbols -gt 0) {
if ($TotalFailures -gt 0) {
Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Symbols missing for $TotalFailures/$TotalPackages packages"
}
if ($DupedSymbols -gt 0) {
Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$DupedSymbols/$TotalPackages packages had duplicated symbol files and could not be extracted"
}
ExitWithExitCode 1
}
else {
Write-Host "All symbols validated!"
}
}
# Ensure the 'dotnet-symbol' global tool is installed at the requested
# $DotnetSymbolVersion, installing it via the repo-local dotnet CLI when the
# exact name+version pair is not already present in `dotnet tool list`.
function InstallDotnetSymbol {
    $dotnetSymbolPackageName = 'dotnet-symbol'
    $dotnetRoot = InitializeDotNetCli -install:$true
    $dotnet = "$dotnetRoot\dotnet.exe"
    $toolList = & "$dotnet" tool list --global
    # Both the package name and the exact version must appear in the listing.
    $alreadyInstalled = ($toolList -like "*$dotnetSymbolPackageName*") -and ($toolList -like "*$dotnetSymbolVersion*")
    if ($alreadyInstalled) {
        Write-Host "dotnet-symbol version $dotnetSymbolVersion is already installed."
        return
    }
    Write-Host "Installing dotnet-symbol version $dotnetSymbolVersion..."
    Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.'
    & "$dotnet" tool install $dotnetSymbolPackageName --version $dotnetSymbolVersion --verbosity "minimal" --global
}
# Script entry point: install dotnet-symbol, clear leftover jobs from the
# session (CheckSymbolsAvailable relies on Get-Job seeing only its own jobs),
# then run the validation.
try {
InstallDotnetSymbol
foreach ($Job in @(Get-Job)) {
Remove-Job -Id $Job.Id
}
CheckSymbolsAvailable
}
catch {
# Report to pipeline telemetry and exit non-zero so CI fails the step.
Write-Host $_.ScriptStackTrace
Write-PipelineTelemetryError -Category 'CheckSymbols' -Message $_
ExitWithExitCode 1
}
================================================
FILE: eng/common/retain-build.ps1
================================================
# retain-build.ps1: marks an Azure DevOps build as retained forever.
Param(
[Parameter(Mandatory=$true)][int] $buildId, # AzDO numeric build id to retain
[Parameter(Mandatory=$true)][string] $azdoOrgUri, # e.g. https://dev.azure.com/<org>
[Parameter(Mandatory=$true)][string] $azdoProject, # AzDO project name
[Parameter(Mandatory=$true)][string] $token # PAT with build-update permission
)
$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
# Build the HTTP header hashtable for Azure DevOps basic authentication.
# The PAT is used as the password half of an empty-username credential pair,
# base64-encoded per RFC 7617.
function Get-AzDOHeaders(
    [string] $token)
{
    $authBytes = [Text.Encoding]::ASCII.GetBytes(":${token}")
    $base64AuthInfo = [Convert]::ToBase64String($authBytes)
    return @{ 'Authorization' = "Basic $base64AuthInfo" }
}
# PATCH the AzDO Builds REST API to set keepForever=true on the given build.
# Exits the process with code 1 if the request fails.
#   $azdoOrgUri  - organization URI, e.g. https://dev.azure.com/<org>
#   $azdoProject - project containing the build
#   $buildId     - numeric build id
#   $token       - PAT used for basic auth (via Get-AzDOHeaders)
function Update-BuildRetention(
    [string] $azdoOrgUri,
    [string] $azdoProject,
    [int] $buildId,
    [string] $token)
{
    $headers = Get-AzDOHeaders -token $token
    $requestBody = "{
    `"keepForever`": `"true`"
    }"
    $requestUri = "${azdoOrgUri}/${azdoProject}/_apis/build/builds/${buildId}?api-version=6.0"
    Write-Host "Attempting to retain build using the following URI: ${requestUri} ..."
    try {
        Invoke-RestMethod -Uri $requestUri -Method Patch -Body $requestBody -Header $headers -ContentType "application/json"
        Write-Host "Updated retention settings for build ${buildId}."
    }
    catch {
        # Fix: the original wrote "$_.Exception.Response.StatusDescription",
        # which expands only $_ and appends the literal text
        # ".Exception.Response.StatusDescription". The $() subexpression is
        # required to evaluate the property path inside a string.
        Write-Error "Failed to update retention settings for build: $($_.Exception.Response.StatusDescription)"
        exit 1
    }
}
# Script entry point: retain the requested build, then exit successfully.
Update-BuildRetention -azdoOrgUri $azdoOrgUri -azdoProject $azdoProject -buildId $buildId -token $token
exit 0
================================================
FILE: eng/common/sdk-task.ps1
================================================
# sdk-task.ps1: runs a named Arcade SDK task project via MSBuild.
[CmdletBinding(PositionalBinding=$false)]
Param(
[string] $configuration = 'Debug', # Build configuration passed to MSBuild
[string] $task, # Name of a project in the Arcade SDK's SdkTasks directory
[string] $verbosity = 'minimal', # MSBuild verbosity
[string] $msbuildEngine = $null, # 'dotnet', 'vs', or unspecified
[switch] $restore, # Run the Restore target before Execute
[switch] $prepareMachine, # Prepare machine for CI run
[switch][Alias('nobl')]$excludeCIBinaryLog, # Skip producing a binlog on CI
[switch]$noWarnAsError, # Do not elevate warnings to errors
[switch] $help, # Print usage and exit
[string] $runtimeSourceFeed = '', # Optional internal runtime feed
[string] $runtimeSourceFeedKey = '', # Key for the internal runtime feed
[Parameter(ValueFromRemainingArguments=$true)][String[]]$properties # Passed through to MSBuild
)
# tools.ps1 reads these to adjust CI behavior before being dot-sourced.
$ci = $true
$binaryLog = if ($excludeCIBinaryLog) { $false } else { $true }
$warnAsError = if ($noWarnAsError) { $false } else { $true }
. $PSScriptRoot\tools.ps1
# Print the help text describing this script's command-line switches.
function Print-Usage() {
Write-Host "Common settings:"
Write-Host " -task Name of Arcade task (name of a project in SdkTasks directory of the Arcade SDK package)"
Write-Host " -restore Restore dependencies"
Write-Host " -verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic]"
Write-Host " -help Print help and exit"
Write-Host ""
Write-Host "Advanced settings:"
Write-Host " -prepareMachine Prepare machine for CI run"
Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
Write-Host " -excludeCIBinaryLog When running on CI, allow no binary log (short: -nobl)"
Write-Host ""
Write-Host "Command line arguments not listed above are passed thru to msbuild."
}
# Invoke MSBuild on the resolved task project ($taskProject, set in the main
# try block) for the given target. The binlog name is suffixed with the
# target except for the default 'Execute' target.
function Build([string]$target) {
$logSuffix = if ($target -eq 'Execute') { '' } else { ".$target" }
$log = Join-Path $LogDir "$task$logSuffix.binlog"
$binaryLogArg = if ($binaryLog) { "/bl:$log" } else { "" }
$outputPath = Join-Path $ToolsetDir "$task\"
# @properties splats the remaining command-line args through to MSBuild.
MSBuild $taskProject `
$binaryLogArg `
/t:$target `
/p:Configuration=$configuration `
/p:RepoRoot=$RepoRoot `
/p:BaseIntermediateOutputPath=$outputPath `
/v:$verbosity `
@properties
}
# Main flow: handle -help, validate -task, optionally provision desktop
# (xcopy) MSBuild for the 'vs' engine, then run Restore/Execute.
try {
if ($help -or (($null -ne $properties) -and ($properties.Contains('/help') -or $properties.Contains('/?')))) {
Print-Usage
exit 0
}
if ($task -eq "") {
Write-PipelineTelemetryError -Category 'Build' -Message "Missing required parameter '-task '"
Print-Usage
ExitWithExitCode 1
}
if( $msbuildEngine -eq "vs") {
# Ensure desktop MSBuild is available for sdk tasks.
if( -not ($GlobalJson.tools.PSObject.Properties.Name -contains "vs" )) {
$GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty
}
if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) {
$GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "18.0.0" -MemberType NoteProperty
}
# "none" opts out of the xcopy-msbuild download.
if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") {
$xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true
}
# NOTE(review): when xcopy-msbuild is "none", $xcopyMSBuildToolsFolder is
# never assigned, so this check throws for that case too — confirm intent.
if ($xcopyMSBuildToolsFolder -eq $null) {
throw 'Unable to get xcopy downloadable version of msbuild'
}
$global:_MSBuildExe = "$($xcopyMSBuildToolsFolder)\MSBuild\Current\Bin\MSBuild.exe"
}
$taskProject = GetSdkTaskProject $task
if (!(Test-Path $taskProject)) {
Write-PipelineTelemetryError -Category 'Build' -Message "Unknown task: $task"
ExitWithExitCode 1
}
if ($restore) {
Build 'Restore'
}
Build 'Execute'
}
catch {
Write-Host $_.ScriptStackTrace
Write-PipelineTelemetryError -Category 'Build' -Message $_
ExitWithExitCode 1
}
ExitWithExitCode 0
================================================
FILE: eng/common/sdk-task.sh
================================================
#!/usr/bin/env bash
# Print the help text describing this script's command-line options.
show_usage() {
echo "Common settings:"
echo " --task Name of Arcade task (name of a project in SdkTasks directory of the Arcade SDK package)"
echo " --restore Restore dependencies"
echo " --verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic]"
echo " --help Print help and exit"
echo ""
echo "Advanced settings:"
echo " --excludeCIBinarylog Don't output binary log (short: -nobl)"
echo " --noWarnAsError Do not warn as error"
echo ""
echo "Command line arguments not listed above are passed thru to msbuild."
}
# Resolve the real directory containing this script, following any chain of
# symlinks, so sibling scripts can be sourced reliably.
source="${BASH_SOURCE[0]}"
# resolve $source until the file is no longer a symlink
while [[ -h "$source" ]]; do
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
source="$(readlink "$source")"
# if $source was a relative symlink, we need to resolve it relative to the path where the
# symlink file was located
[[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
# Invoke MSBuild on the resolved task project for the given target. Mirrors
# the Build function in sdk-task.ps1: the binlog name is suffixed with the
# target except for the default 'Execute' target.
Build() {
local target=$1
local log_suffix=""
[[ "$target" != "Execute" ]] && log_suffix=".$target"
local log="$log_dir/$task$log_suffix.binlog"
local binaryLogArg=""
[[ $binary_log == true ]] && binaryLogArg="/bl:$log"
local output_path="$toolset_dir/$task/"
# $binaryLogArg and $properties are intentionally unquoted so empty values
# disappear and multiple pass-through properties word-split into arguments.
MSBuild "$taskProject" \
$binaryLogArg \
/t:"$target" \
/p:Configuration="$configuration" \
/p:RepoRoot="$repo_root" \
/p:BaseIntermediateOutputPath="$output_path" \
/v:"$verbosity" \
$properties
}
# Default option values, overridden by the argument-parsing loop below.
binary_log=true
configuration="Debug"
verbosity="minimal"
exclude_ci_binary_log=false
restore=false
help=false
properties='' # extra args passed through to MSBuild
warnAsError=true
# Parse command-line options. Each raw argument is lowercased first so flags
# match case-insensitively; anything unrecognized is accumulated into
# $properties and passed through to MSBuild.
while (($# > 0)); do
  lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
  case $lowerI in
    --task)
      task=$2
      shift 2
      ;;
    --restore)
      restore=true
      shift 1
      ;;
    --verbosity)
      verbosity=$2
      shift 2
      ;;
    --excludecibinarylog|--nobl)
      binary_log=false
      exclude_ci_binary_log=true
      shift 1
      ;;
    # Fix: the input is lowercased above, so this pattern must be lowercase.
    # The previous mixed-case pattern '--noWarnAsError' could never match and
    # the flag fell through to the catch-all, ending up in $properties.
    --nowarnaserror)
      warnAsError=false
      shift 1
      ;;
    --help)
      help=true
      shift 1
      ;;
    *)
      properties="$properties $1"
      shift 1
      ;;
  esac
done
# Main flow: show help if requested, initialize the toolset, validate the
# -task argument resolves to a project, then run Restore/Execute.
ci=true
if $help; then
show_usage
exit 0
fi
. "$scriptroot/tools.sh"
InitializeToolset
if [[ -z "$task" ]]; then
Write-PipelineTelemetryError -Category 'Task' -Name 'MissingTask' -Message "Missing required parameter '-task '"
ExitWithExitCode 1
fi
taskProject=$(GetSdkTaskProject "$task")
if [[ ! -e "$taskProject" ]]; then
Write-PipelineTelemetryError -Category 'Task' -Name 'UnknownTask' -Message "Unknown task: $task"
ExitWithExitCode 1
fi
if $restore; then
Build "Restore"
fi
Build "Execute"
ExitWithExitCode 0
================================================
FILE: eng/common/sdl/NuGet.config
================================================
================================================
FILE: eng/common/sdl/configure-sdl-tool.ps1
================================================
# configure-sdl-tool.ps1: writes a Guardian .gdnconfig file for each tool in
# $ToolsList under the .gdn/r directory.
Param(
[string] $GuardianCliLocation, # Path to the guardian CLI executable
[string] $WorkingDirectory, # Directory guardian operates in
[string] $TargetDirectory, # Default scan target for tools that take one
[string] $GdnFolder, # The .gdn folder that holds guardian state
# The list of Guardian tools to configure. For each object in the array:
# - If the item is a [hashtable], it must contain these entries:
# - Name = The tool name as Guardian knows it.
# - Scenario = (Optional) Scenario-specific name for this configuration entry. It must be unique
# among all tool entries with the same Name.
# - Args = (Optional) Array of Guardian tool configuration args, like '@("Target > C:\temp")'
# - If the item is a [string] $v, it is treated as '@{ Name="$v" }'
[object[]] $ToolsList,
[string] $GuardianLoggerLevel='Standard',
# Optional: Additional params to add to any tool using CredScan.
[string[]] $CrScanAdditionalRunConfigParams,
# Optional: Additional params to add to any tool using PoliCheck.
[string[]] $PoliCheckAdditionalRunConfigParams,
# Optional: Additional params to add to any tool using CodeQL/Semmle.
[string[]] $CodeQLAdditionalRunConfigParams,
# Optional: Additional params to add to any tool using Binskim.
[string[]] $BinskimAdditionalRunConfigParams
)
$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
# Prevent tools.ps1 from importing the repo's configure-toolset hooks.
$disableConfigureToolsetImport = $true
$global:LASTEXITCODE = 0
# Main flow: normalize $ToolsList, add per-tool default args, then invoke
# `guardian configure` once per tool to emit a .gdnconfig file.
try {
# `tools.ps1` checks $ci to perform some actions. Since the SDL
# scripts don't necessarily execute in the same agent that run the
# build.ps1/sh script this variable isn't automatically set.
$ci = $true
. $PSScriptRoot\..\tools.ps1
# Normalize tools list: all in [hashtable] form with defined values for each key.
$ToolsList = $ToolsList |
ForEach-Object {
if ($_ -is [string]) {
$_ = @{ Name = $_ }
}
if (-not ($_['Scenario'])) { $_.Scenario = "" }
if (-not ($_['Args'])) { $_.Args = @() }
$_
}
Write-Host "List of tools to configure:"
$ToolsList | ForEach-Object { $_ | Out-String | Write-Host }
# We store config files in the r directory of .gdn
$gdnConfigPath = Join-Path $GdnFolder 'r'
$ValidPath = Test-Path $GuardianCliLocation
if ($ValidPath -eq $False)
{
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location."
ExitWithExitCode 1
}
foreach ($tool in $ToolsList) {
# Put together the name and scenario to make a unique key.
$toolConfigName = $tool.Name
if ($tool.Scenario) {
$toolConfigName += "_" + $tool.Scenario
}
Write-Host "=== Configuring $toolConfigName..."
$gdnConfigFile = Join-Path $gdnConfigPath "$toolConfigName-configure.gdnconfig"
# For some tools, add default and automatic args.
# ($targetDirectory resolves to the $TargetDirectory parameter —
# PowerShell variable names are case-insensitive.)
switch -Exact ($tool.Name) {
'credscan' {
if ($targetDirectory) {
$tool.Args += "`"TargetDirectory < $TargetDirectory`""
}
$tool.Args += "`"OutputType < pre`""
$tool.Args += $CrScanAdditionalRunConfigParams
}
'policheck' {
if ($targetDirectory) {
$tool.Args += "`"Target < $TargetDirectory`""
}
$tool.Args += $PoliCheckAdditionalRunConfigParams
}
{$_ -in 'semmle', 'codeql'} {
if ($targetDirectory) {
$tool.Args += "`"SourceCodeDirectory < $TargetDirectory`""
}
$tool.Args += $CodeQLAdditionalRunConfigParams
}
'binskim' {
if ($targetDirectory) {
# Binskim crashes due to specific PDBs. GitHub issue: https://github.com/microsoft/binskim/issues/924.
# We are excluding all `_.pdb` files from the scan.
$tool.Args += "`"Target < $TargetDirectory\**;-:file|$TargetDirectory\**\_.pdb`""
}
$tool.Args += $BinskimAdditionalRunConfigParams
}
}
# Create variable pointing to the args array directly so we can use splat syntax later.
$toolArgs = $tool.Args
# Configure the tool. If args array is provided or the current tool has some default arguments
# defined, add "--args" and splat each element on the end. Arg format is "{Arg id} < {Value}",
# one per parameter. Doc page for "guardian configure":
# https://dev.azure.com/securitytools/SecurityIntegration/_wiki/wikis/Guardian/1395/configure
Exec-BlockVerbosely {
& $GuardianCliLocation configure `
--working-directory $WorkingDirectory `
--tool $tool.Name `
--output-path $gdnConfigFile `
--logger-level $GuardianLoggerLevel `
--noninteractive `
--force `
$(if ($toolArgs) { "--args" }) @toolArgs
Exit-IfNZEC "Sdl"
}
Write-Host "Created '$toolConfigName' configuration file: $gdnConfigFile"
}
}
catch {
Write-Host $_.ScriptStackTrace
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
ExitWithExitCode 1
}
================================================
FILE: eng/common/sdl/execute-all-sdl-tools.ps1
================================================
# execute-all-sdl-tools.ps1: initializes Guardian, configures the requested
# SDL tools, runs them in a single pass, and optionally publishes to TSA.
Param(
[string] $GuardianPackageName, # Required: the name of guardian CLI package (not needed if GuardianCliLocation is specified)
[string] $NugetPackageDirectory, # Required: directory where NuGet packages are installed (not needed if GuardianCliLocation is specified)
[string] $GuardianCliLocation, # Optional: Direct location of Guardian CLI executable if GuardianPackageName & NugetPackageDirectory are not specified
[string] $Repository=$env:BUILD_REPOSITORY_NAME, # Required: the name of the repository (e.g. dotnet/arcade)
[string] $BranchName=$env:BUILD_SOURCEBRANCH, # Optional: name of branch or version of gdn settings; defaults to master
[string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY, # Required: the directory where source files are located
[string] $ArtifactsDirectory = (Join-Path $env:BUILD_ARTIFACTSTAGINGDIRECTORY ('artifacts')), # Required: the directory where build artifacts are located
[string] $AzureDevOpsAccessToken, # Required: access token for dnceng; should be provided via KeyVault
# Optional: list of SDL tools to run on source code. See 'configure-sdl-tool.ps1' for tools list
# format.
[object[]] $SourceToolsList,
# Optional: list of SDL tools to run on built artifacts. See 'configure-sdl-tool.ps1' for tools
# list format.
[object[]] $ArtifactToolsList,
# Optional: list of SDL tools to run without automatically specifying a target directory. See
# 'configure-sdl-tool.ps1' for tools list format.
[object[]] $CustomToolsList,
[bool] $TsaPublish=$False, # Optional: true will publish results to TSA; only set to true after onboarding to TSA; TSA is the automated framework used to upload test results as bugs.
[string] $TsaBranchName=$env:BUILD_SOURCEBRANCH, # Optional: required for TSA publish; defaults to $(Build.SourceBranchName); TSA is the automated framework used to upload test results as bugs.
[string] $TsaRepositoryName=$env:BUILD_REPOSITORY_NAME, # Optional: TSA repository name; will be generated automatically if not submitted; TSA is the automated framework used to upload test results as bugs.
[string] $BuildNumber=$env:BUILD_BUILDNUMBER, # Optional: required for TSA publish; defaults to $(Build.BuildNumber)
[bool] $UpdateBaseline=$False, # Optional: if true, will update the baseline in the repository; should only be run after fixing any issues which need to be fixed
[bool] $TsaOnboard=$False, # Optional: if true, will onboard the repository to TSA; should only be run once; TSA is the automated framework used to upload test results as bugs.
[string] $TsaInstanceUrl, # Optional: only needed if TsaOnboard or TsaPublish is true; the instance-url registered with TSA; TSA is the automated framework used to upload test results as bugs.
[string] $TsaCodebaseName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the codebase registered with TSA; TSA is the automated framework used to upload test results as bugs.
[string] $TsaProjectName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the project registered with TSA; TSA is the automated framework used to upload test results as bugs.
[string] $TsaNotificationEmail, # Optional: only needed if TsaOnboard is true; the email(s) which will receive notifications of TSA bug filings (e.g. alias@microsoft.com); TSA is the automated framework used to upload test results as bugs.
[string] $TsaCodebaseAdmin, # Optional: only needed if TsaOnboard is true; the aliases which are admins of the TSA codebase (e.g. DOMAIN\alias); TSA is the automated framework used to upload test results as bugs.
[string] $TsaBugAreaPath, # Optional: only needed if TsaOnboard is true; the area path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
[string] $TsaIterationPath, # Optional: only needed if TsaOnboard is true; the iteration path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
[string] $GuardianLoggerLevel='Standard', # Optional: the logger level for the Guardian CLI; options are Trace, Verbose, Standard, Warning, and Error
[string[]] $CrScanAdditionalRunConfigParams, # Optional: Additional Params to custom build a CredScan run config in the format @("xyz:abc","sdf:1")
[string[]] $PoliCheckAdditionalRunConfigParams, # Optional: Additional Params to custom build a Policheck run config in the format @("xyz:abc","sdf:1")
[string[]] $CodeQLAdditionalRunConfigParams, # Optional: Additional Params to custom build a Semmle/CodeQL run config in the format @("xyz < abc","sdf < 1")
[string[]] $BinskimAdditionalRunConfigParams, # Optional: Additional Params to custom build a Binskim run config in the format @("xyz < abc","sdf < 1")
[bool] $BreakOnFailure=$False # Optional: Fail the build if there were errors during the run
)
# Main flow: locate the Guardian CLI, init the .gdn folder, optionally onboard
# to TSA, configure artifact/source/custom tool lists, run everything in one
# `guardian run`, then optionally publish to TSA and/or break the build.
try {
$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
$disableConfigureToolsetImport = $true
$global:LASTEXITCODE = 0
# `tools.ps1` checks $ci to perform some actions. Since the SDL
# scripts don't necessarily execute in the same agent that run the
# build.ps1/sh script this variable isn't automatically set.
$ci = $true
. $PSScriptRoot\..\tools.ps1
#Replace repo names to the format of org/repo
if (!($Repository.contains('/'))) {
$RepoName = $Repository -replace '(.*?)-(.*)', '$1/$2';
}
else{
$RepoName = $Repository;
}
# Prefer the NuGet-installed guardian.cmd when a package name was given.
if ($GuardianPackageName) {
$guardianCliLocation = Join-Path $NugetPackageDirectory (Join-Path $GuardianPackageName (Join-Path 'tools' 'guardian.cmd'))
} else {
$guardianCliLocation = $GuardianCliLocation
}
$workingDirectory = (Split-Path $SourceDirectory -Parent)
$ValidPath = Test-Path $guardianCliLocation
if ($ValidPath -eq $False)
{
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Invalid Guardian CLI Location.'
ExitWithExitCode 1
}
Exec-BlockVerbosely {
& $(Join-Path $PSScriptRoot 'init-sdl.ps1') -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -AzureDevOpsAccessToken $AzureDevOpsAccessToken -GuardianLoggerLevel $GuardianLoggerLevel
}
$gdnFolder = Join-Path $workingDirectory '.gdn'
if ($TsaOnboard) {
if ($TsaCodebaseName -and $TsaNotificationEmail -and $TsaCodebaseAdmin -and $TsaBugAreaPath) {
Exec-BlockVerbosely {
& $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
}
if ($LASTEXITCODE -ne 0) {
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-onboard failed with exit code $LASTEXITCODE."
ExitWithExitCode $LASTEXITCODE
}
} else {
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not onboard to TSA -- not all required values ($TsaCodebaseName, $TsaNotificationEmail, $TsaCodebaseAdmin, $TsaBugAreaPath) were specified.'
ExitWithExitCode 1
}
}
# Configure a list of tools with a default target directory. Populates the ".gdn/r" directory.
function Configure-ToolsList([object[]] $tools, [string] $targetDirectory) {
if ($tools -and $tools.Count -gt 0) {
Exec-BlockVerbosely {
& $(Join-Path $PSScriptRoot 'configure-sdl-tool.ps1') `
-GuardianCliLocation $guardianCliLocation `
-WorkingDirectory $workingDirectory `
-TargetDirectory $targetDirectory `
-GdnFolder $gdnFolder `
-ToolsList $tools `
-AzureDevOpsAccessToken $AzureDevOpsAccessToken `
-GuardianLoggerLevel $GuardianLoggerLevel `
-CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams `
-PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams `
-CodeQLAdditionalRunConfigParams $CodeQLAdditionalRunConfigParams `
-BinskimAdditionalRunConfigParams $BinskimAdditionalRunConfigParams
if ($BreakOnFailure) {
Exit-IfNZEC "Sdl"
}
}
}
}
# Configure Artifact and Source tools with default Target directories.
Configure-ToolsList $ArtifactToolsList $ArtifactsDirectory
Configure-ToolsList $SourceToolsList $SourceDirectory
# Configure custom tools with no default Target directory.
Configure-ToolsList $CustomToolsList $null
# At this point, all tools are configured in the ".gdn" directory. Run them all in a single call.
# (If we used "run" multiple times, each run would overwrite data from earlier runs.)
Exec-BlockVerbosely {
& $(Join-Path $PSScriptRoot 'run-sdl.ps1') `
-GuardianCliLocation $guardianCliLocation `
-WorkingDirectory $SourceDirectory `
-UpdateBaseline $UpdateBaseline `
-GdnFolder $gdnFolder
}
if ($TsaPublish) {
if ($TsaBranchName -and $BuildNumber) {
if (-not $TsaRepositoryName) {
$TsaRepositoryName = "$($Repository)-$($BranchName)"
}
Exec-BlockVerbosely {
& $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --onboard $True --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
}
if ($LASTEXITCODE -ne 0) {
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-publish failed with exit code $LASTEXITCODE."
ExitWithExitCode $LASTEXITCODE
}
} else {
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not publish to TSA -- not all required values ($TsaBranchName, $BuildNumber) were specified.'
ExitWithExitCode 1
}
}
if ($BreakOnFailure) {
Write-Host "Failing the build in case of breaking results..."
Exec-BlockVerbosely {
& $guardianCliLocation break --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
}
} else {
Write-Host "Letting the build pass even if there were breaking results..."
}
}
catch {
Write-Host $_.ScriptStackTrace
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
# NOTE(review): other scripts in this directory call ExitWithExitCode here;
# plain `exit 1` bypasses that helper — confirm whether that is intentional.
exit 1
}
================================================
FILE: eng/common/sdl/extract-artifact-archives.ps1
================================================
# This script looks for each archive file in a directory and extracts it into the target directory.
# For example, the file "$InputPath/bin.tar.gz" extracts to "$ExtractPath/bin.tar.gz.extracted/**".
# Uses the "tar" utility added to Windows 10 / Windows 2019 that supports tar.gz and zip.
param(
# Full path to directory where archives are stored.
[Parameter(Mandatory=$true)][string] $InputPath,
# Full path to directory to extract archives into. May be the same as $InputPath.
[Parameter(Mandatory=$true)][string] $ExtractPath
)
$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
# Prevent tools.ps1 from importing the repo's configure-toolset hooks.
$disableConfigureToolsetImport = $true
# Main flow: extract every .tar.gz/.zip under $InputPath in parallel using
# background jobs, each delegating to the Windows-bundled `tar` utility.
try {
# `tools.ps1` checks $ci to perform some actions. Since the SDL
# scripts don't necessarily execute in the same agent that run the
# build.ps1/sh script this variable isn't automatically set.
$ci = $true
. $PSScriptRoot\..\tools.ps1
# Measure-Command reports how long the whole extraction pass took.
Measure-Command {
$jobs = @()
# Find archive files for non-Windows and Windows builds.
$archiveFiles = @(
Get-ChildItem (Join-Path $InputPath "*.tar.gz")
Get-ChildItem (Join-Path $InputPath "*.zip")
)
foreach ($targzFile in $archiveFiles) {
$jobs += Start-Job -ScriptBlock {
$file = $using:targzFile
$fileName = [System.IO.Path]::GetFileName($file)
$extractDir = Join-Path $using:ExtractPath "$fileName.extracted"
New-Item $extractDir -ItemType Directory -Force | Out-Null
Write-Host "Extracting '$file' to '$extractDir'..."
# Pipe errors to stdout to prevent PowerShell detecting them and quitting the job early.
# This type of quit skips the catch, so we wouldn't be able to tell which file triggered the
# error. Save output so it can be stored in the exception string along with context.
$output = tar -xf $file -C $extractDir 2>&1
# Handle NZEC manually rather than using Exit-IfNZEC: we are in a background job, so we
# don't have access to the outer scope.
if ($LASTEXITCODE -ne 0) {
throw "Error extracting '$file': non-zero exit code ($LASTEXITCODE). Output: '$output'"
}
Write-Host "Extracted to $extractDir"
}
}
# Wait for all extraction jobs; Receive surfaces any thrown errors here.
Receive-Job $jobs -Wait
}
}
catch {
Write-Host $_
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
ExitWithExitCode 1
}
================================================
FILE: eng/common/sdl/extract-artifact-packages.ps1
================================================
# extract-artifact-packages.ps1: extracts .dll/.exe/.pdb entries from every
# NuGet package under $InputPath into per-package folders under $ExtractPath.
param(
[Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where artifact packages are stored
[Parameter(Mandatory=$true)][string] $ExtractPath # Full path to directory where the packages will be extracted
)
$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
# Prevent tools.ps1 from importing the repo's configure-toolset hooks.
$disableConfigureToolsetImport = $true
# Finds every NuGet package (*.nupkg) under $InputPath and extracts each one in a
# parallel background job using the $ExtractPackage script block (assigned in the
# main try block before this function is invoked).
function ExtractArtifacts {
  # A missing input directory is treated as "nothing to do", not a failure.
  if (!(Test-Path $InputPath)) {
    Write-Host "Input Path does not exist: $InputPath"
    ExitWithExitCode 0
  }
  $Jobs = @()
  Get-ChildItem "$InputPath\*.nupkg" |
    ForEach-Object {
      # One job per package; the package's full path is the job's only argument.
      $Jobs += Start-Job -ScriptBlock $ExtractPackage -ArgumentList $_.FullName
    }

  # Wait for each extraction job in turn and relay its output to the console.
  foreach ($Job in $Jobs) {
    Wait-Job -Id $Job.Id | Receive-Job
  }
}
try {
  # `tools.ps1` checks $ci to perform some actions. Since the SDL
  # scripts don't necessarily execute in the same agent that run the
  # build.ps1/sh script this variable isn't automatically set.
  $ci = $true
  . $PSScriptRoot\..\tools.ps1

  # Script block executed in a background job per package: opens the .nupkg as a
  # zip archive and extracts only the relevant binaries (.dll/.exe/.pdb) into
  # "<ExtractPath>\<package id>", preserving each entry's relative path.
  $ExtractPackage = {
    param(
      [string] $PackagePath # Full path to a NuGet package
    )

    if (!(Test-Path $PackagePath)) {
      Write-PipelineTelemetryError -Category 'Build' -Message "Input file does not exist: $PackagePath"
      ExitWithExitCode 1
    }

    $RelevantExtensions = @('.dll', '.exe', '.pdb')
    Write-Host -NoNewLine 'Extracting ' ([System.IO.Path]::GetFileName($PackagePath)) '...'

    $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
    $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId

    Add-Type -AssemblyName System.IO.Compression.FileSystem

    [System.IO.Directory]::CreateDirectory($ExtractPath);

    # Initialize before the try so the finally can safely test it: if OpenRead
    # throws, an unassigned $zip would otherwise trip Set-StrictMode and mask
    # the original error with a secondary "variable not set" / null-call failure.
    $zip = $null
    try {
      $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)

      $zip.Entries |
        Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
          ForEach-Object {
            # Recreate the entry's directory structure under the package folder.
            $TargetPath = Join-Path -Path $ExtractPath -ChildPath (Split-Path -Path $_.FullName)
            [System.IO.Directory]::CreateDirectory($TargetPath);

            $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.FullName
            [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile)
          }
    }
    catch {
      Write-Host $_
      Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
      ExitWithExitCode 1
    }
    finally {
      # Only dispose when the archive was actually opened.
      if ($null -ne $zip) {
        $zip.Dispose()
      }
    }
  }

  Measure-Command { ExtractArtifacts }
}
catch {
  Write-Host $_
  Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
  ExitWithExitCode 1
}
================================================
FILE: eng/common/sdl/init-sdl.ps1
================================================
# Initializes a Guardian (.gdn) configuration in $WorkingDirectory and creates a
# 'mainbaseline' baseline so it can be edited later. Exits 0 on success.
Param(
  [string] $GuardianCliLocation,            # Full path to the Guardian CLI executable
  [string] $Repository,                     # AzDO repository name (used only to build $uri below)
  [string] $BranchName='master',            # Branch whose .gdn folder the $uri points at
  [string] $WorkingDirectory,               # Directory in which 'guardian init' runs
  [string] $AzureDevOpsAccessToken,         # PAT used only to build the basic-auth value below
  [string] $GuardianLoggerLevel='Standard'  # Guardian CLI logger verbosity
)

$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
# Do not pull in the toolset-configuration logic when tools.ps1 is dot-sourced below.
$disableConfigureToolsetImport = $true
$global:LASTEXITCODE = 0

# `tools.ps1` checks $ci to perform some actions. Since the SDL
# scripts don't necessarily execute in the same agent that run the
# build.ps1/sh script this variable isn't automatically set.
$ci = $true
. $PSScriptRoot\..\tools.ps1

# Don't display the console progress UI - it's a huge perf hit
$ProgressPreference = 'SilentlyContinue'

# Construct basic auth from AzDO access token; construct URI to the repository's gdn folder stored in that repository; construct location of zip file
# NOTE(review): $encodedPat, $uri, $zipFile and $gdnFolder are computed but never
# used in this script — presumably leftovers from a removed download step; confirm
# before relying on them.
$encodedPat = [Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes(":$AzureDevOpsAccessToken"))
$escapedRepository = [Uri]::EscapeDataString("/$Repository/$BranchName/.gdn")
$uri = "https://dev.azure.com/dnceng/internal/_apis/git/repositories/sdl-tool-cfg/Items?path=$escapedRepository&versionDescriptor[versionOptions]=0&`$format=zip&api-version=5.0"
$zipFile = "$WorkingDirectory/gdn.zip"

Add-Type -AssemblyName System.IO.Compression.FileSystem
$gdnFolder = (Join-Path $WorkingDirectory '.gdn')

try {
  # if the folder does not exist, we'll do a guardian init and push it to the remote repository
  Write-Host 'Initializing Guardian...'
  # Echo the exact command before running it for easier log diagnosis.
  Write-Host "$GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel"
  & $GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel
  if ($LASTEXITCODE -ne 0) {
    Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian init failed with exit code $LASTEXITCODE."
    ExitWithExitCode $LASTEXITCODE
  }
  # We create the mainbaseline so it can be edited later
  Write-Host "$GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline"
  & $GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline
  if ($LASTEXITCODE -ne 0) {
    Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian baseline failed with exit code $LASTEXITCODE."
    ExitWithExitCode $LASTEXITCODE
  }
  ExitWithExitCode 0
}
catch {
  Write-Host $_.ScriptStackTrace
  Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
  ExitWithExitCode 1
}
================================================
FILE: eng/common/sdl/packages.config
================================================
================================================
FILE: eng/common/sdl/run-sdl.ps1
================================================
# Runs 'guardian run' against every *.gdnconfig file stored under <GdnFolder>/r,
# using the 'mainbaseline' baseline created by init-sdl.ps1.
Param(
  [string] $GuardianCliLocation,            # Full path to the Guardian CLI executable
  [string] $WorkingDirectory,               # Guardian working directory
  [string] $GdnFolder,                      # Path to the .gdn configuration folder
  [string] $UpdateBaseline,                 # Passed through to Guardian's --update-baseline
  [string] $GuardianLoggerLevel='Standard'  # Guardian CLI logger verbosity
)

$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
# Do not pull in the toolset-configuration logic when tools.ps1 is dot-sourced below.
$disableConfigureToolsetImport = $true
$global:LASTEXITCODE = 0

try {
  # `tools.ps1` checks $ci to perform some actions. Since the SDL
  # scripts don't necessarily execute in the same agent that run the
  # build.ps1/sh script this variable isn't automatically set.
  $ci = $true
  . $PSScriptRoot\..\tools.ps1

  # We store config files in the r directory of .gdn
  $gdnConfigPath = Join-Path $GdnFolder 'r'
  $ValidPath = Test-Path $GuardianCliLocation

  if ($ValidPath -eq $False)
  {
    Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location."
    ExitWithExitCode 1
  }

  $gdnConfigFiles = Get-ChildItem $gdnConfigPath -Recurse -Include '*.gdnconfig'
  Write-Host "Discovered Guardian config files:"
  $gdnConfigFiles | Out-String | Write-Host

  Exec-BlockVerbosely {
    # '@gdnConfigFiles' splats the discovered config list onto --config.
    & $GuardianCliLocation run `
      --working-directory $WorkingDirectory `
      --baseline mainbaseline `
      --update-baseline $UpdateBaseline `
      --logger-level $GuardianLoggerLevel `
      --config @gdnConfigFiles
    # Propagate a non-zero Guardian exit code as a script failure.
    Exit-IfNZEC "Sdl"
  }
}
catch {
  Write-Host $_.ScriptStackTrace
  Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
  ExitWithExitCode 1
}
================================================
FILE: eng/common/sdl/sdl.ps1
================================================
# Installs the Microsoft.Guardian.Cli NuGet package under $Path and returns the
# full path to the installed guardian.cmd. Emits a pipeline telemetry error and
# exits non-zero when the CLI cannot be located after installation.
function Install-Gdn {
  param(
    # Directory the Guardian package is installed into (and searched for guardian.cmd).
    [Parameter(Mandatory=$true)]
    [string]$Path,

    # If omitted, install the latest version of Guardian, otherwise install that specific version.
    [string]$Version
  )

  $ErrorActionPreference = 'Stop'
  Set-StrictMode -Version 2.0
  # Do not pull in the toolset-configuration logic when tools.ps1 is dot-sourced below.
  $disableConfigureToolsetImport = $true
  $global:LASTEXITCODE = 0

  # `tools.ps1` checks $ci to perform some actions. Since the SDL
  # scripts don't necessarily execute in the same agent that run the
  # build.ps1/sh script this variable isn't automatically set.
  $ci = $true
  . $PSScriptRoot\..\tools.ps1

  $argumentList = @("install", "Microsoft.Guardian.Cli", "-Source https://securitytools.pkgs.visualstudio.com/_packaging/Guardian/nuget/v3/index.json", "-OutputDirectory $Path", "-NonInteractive", "-NoCache")

  if ($Version) {
    $argumentList += "-Version $Version"
  }

  Start-Process nuget -Verbose -ArgumentList $argumentList -NoNewWindow -Wait

  # Take the first match: -Recurse can find more than one guardian.cmd (e.g. when
  # multiple versions are cached under $Path), and '.FullName' on an array would
  # not produce a single usable path.
  $gdnCliPath = Get-ChildItem -Filter guardian.cmd -Recurse -Path $Path | Select-Object -First 1

  if (!$gdnCliPath)
  {
    Write-PipelineTelemetryError -Category 'Sdl' -Message 'Failure installing Guardian'
    # Fail fast: without this the null $gdnCliPath would reach the .FullName
    # access below and surface an unrelated strict-mode error instead of the
    # telemetry message above.
    ExitWithExitCode 1
  }

  return $gdnCliPath.FullName
}
================================================
FILE: eng/common/sdl/trim-assets-version.ps1
================================================
<#
.SYNOPSIS
Install and run the 'Microsoft.DotNet.VersionTools.Cli' tool with the 'trim-artifacts-version' command to trim the version from the NuGet assets file name.

.PARAMETER InputPath
Full path to directory where artifact packages are stored

.PARAMETER Recursive
Search for NuGet packages recursively
#>

Param(
  [string] $InputPath,
  [bool] $Recursive = $true
)

$CliToolName = "Microsoft.DotNet.VersionTools.Cli"

# Installs the VersionTools CLI as a *local* dotnet tool from the dotnet-eng feed,
# creating a tool manifest if the repo doesn't have one. Relies on $dotnet being
# set by the main script body before this function is called.
function Install-VersionTools-Cli {
  param(
    [Parameter(Mandatory=$true)][string]$Version
  )

  Write-Host "Installing the package '$CliToolName' with a version of '$Version' ..."
  $feed = "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json"

  $argumentList = @("tool", "install", "--local", "$CliToolName", "--add-source $feed", "--no-cache", "--version $Version", "--create-manifest-if-needed")
  Start-Process "$dotnet" -Verbose -ArgumentList $argumentList -NoNewWindow -Wait
}

# -------------------------------------------------------------------

$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0

# Do not pull in the toolset-configuration logic when tools.ps1 is dot-sourced below.
$disableConfigureToolsetImport = $true
$global:LASTEXITCODE = 0

# `tools.ps1` checks $ci to perform some actions. Since the SDL
# scripts don't necessarily execute in the same agent that run the
# build.ps1/sh script this variable isn't automatically set.
$ci = $true
. $PSScriptRoot\..\tools.ps1

# Validate the input path only AFTER tools.ps1 is imported: ExitWithExitCode is
# defined there, so calling it any earlier would itself fail with an unknown-
# command error instead of the intended message.
if (!(Test-Path $InputPath)) {
  Write-Host "Input Path '$InputPath' does not exist"
  ExitWithExitCode 1
}

try {
  $dotnetRoot = InitializeDotNetCli -install:$true
  $dotnet = "$dotnetRoot\dotnet.exe"

  $toolsetVersion = Read-ArcadeSdkVersion
  Install-VersionTools-Cli -Version $toolsetVersion

  # Verify the tool actually landed in the local manifest before invoking it.
  $cliToolFound = (& "$dotnet" tool list --local | Where-Object {$_.Split(' ')[0] -eq $CliToolName})
  if ($null -eq $cliToolFound) {
    Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "The '$CliToolName' tool is not installed."
    ExitWithExitCode 1
  }

  Exec-BlockVerbosely {
    & "$dotnet" $CliToolName trim-assets-version `
      --assets-path $InputPath `
      --recursive $Recursive
    # Propagate a non-zero tool exit code as a script failure.
    Exit-IfNZEC "Sdl"
  }
}
catch {
  Write-Host $_
  Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
  ExitWithExitCode 1
}
================================================
FILE: eng/common/template-guidance.md
================================================
# Overview
Arcade provides templates for public (`/templates`) and 1ES pipeline templates (`/templates-official`) scenarios. Pipelines which are required to be managed by 1ES pipeline templates should reference `/templates-official`; all other pipelines may reference `/templates`.
## How to use
Basic guidance is:
- 1ES Pipeline Template or 1ES Microbuild template runs should reference `eng/common/templates-official`. Any internal production-graded pipeline should use these templates.
- All other runs should reference `eng/common/templates`.
See [azure-pipelines.yml](../../azure-pipelines.yml) (templates-official example) or [azure-pipelines-pr.yml](../../azure-pipelines-pr.yml) (templates example) for examples.
#### The `templateIs1ESManaged` parameter
The `templateIs1ESManaged` parameter is available on most templates and affects which of the variants is used for nested templates. See [Development Notes](#development-notes) below for more information on the `templateIs1ESManaged` parameter.
- For templates under `job/`, `jobs/`, `steps`, or `post-build/`, this parameter must be explicitly set.
## Multiple outputs
1ES pipeline templates impose a policy where every publish artifact execution results in additional security scans being injected into your pipeline. When using `templates-official/jobs/jobs.yml`, Arcade reduces the number of additional security injections by gathering all publishing outputs into the [Build.ArtifactStagingDirectory](https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#build-variables-devops-services), and utilizing the [outputParentDirectory](https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/outputs#multiple-outputs) feature of 1ES pipeline templates. When implementing your pipeline, if you ensure publish artifacts are located in the `$(Build.ArtifactStagingDirectory)`, and utilize the 1ES provided template context, then you can reduce the number of security scans for your pipeline.
Example:
``` yaml
# azure-pipelines.yml
extends:
template: azure-pipelines/MicroBuild.1ES.Official.yml@MicroBuildTemplate
parameters:
stages:
- stage: build
jobs:
- template: /eng/common/templates-official/jobs/jobs.yml@self
parameters:
# 1ES makes use of outputs to reduce security task injection overhead
templateContext:
outputs:
- output: pipelineArtifact
displayName: 'Publish logs from source'
continueOnError: true
condition: always()
targetPath: $(Build.ArtifactStagingDirectory)/artifacts/log
artifactName: Logs
jobs:
- job: Windows
steps:
- script: echo "friendly neighborhood" > artifacts/marvel/spiderman.txt
# copy build outputs to artifact staging directory for publishing
- task: CopyFiles@2
displayName: Gather build output
inputs:
SourceFolder: '$(System.DefaultWorkingDirectory)/artifacts/marvel'
Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/marvel'
```
Note: Multiple outputs are ONLY applicable to 1ES PT publishing (only usable when referencing `templates-official`).
## Development notes
**Folder / file structure**
``` text
eng\common\
[templates || templates-official]\
job\
job.yml (shim + artifact publishing logic)
onelocbuild.yml (shim)
publish-build-assets.yml (shim)
source-build.yml (shim)
source-index-stage1.yml (shim)
jobs\
codeql-build.yml (shim)
jobs.yml (shim)
source-build.yml (shim)
post-build\
post-build.yml (shim)
      common-variables.yml (shim)
setup-maestro-vars.yml (shim)
steps\
publish-build-artifacts.yml (logic)
publish-pipeline-artifacts.yml (logic)
component-governance.yml (shim)
publish-logs.yml (shim)
retain-build.yml (shim)
send-to-helix.yml (shim)
source-build.yml (shim)
variables\
pool-providers.yml (logic + redirect) # templates/variables/pool-providers.yml will redirect to templates-official/variables/pool-providers.yml if you are running in the internal project
sdl-variables.yml (logic)
core-templates\
job\
job.yml (logic)
onelocbuild.yml (logic)
publish-build-assets.yml (logic)
source-build.yml (logic)
source-index-stage1.yml (logic)
jobs\
codeql-build.yml (logic)
jobs.yml (logic)
source-build.yml (logic)
post-build\
      common-variables.yml (logic)
post-build.yml (logic)
setup-maestro-vars.yml (logic)
steps\
component-governance.yml (logic)
publish-build-artifacts.yml (redirect)
publish-logs.yml (logic)
publish-pipeline-artifacts.yml (redirect)
retain-build.yml (logic)
send-to-helix.yml (logic)
source-build.yml (logic)
variables\
pool-providers.yml (redirect)
```
In the table above, a file is designated as "shim", "logic", or "redirect".
- shim - represents a yaml file which is an intermediate step between pipeline logic and .Net Core Engineering's templates (`core-templates`) and defines the `is1ESPipeline` parameter value.
- logic - represents actual base template logic.
- redirect - represents a file in `core-templates` which redirects to the "logic" file in either `templates` or `templates-official`.
Logic for Arcade's templates live **primarily** in the `core-templates` folder. The exceptions to the location of the logic files are around artifact publishing, which is handled differently between 1es pipeline templates and standard templates. `templates` and `templates-official` provide shim entry points which redirect to `core-templates` while also defining the `is1ESPipeline` parameter. If a shim is referenced in `templates`, then `is1ESPipeline` is set to `false`. If a shim is referenced in `templates-official`, then `is1ESPipeline` is set to `true`.
Within `templates` and `templates-official`, the templates at the "stages", and "jobs" / "job" level have been replaced with shims. Templates at the "steps" and "variables" level are typically too granular to be replaced with shims and instead persist logic which is directly applicable to either scenario.
Within `core-templates`, there are a handful of places where logic is dependent on which shim entry point was used. In those places, we redirect back to the respective logic file in `templates` or `templates-official`.
================================================
FILE: eng/common/templates/job/job.yml
================================================
# Shim for non-1ES pipelines: forwards caller parameters to the core job template
# with is1ESPipeline pinned to false, and defines the non-1ES artifact publishing
# steps (pipeline artifacts for build outputs and logs).
parameters:
  enablePublishBuildArtifacts: false
  runAsPublic: false
  # CG related params, unused now and can eventually be removed
  disableComponentGovernance: unused
  # Sbom related params, unused now and can eventually be removed
  enableSbom: unused
  PackageVersion: unused
  BuildDropPath: unused

jobs:
- template: /eng/common/core-templates/job/job.yml
  parameters:
    is1ESPipeline: false

    # Forward everything except 'steps' (handled explicitly below) and
    # 'is1ESPipeline' (pinned above).
    ${{ each parameter in parameters }}:
      ${{ if and(ne(parameter.key, 'steps'), ne(parameter.key, 'is1ESPipeline')) }}:
        ${{ parameter.key }}: ${{ parameter.value }}

    steps:
    - ${{ each step in parameters.steps }}:
      - ${{ step }}

    # we don't run CG in public
    - ${{ if eq(variables['System.TeamProject'], 'public') }}:
      - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
        displayName: Set skipComponentGovernanceDetection variable

    artifactPublishSteps:
    - ${{ if ne(parameters.artifacts.publish, '') }}:
      - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
        # Publish build artifacts on success; a separate attempt-suffixed
        # artifact is published on failure so retries don't collide.
        - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
          parameters:
            is1ESPipeline: false
            args:
              displayName: Publish pipeline artifacts
              targetPath: '$(Build.ArtifactStagingDirectory)/artifacts'
              artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
              continueOnError: true
              condition: succeeded()
              retryCountOnTaskFailure: 10 # for any files being locked
        - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
          parameters:
            is1ESPipeline: false
            args:
              displayName: Publish pipeline artifacts
              targetPath: '$(Build.ArtifactStagingDirectory)/artifacts'
              artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}_Attempt$(System.JobAttempt)
              continueOnError: true
              condition: not(succeeded())
              retryCountOnTaskFailure: 10 # for any files being locked
      - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
        - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
          parameters:
            is1ESPipeline: false
            args:
              targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log'
              artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
              displayName: 'Publish logs'
              continueOnError: true
              condition: always()
              retryCountOnTaskFailure: 10 # for any files being locked
    - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
      - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
        parameters:
          is1ESPipeline: false
          args:
            displayName: Publish Logs
            targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
            artifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)_Attempt$(System.JobAttempt)' ) }}
            continueOnError: true
            condition: always()
            retryCountOnTaskFailure: 10 # for any files being locked
    - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
      - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
        parameters:
          is1ESPipeline: false
          args:
            targetPath: '$(System.DefaultWorkingDirectory)\eng\common\BuildConfiguration'
            artifactName: 'BuildConfiguration'
            displayName: 'Publish build retry configuration'
            continueOnError: true
            retryCountOnTaskFailure: 10 # for any files being locked
================================================
FILE: eng/common/templates/job/onelocbuild.yml
================================================
# Shim for non-1ES pipelines: forwards all parameters to the core template with
# is1ESPipeline pinned to false.
jobs:
- template: /eng/common/core-templates/job/onelocbuild.yml
  parameters:
    is1ESPipeline: false

    ${{ each parameter in parameters }}:
      ${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates/job/publish-build-assets.yml
================================================
# Shim for non-1ES pipelines: forwards all parameters to the core template with
# is1ESPipeline pinned to false.
jobs:
- template: /eng/common/core-templates/job/publish-build-assets.yml
  parameters:
    is1ESPipeline: false

    ${{ each parameter in parameters }}:
      ${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates/job/source-build.yml
================================================
# Shim for non-1ES pipelines: forwards all parameters to the core template with
# is1ESPipeline pinned to false.
jobs:
- template: /eng/common/core-templates/job/source-build.yml
  parameters:
    is1ESPipeline: false

    ${{ each parameter in parameters }}:
      ${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates/job/source-index-stage1.yml
================================================
# Shim for non-1ES pipelines: forwards all parameters to the core template with
# is1ESPipeline pinned to false.
jobs:
- template: /eng/common/core-templates/job/source-index-stage1.yml
  parameters:
    is1ESPipeline: false

    ${{ each parameter in parameters }}:
      ${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates/jobs/codeql-build.yml
================================================
# Shim for non-1ES pipelines: forwards all parameters to the core template with
# is1ESPipeline pinned to false.
jobs:
- template: /eng/common/core-templates/jobs/codeql-build.yml
  parameters:
    is1ESPipeline: false

    ${{ each parameter in parameters }}:
      ${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates/jobs/jobs.yml
================================================
# Shim for non-1ES pipelines: forwards all parameters to the core template with
# is1ESPipeline pinned to false.
jobs:
- template: /eng/common/core-templates/jobs/jobs.yml
  parameters:
    is1ESPipeline: false

    ${{ each parameter in parameters }}:
      ${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates/jobs/source-build.yml
================================================
# Shim for non-1ES pipelines: forwards all parameters to the core template with
# is1ESPipeline pinned to false.
jobs:
- template: /eng/common/core-templates/jobs/source-build.yml
  parameters:
    is1ESPipeline: false

    ${{ each parameter in parameters }}:
      ${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates/post-build/common-variables.yml
================================================
# Shim for non-1ES pipelines: forwards all parameters to the core template with
# is1ESPipeline pinned to false.
variables:
- template: /eng/common/core-templates/post-build/common-variables.yml
  parameters:
    # Specifies whether to use 1ES
    is1ESPipeline: false

    ${{ each parameter in parameters }}:
      ${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates/post-build/post-build.yml
================================================
# Shim for non-1ES pipelines: forwards all parameters to the core template with
# is1ESPipeline pinned to false.
stages:
- template: /eng/common/core-templates/post-build/post-build.yml
  parameters:
    # Specifies whether to use 1ES
    is1ESPipeline: false

    ${{ each parameter in parameters }}:
      ${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates/post-build/setup-maestro-vars.yml
================================================
# Shim for non-1ES pipelines: forwards all parameters to the core template with
# is1ESPipeline pinned to false.
steps:
- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
  parameters:
    # Specifies whether to use 1ES
    is1ESPipeline: false

    ${{ each parameter in parameters }}:
      ${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates/steps/enable-internal-runtimes.yml
================================================
# Obtains internal runtime download credentials and populates the 'dotnetbuilds-internal-container-read-token-base64'
# variable with the base64-encoded SAS token, by default
# Shim for non-1ES pipelines: forwards all parameters to the core template with
# is1ESPipeline pinned to false.
steps:
- template: /eng/common/core-templates/steps/enable-internal-runtimes.yml
  parameters:
    is1ESPipeline: false

    ${{ each parameter in parameters }}:
      ${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates/steps/enable-internal-sources.yml
================================================
# Shim for non-1ES pipelines: forwards all parameters to the core template with
# is1ESPipeline pinned to false.
steps:
- template: /eng/common/core-templates/steps/enable-internal-sources.yml
  parameters:
    is1ESPipeline: false

    ${{ each parameter in parameters }}:
      ${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates/steps/generate-sbom.yml
================================================
# Shim for non-1ES pipelines: forwards all parameters to the core template with
# is1ESPipeline pinned to false.
steps:
- template: /eng/common/core-templates/steps/generate-sbom.yml
  parameters:
    is1ESPipeline: false

    ${{ each parameter in parameters }}:
      ${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates/steps/get-delegation-sas.yml
================================================
# Shim for non-1ES pipelines: forwards all parameters to the core template with
# is1ESPipeline pinned to false.
steps:
- template: /eng/common/core-templates/steps/get-delegation-sas.yml
  parameters:
    is1ESPipeline: false

    ${{ each parameter in parameters }}:
      ${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates/steps/get-federated-access-token.yml
================================================
# Shim for non-1ES pipelines: forwards all parameters to the core template with
# is1ESPipeline pinned to false.
steps:
- template: /eng/common/core-templates/steps/get-federated-access-token.yml
  parameters:
    is1ESPipeline: false

    ${{ each parameter in parameters }}:
      ${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates/steps/publish-build-artifacts.yml
================================================
# Non-1ES implementation of the publish-build-artifacts step: wraps the
# PublishBuildArtifacts@1 task. Fails fast (via an invalid mapping key) if a
# 1ES-managed pipeline references it by mistake.
parameters:
- name: is1ESPipeline
  type: boolean
  default: false

- name: displayName
  type: string
  default: 'Publish to Build Artifact'

- name: condition
  type: string
  default: succeeded()

- name: artifactName
  type: string

- name: pathToPublish
  type: string

- name: continueOnError
  type: boolean
  default: false

- name: publishLocation
  type: string
  default: 'Container'

- name: retryCountOnTaskFailure
  type: string
  default: 10

steps:
- ${{ if eq(parameters.is1ESPipeline, true) }}:
  # Deliberately invalid step: surfaces a template-expansion error when this
  # non-1ES template is referenced from a 1ES managed pipeline.
  - 'eng/common/templates cannot be referenced from a 1ES managed template': error
- task: PublishBuildArtifacts@1
  displayName: ${{ parameters.displayName }}
  condition: ${{ parameters.condition }}
  ${{ if parameters.continueOnError }}:
    continueOnError: ${{ parameters.continueOnError }}
  inputs:
    PublishLocation: ${{ parameters.publishLocation }}
    PathtoPublish: ${{ parameters.pathToPublish }}
    ${{ if parameters.artifactName }}:
      ArtifactName: ${{ parameters.artifactName }}
    ${{ if parameters.retryCountOnTaskFailure }}:
      retryCountOnTaskFailure: ${{ parameters.retryCountOnTaskFailure }}
================================================
FILE: eng/common/templates/steps/publish-logs.yml
================================================
# Shim for non-1ES pipelines: forwards all parameters to the core template with
# is1ESPipeline pinned to false.
steps:
- template: /eng/common/core-templates/steps/publish-logs.yml
  parameters:
    is1ESPipeline: false

    ${{ each parameter in parameters }}:
      ${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates/steps/publish-pipeline-artifacts.yml
================================================
# Non-1ES implementation of the publish-pipeline-artifacts step: wraps the
# PublishPipelineArtifact@1 task, passing through only the 'args' keys the
# caller actually supplied. Fails fast if referenced from a 1ES pipeline.
parameters:
- name: is1ESPipeline
  type: boolean
  default: false

# Free-form bag of task inputs (displayName, targetPath, artifactName, ...);
# only keys present in the bag are emitted onto the task below.
- name: args
  type: object
  default: {}

steps:
- ${{ if eq(parameters.is1ESPipeline, true) }}:
  # Deliberately invalid step: surfaces a template-expansion error when this
  # non-1ES template is referenced from a 1ES managed pipeline.
  - 'eng/common/templates cannot be referenced from a 1ES managed template': error
- task: PublishPipelineArtifact@1
  displayName: ${{ coalesce(parameters.args.displayName, 'Publish to Build Artifact') }}
  ${{ if parameters.args.condition }}:
    condition: ${{ parameters.args.condition }}
  ${{ else }}:
    condition: succeeded()
  ${{ if parameters.args.continueOnError }}:
    continueOnError: ${{ parameters.args.continueOnError }}
  inputs:
    targetPath: ${{ parameters.args.targetPath }}
    ${{ if parameters.args.artifactName }}:
      artifactName: ${{ parameters.args.artifactName }}
    ${{ if parameters.args.publishLocation }}:
      publishLocation: ${{ parameters.args.publishLocation }}
    ${{ if parameters.args.fileSharePath }}:
      fileSharePath: ${{ parameters.args.fileSharePath }}
    ${{ if parameters.args.Parallel }}:
      parallel: ${{ parameters.args.Parallel }}
    ${{ if parameters.args.parallelCount }}:
      parallelCount: ${{ parameters.args.parallelCount }}
    ${{ if parameters.args.properties }}:
      properties: ${{ parameters.args.properties }}
================================================
FILE: eng/common/templates/steps/retain-build.yml
================================================
# Shim for non-1ES pipelines: forwards all parameters to the core template with
# is1ESPipeline pinned to false.
steps:
- template: /eng/common/core-templates/steps/retain-build.yml
  parameters:
    is1ESPipeline: false

    ${{ each parameter in parameters }}:
      ${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates/steps/send-to-helix.yml
================================================
# Shim for non-1ES pipelines: forwards all parameters to the core template with
# is1ESPipeline pinned to false.
steps:
- template: /eng/common/core-templates/steps/send-to-helix.yml
  parameters:
    is1ESPipeline: false

    ${{ each parameter in parameters }}:
      ${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates/steps/source-build.yml
================================================
# Shim for non-1ES pipelines: forwards all parameters to the core template with
# is1ESPipeline pinned to false.
steps:
- template: /eng/common/core-templates/steps/source-build.yml
  parameters:
    is1ESPipeline: false

    ${{ each parameter in parameters }}:
      ${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates/steps/source-index-stage1-publish.yml
================================================
# Shim for non-1ES pipelines: forwards all parameters to the core template with
# is1ESPipeline pinned to false.
steps:
- template: /eng/common/core-templates/steps/source-index-stage1-publish.yml
  parameters:
    is1ESPipeline: false

    ${{ each parameter in parameters }}:
      ${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates/steps/vmr-sync.yml
================================================
### These steps synchronize new code from product repositories into the VMR (https://github.com/dotnet/dotnet).
### They initialize the darc CLI and pull the new updates.
### Changes are applied locally onto the already cloned VMR (located in $vmrPath).
parameters:
- name: targetRef
displayName: Target revision in dotnet/ to synchronize
type: string
default: $(Build.SourceVersion)
- name: vmrPath
displayName: Path where the dotnet/dotnet is checked out to
type: string
default: $(Agent.BuildDirectory)/vmr
- name: additionalSyncs
displayName: Optional list of package names whose repo's source will also be synchronized in the local VMR, e.g. NuGet.Protocol
type: object
default: []
steps:
- checkout: vmr
displayName: Clone dotnet/dotnet
path: vmr
clean: true
- checkout: self
displayName: Clone $(Build.Repository.Name)
path: repo
fetchDepth: 0
# This step is needed so that when we get a detached HEAD / shallow clone,
# we still pull the commit into the temporary repo clone to use it during the sync.
# Also unshallow the clone so that forwardflow command would work.
- script: |
git branch repo-head
git rev-parse HEAD
displayName: Label PR commit
workingDirectory: $(Agent.BuildDirectory)/repo
- script: |
git config --global user.name "dotnet-maestro[bot]"
git config --global user.email "dotnet-maestro[bot]@users.noreply.github.com"
displayName: Set git author to dotnet-maestro[bot]
workingDirectory: ${{ parameters.vmrPath }}
- script: |
./eng/common/vmr-sync.sh \
--vmr ${{ parameters.vmrPath }} \
--tmp $(Agent.TempDirectory) \
--azdev-pat '$(dn-bot-all-orgs-code-r)' \
--ci \
--debug
if [ "$?" -ne 0 ]; then
echo "##vso[task.logissue type=error]Failed to synchronize the VMR"
exit 1
fi
displayName: Sync repo into VMR (Unix)
condition: ne(variables['Agent.OS'], 'Windows_NT')
workingDirectory: $(Agent.BuildDirectory)/repo
- script: |
git config --global diff.astextplain.textconv echo
git config --system core.longpaths true
displayName: Configure Windows git (longpaths, astextplain)
condition: eq(variables['Agent.OS'], 'Windows_NT')
- powershell: |
./eng/common/vmr-sync.ps1 `
-vmr ${{ parameters.vmrPath }} `
-tmp $(Agent.TempDirectory) `
-azdevPat '$(dn-bot-all-orgs-code-r)' `
-ci `
-debugOutput
if ($LASTEXITCODE -ne 0) {
echo "##vso[task.logissue type=error]Failed to synchronize the VMR"
exit 1
}
displayName: Sync repo into VMR (Windows)
condition: eq(variables['Agent.OS'], 'Windows_NT')
workingDirectory: $(Agent.BuildDirectory)/repo
- ${{ if eq(variables['Build.Reason'], 'PullRequest') }}:
- task: CopyFiles@2
displayName: Collect failed patches
condition: failed()
inputs:
SourceFolder: '$(Agent.TempDirectory)'
Contents: '*.patch'
TargetFolder: '$(Build.ArtifactStagingDirectory)/FailedPatches'
- publish: '$(Build.ArtifactStagingDirectory)/FailedPatches'
artifact: $(System.JobDisplayName)_FailedPatches
displayName: Upload failed patches
condition: failed()
- ${{ each assetName in parameters.additionalSyncs }}:
# The vmr-sync script ends up staging files in the local VMR so we have to commit those
- script:
git commit --allow-empty -am "Forward-flow $(Build.Repository.Name)"
displayName: Commit local VMR changes
workingDirectory: ${{ parameters.vmrPath }}
- script: |
    set -ex

    echo "Searching for details of asset ${{ assetName }}..."

    # Use darc to get dependencies information
    dependencies=$(./.dotnet/dotnet darc get-dependencies --name '${{ assetName }}' --ci)

    # Extract repository URL and commit hash
    repository=$(echo "$dependencies" | grep 'Repo:' | sed 's/Repo:[[:space:]]*//' | head -1)
    if [ -z "$repository" ]; then
      echo "##vso[task.logissue type=error]Asset ${{ assetName }} not found in the dependency list"
      exit 1
    fi
    commit=$(echo "$dependencies" | grep 'Commit:' | sed 's/Commit:[[:space:]]*//' | head -1)

    echo "Updating the VMR from $repository / $commit..."
    cd ..
    git clone $repository ${{ assetName }}
    cd ${{ assetName }}
    git checkout $commit
    git branch "sync/$commit"

    # Run the sync inside "if !" so that, under "set -e", a failure still reaches the
    # logissue reporting below. (A bare invocation followed by a `$?` check is dead code
    # with errexit enabled: the script would abort at the failing command first.)
    if ! ./eng/common/vmr-sync.sh \
      --vmr ${{ parameters.vmrPath }} \
      --tmp $(Agent.TempDirectory) \
      --azdev-pat '$(dn-bot-all-orgs-code-r)' \
      --ci \
      --debug; then
      echo "##vso[task.logissue type=error]Failed to synchronize the VMR"
      exit 1
    fi
  displayName: Sync ${{ assetName }} into VMR (Unix)
  condition: ne(variables['Agent.OS'], 'Windows_NT')
  workingDirectory: $(Agent.BuildDirectory)/repo
- powershell: |
    $ErrorActionPreference = 'Stop'
    Write-Host "Searching for details of asset ${{ assetName }}..."

    # Use darc to get dependency information for the asset
    $dependencies = .\.dotnet\dotnet darc get-dependencies --name '${{ assetName }}' --ci

    # Extract the repository URL. Check for "asset not found" BEFORE touching $matches:
    # indexing $matches[1] after a failed -match would throw (or read stale data) and
    # mask the intended error message.
    $repositoryLine = $dependencies | Select-String -Pattern 'Repo:\s+([^\s]+)' | Select-Object -First 1
    if ($null -eq $repositoryLine) {
      Write-Error "Asset ${{ assetName }} not found in the dependency list"
      exit 1
    }
    $repositoryLine -match 'Repo:\s+([^\s]+)' | Out-Null
    $repository = $matches[1]

    # Extract the commit hash the VMR should be synchronized to
    $commitLine = $dependencies | Select-String -Pattern 'Commit:\s+([^\s]+)' | Select-Object -First 1
    $commitLine -match 'Commit:\s+([^\s]+)' | Out-Null
    $commit = $matches[1]

    Write-Host "Updating the VMR from $repository / $commit..."
    cd ..
    git clone $repository ${{ assetName }}
    cd ${{ assetName }}
    git checkout $commit
    git branch "sync/$commit"

    .\eng\common\vmr-sync.ps1 `
      -vmr ${{ parameters.vmrPath }} `
      -tmp $(Agent.TempDirectory) `
      -azdevPat '$(dn-bot-all-orgs-code-r)' `
      -ci `
      -debugOutput

    if ($LASTEXITCODE -ne 0) {
      echo "##vso[task.logissue type=error]Failed to synchronize the VMR"
      exit 1
    }
  displayName: Sync ${{ assetName }} into VMR (Windows)
  # Fixed: this is the Windows leg, so the condition must be eq (it was ne, matching the
  # Unix step above — the Windows PowerShell step could never run on Windows and would
  # instead be attempted on non-Windows agents). Compare the earlier repo-sync pair,
  # which correctly uses eq for its Windows step.
  condition: eq(variables['Agent.OS'], 'Windows_NT')
  workingDirectory: $(Agent.BuildDirectory)/repo
================================================
FILE: eng/common/templates/variables/pool-providers.yml
================================================
# Select a pool provider based off branch name. Anything with branch name containing 'release' must go into an -Svc pool,
# otherwise it should go into the "normal" pools. This separates out the queueing and billing of released branches.
# Motivation:
# Once a given branch of a repository's output has been officially "shipped" once, it is then considered to be COGS
# (Cost of goods sold) and should be moved to a servicing pool provider. This allows both separation of queueing
# (allowing release builds and main PR builds to not interfere with each other) and billing (required for COGS).
# Additionally, the pool provider name itself may be subject to change when the .NET Core Engineering Services
# team needs to move resources around and create new and potentially differently-named pools. Using this template
# file from an Arcade-ified repo helps guard against both having to update one's release/* branches and renaming.
# How to use:
# This yaml assumes your shipped product branches use the naming convention "release/..." (which many do).
# If we find alternate naming conventions in broad usage it can be added to the condition below.
#
# First, import the template in an arcade-ified repo to pick up the variables, e.g.:
#
# variables:
# - template: /eng/common/templates/variables/pool-providers.yml
#
# ... then anywhere specifying the pool provider use the runtime variables,
# $(DncEngInternalBuildPool) and $(DncEngPublicBuildPool), e.g.:
#
# pool:
# name: $(DncEngInternalBuildPool)
# demands: ImageOverride -equals windows.vs2026.amd64
variables:
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- template: /eng/common/templates-official/variables/pool-providers.yml
- ${{ else }}:
# Coalesce the target and source branches so we know when a PR targets a release branch
# If these variables are somehow missing, fall back to main (tends to have more capacity)
# Any new -Svc alternative pools should have variables added here to allow for splitting work
- name: DncEngPublicBuildPool
value: $[
replace(
replace(
eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'),
True,
'NetCore-Svc-Public'
),
False,
'NetCore-Public'
)
]
- name: DncEngInternalBuildPool
value: $[
replace(
replace(
eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'),
True,
'NetCore1ESPool-Svc-Internal'
),
False,
'NetCore1ESPool-Internal'
)
]
================================================
FILE: eng/common/templates/vmr-build-pr.yml
================================================
# This pipeline is used for running the VMR verification of the PR changes in repo-level PRs.
#
# It will run a full set of verification jobs defined in:
# https://github.com/dotnet/dotnet/blob/10060d128e3f470e77265f8490f5e4f72dae738e/eng/pipelines/templates/stages/vmr-build.yml#L27-L38
#
# For repos that do not need to run the full set, you would do the following:
#
# 1. Copy this YML file to a repo-specific location, i.e. outside of eng/common.
#
# 2. Add `verifications` parameter to VMR template reference
#
# Examples:
# - For source-build stage 1 verification, add the following:
# verifications: [ "source-build-stage1" ]
#
# - For Windows only verifications, add the following:
# verifications: [ "unified-build-windows-x64", "unified-build-windows-x86" ]
trigger: none
pr: none
variables:
- template: /eng/common/templates/variables/pool-providers.yml@self
- name: skipComponentGovernanceDetection # we run CG on internal builds only
value: true
- name: Codeql.Enabled # we run CodeQL on internal builds only
value: false
resources:
repositories:
- repository: vmr
type: github
name: dotnet/dotnet
endpoint: dotnet
ref: refs/heads/main # Set to whatever VMR branch the PR build should insert into
stages:
- template: /eng/pipelines/templates/stages/vmr-build.yml@vmr
parameters:
isBuiltFromVmr: false
scope: lite
================================================
FILE: eng/common/templates-official/job/job.yml
================================================
parameters:
runAsPublic: false
# Sbom related params, unused now and can eventually be removed
enableSbom: unused
PackageVersion: unused
BuildDropPath: unused
jobs:
- template: /eng/common/core-templates/job/job.yml
parameters:
is1ESPipeline: true
# publish artifacts
# for 1ES managed templates, use the templateContext.output to handle multiple outputs.
templateContext:
outputParentDirectory: $(Build.ArtifactStagingDirectory)
outputs:
- ${{ if ne(parameters.artifacts.publish, '') }}:
- ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
- output: pipelineArtifact
displayName: Publish pipeline artifacts
targetPath: '$(Build.ArtifactStagingDirectory)/artifacts'
artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
condition: succeeded()
retryCountOnTaskFailure: 10 # for any files being locked
continueOnError: true
- output: pipelineArtifact
displayName: Publish pipeline artifacts
targetPath: '$(Build.ArtifactStagingDirectory)/artifacts'
artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}_Attempt$(System.JobAttempt)
condition: not(succeeded())
retryCountOnTaskFailure: 10 # for any files being locked
continueOnError: true
- ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- output: pipelineArtifact
targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log'
artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)_Attempt$(System.JobAttempt)') }}
displayName: 'Publish logs'
continueOnError: true
condition: always()
retryCountOnTaskFailure: 10 # for any files being locked
isProduction: false # logs are non-production artifacts
- ${{ if eq(parameters.enablePublishBuildArtifacts, true) }}:
- output: pipelineArtifact
displayName: Publish Logs
targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
artifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)_Attempt$(System.JobAttempt)' ) }}
continueOnError: true
condition: always()
retryCountOnTaskFailure: 10 # for any files being locked
isProduction: false # logs are non-production artifacts
- ${{ if eq(parameters.enableBuildRetry, 'true') }}:
- output: pipelineArtifact
targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/eng/common/BuildConfiguration'
artifactName: 'BuildConfiguration'
displayName: 'Publish build retry configuration'
continueOnError: true
retryCountOnTaskFailure: 10 # for any files being locked
isProduction: false # BuildConfiguration is a non-production artifact
# V4 publishing: automatically publish staged artifacts as a pipeline artifact.
# The artifact name matches the SDK's FutureArtifactName ($(System.PhaseName)_Artifacts),
# which is encoded in the asset manifest for downstream publishing to discover.
# Jobs can opt in by setting enablePublishing: true.
- ${{ if and(eq(parameters.publishingVersion, 4), eq(parameters.enablePublishing, 'true')) }}:
- output: pipelineArtifact
displayName: 'Publish V4 pipeline artifacts'
targetPath: '$(Build.ArtifactStagingDirectory)/artifacts'
artifactName: '$(System.PhaseName)_Artifacts'
continueOnError: true
retryCountOnTaskFailure: 10 # for any files being locked
# add any outputs provided via root yaml
- ${{ if ne(parameters.templateContext.outputs, '') }}:
- ${{ each output in parameters.templateContext.outputs }}:
- ${{ output }}
# add any remaining templateContext properties
${{ each context in parameters.templateContext }}:
${{ if and(ne(context.key, 'outputParentDirectory'), ne(context.key, 'outputs')) }}:
${{ context.key }}: ${{ context.value }}
${{ each parameter in parameters }}:
${{ if and(ne(parameter.key, 'templateContext'), ne(parameter.key, 'is1ESPipeline')) }}:
${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates-official/job/onelocbuild.yml
================================================
jobs:
- template: /eng/common/core-templates/job/onelocbuild.yml
parameters:
is1ESPipeline: true
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates-official/job/publish-build-assets.yml
================================================
jobs:
- template: /eng/common/core-templates/job/publish-build-assets.yml
parameters:
is1ESPipeline: true
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates-official/job/source-build.yml
================================================
jobs:
- template: /eng/common/core-templates/job/source-build.yml
parameters:
is1ESPipeline: true
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates-official/job/source-index-stage1.yml
================================================
jobs:
- template: /eng/common/core-templates/job/source-index-stage1.yml
parameters:
is1ESPipeline: true
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates-official/jobs/codeql-build.yml
================================================
jobs:
- template: /eng/common/core-templates/jobs/codeql-build.yml
parameters:
is1ESPipeline: true
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates-official/jobs/jobs.yml
================================================
jobs:
- template: /eng/common/core-templates/jobs/jobs.yml
parameters:
is1ESPipeline: true
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates-official/jobs/source-build.yml
================================================
jobs:
- template: /eng/common/core-templates/jobs/source-build.yml
parameters:
is1ESPipeline: true
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates-official/post-build/common-variables.yml
================================================
variables:
- template: /eng/common/core-templates/post-build/common-variables.yml
parameters:
# Specifies whether to use 1ES
is1ESPipeline: true
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates-official/post-build/post-build.yml
================================================
stages:
- template: /eng/common/core-templates/post-build/post-build.yml
parameters:
# Specifies whether to use 1ES
is1ESPipeline: true
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates-official/post-build/setup-maestro-vars.yml
================================================
steps:
- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
parameters:
# Specifies whether to use 1ES
is1ESPipeline: true
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates-official/steps/enable-internal-runtimes.yml
================================================
# Obtains internal runtime download credentials and populates the 'dotnetbuilds-internal-container-read-token-base64'
# variable with the base64-encoded SAS token, by default
steps:
- template: /eng/common/core-templates/steps/enable-internal-runtimes.yml
parameters:
is1ESPipeline: true
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates-official/steps/enable-internal-sources.yml
================================================
steps:
- template: /eng/common/core-templates/steps/enable-internal-sources.yml
parameters:
is1ESPipeline: true
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates-official/steps/generate-sbom.yml
================================================
steps:
- template: /eng/common/core-templates/steps/generate-sbom.yml
parameters:
is1ESPipeline: true
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates-official/steps/get-delegation-sas.yml
================================================
steps:
- template: /eng/common/core-templates/steps/get-delegation-sas.yml
parameters:
is1ESPipeline: true
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates-official/steps/get-federated-access-token.yml
================================================
steps:
- template: /eng/common/core-templates/steps/get-federated-access-token.yml
parameters:
is1ESPipeline: true
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates-official/steps/publish-build-artifacts.yml
================================================
parameters:
- name: displayName
type: string
default: 'Publish to Build Artifact'
- name: condition
type: string
default: succeeded()
- name: artifactName
type: string
- name: pathToPublish
type: string
- name: continueOnError
type: boolean
default: false
- name: publishLocation
type: string
default: 'Container'
- name: is1ESPipeline
type: boolean
default: true
- name: retryCountOnTaskFailure
type: string
default: 10
steps:
- ${{ if ne(parameters.is1ESPipeline, true) }}:
- 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error
- task: 1ES.PublishBuildArtifacts@1
displayName: ${{ parameters.displayName }}
condition: ${{ parameters.condition }}
${{ if parameters.continueOnError }}:
continueOnError: ${{ parameters.continueOnError }}
inputs:
PublishLocation: ${{ parameters.publishLocation }}
PathtoPublish: ${{ parameters.pathToPublish }}
${{ if parameters.artifactName }}:
ArtifactName: ${{ parameters.artifactName }}
${{ if parameters.retryCountOnTaskFailure }}:
retryCountOnTaskFailure: ${{ parameters.retryCountOnTaskFailure }}
================================================
FILE: eng/common/templates-official/steps/publish-logs.yml
================================================
steps:
- template: /eng/common/core-templates/steps/publish-logs.yml
parameters:
is1ESPipeline: true
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates-official/steps/publish-pipeline-artifacts.yml
================================================
parameters:
- name: is1ESPipeline
type: boolean
default: true
- name: args
type: object
default: {}
steps:
- ${{ if ne(parameters.is1ESPipeline, true) }}:
- 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error
- task: 1ES.PublishPipelineArtifact@1
displayName: ${{ coalesce(parameters.args.displayName, 'Publish to Build Artifact') }}
${{ if parameters.args.condition }}:
condition: ${{ parameters.args.condition }}
${{ else }}:
condition: succeeded()
${{ if parameters.args.continueOnError }}:
continueOnError: ${{ parameters.args.continueOnError }}
inputs:
targetPath: ${{ parameters.args.targetPath }}
${{ if parameters.args.artifactName }}:
artifactName: ${{ parameters.args.artifactName }}
${{ if parameters.args.properties }}:
properties: ${{ parameters.args.properties }}
${{ if ne(parameters.args.sbomEnabled, '') }}:
sbomEnabled: ${{ parameters.args.sbomEnabled }}
${{ if ne(parameters.args.isProduction, '') }}:
isProduction: ${{ parameters.args.isProduction }}
================================================
FILE: eng/common/templates-official/steps/retain-build.yml
================================================
steps:
- template: /eng/common/core-templates/steps/retain-build.yml
parameters:
is1ESPipeline: true
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates-official/steps/send-to-helix.yml
================================================
steps:
- template: /eng/common/core-templates/steps/send-to-helix.yml
parameters:
is1ESPipeline: true
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates-official/steps/source-build.yml
================================================
steps:
- template: /eng/common/core-templates/steps/source-build.yml
parameters:
is1ESPipeline: true
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates-official/steps/source-index-stage1-publish.yml
================================================
steps:
- template: /eng/common/core-templates/steps/source-index-stage1-publish.yml
parameters:
is1ESPipeline: true
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
================================================
FILE: eng/common/templates-official/variables/pool-providers.yml
================================================
# Select a pool provider based off branch name. Anything with branch name containing 'release' must go into an -Svc pool,
# otherwise it should go into the "normal" pools. This separates out the queueing and billing of released branches.
# Motivation:
# Once a given branch of a repository's output has been officially "shipped" once, it is then considered to be COGS
# (Cost of goods sold) and should be moved to a servicing pool provider. This allows both separation of queueing
# (allowing release builds and main PR builds to not interfere with each other) and billing (required for COGS).
# Additionally, the pool provider name itself may be subject to change when the .NET Core Engineering Services
# team needs to move resources around and create new and potentially differently-named pools. Using this template
# file from an Arcade-ified repo helps guard against both having to update one's release/* branches and renaming.
# How to use:
# This yaml assumes your shipped product branches use the naming convention "release/..." (which many do).
# If we find alternate naming conventions in broad usage it can be added to the condition below.
#
# First, import the template in an arcade-ified repo to pick up the variables, e.g.:
#
# variables:
# - template: /eng/common/templates-official/variables/pool-providers.yml
#
# ... then anywhere specifying the pool provider use the runtime variables,
# $(DncEngInternalBuildPool)
#
# pool:
# name: $(DncEngInternalBuildPool)
# image: windows.vs2026.amd64
variables:
# Coalesce the target and source branches so we know when a PR targets a release branch
# If these variables are somehow missing, fall back to main (tends to have more capacity)
# Any new -Svc alternative pools should have variables added here to allow for splitting work
- name: DncEngInternalBuildPool
value: $[
replace(
replace(
eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'),
True,
'NetCore1ESPool-Svc-Internal'
),
False,
'NetCore1ESPool-Internal'
)
]
================================================
FILE: eng/common/templates-official/variables/sdl-variables.yml
================================================
variables:
# The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
# sync with the packages.config file.
- name: DefaultGuardianVersion
value: 0.109.0
- name: GuardianPackagesConfigFile
value: $(System.DefaultWorkingDirectory)\eng\common\sdl\packages.config
================================================
FILE: eng/common/tools.ps1
================================================
# Initialize variables if they aren't already defined.
# These may be defined as parameters of the importing script, or set after importing this script.
# Each variable is only assigned its default when the importing scope has not already defined it.

# CI mode - set to true on a CI server for a PR validation build or an official build.
[bool]$ci = if (Test-Path variable:ci) { $ci } else { $false }

# Build configuration. Common values include 'Debug' and 'Release', but the repository may use other names.
[string]$configuration = if (Test-Path variable:configuration) { $configuration } else { 'Debug' }

# Set to true to opt out of outputting a binary log while running in CI.
[bool]$excludeCIBinarylog = if (Test-Path variable:excludeCIBinarylog) { $excludeCIBinarylog } else { $false }

# Set to true to output a binary log from msbuild. Note that emitting a binary log slows down the build.
# Defaults to on for CI builds unless explicitly excluded above.
[bool]$binaryLog = if (Test-Path variable:binaryLog) { $binaryLog } else { $ci -and !$excludeCIBinarylog }

# Set to true to use the pipelines logger which will enable Azure logging output.
# https://github.com/Microsoft/azure-pipelines-tasks/blob/master/docs/authoring/commands.md
# This flag is meant as a temporary opt-out for the feature while we validate it across
# our consumers. It will be deleted in the future.
[bool]$pipelinesLog = if (Test-Path variable:pipelinesLog) { $pipelinesLog } else { $ci }

# Turns on machine preparation/clean up code that changes the machine state (e.g. kills build processes).
[bool]$prepareMachine = if (Test-Path variable:prepareMachine) { $prepareMachine } else { $false }

# True to restore toolsets and dependencies.
[bool]$restore = if (Test-Path variable:restore) { $restore } else { $true }

# Adjusts msbuild verbosity level.
[string]$verbosity = if (Test-Path variable:verbosity) { $verbosity } else { 'minimal' }

# Set to true to reuse msbuild nodes. Recommended to not reuse on CI.
[bool]$nodeReuse = if (Test-Path variable:nodeReuse) { $nodeReuse } else { !$ci }

# Configures warning treatment in msbuild.
[bool]$warnAsError = if (Test-Path variable:warnAsError) { $warnAsError } else { $true }

# Specifies which msbuild engine to use for build: 'vs', 'dotnet' or unspecified (determined based on presence of tools.vs in global.json).
[string]$msbuildEngine = if (Test-Path variable:msbuildEngine) { $msbuildEngine } else { $null }

# True to attempt to use a .NET SDK already installed on the machine that meets the
# requirements specified in global.json, instead of downloading one.
[bool]$useInstalledDotNetCli = if (Test-Path variable:useInstalledDotNetCli) { $useInstalledDotNetCli } else { $true }

# Enable repos to use a particular version of the on-line dotnet-install scripts.
# default URL: https://builds.dotnet.microsoft.com/dotnet/scripts/v1/dotnet-install.ps1
[string]$dotnetInstallScriptVersion = if (Test-Path variable:dotnetInstallScriptVersion) { $dotnetInstallScriptVersion } else { 'v1' }

# True to use the global NuGet cache instead of restoring packages to a repository-local directory.
[bool]$useGlobalNuGetCache = if (Test-Path variable:useGlobalNuGetCache) { $useGlobalNuGetCache } else { !$ci }

# True to exclude prerelease versions of Visual Studio during build.
[bool]$excludePrereleaseVS = if (Test-Path variable:excludePrereleaseVS) { $excludePrereleaseVS } else { $false }

# An array of names of processes to stop on script exit if prepareMachine is true.
$processesToStopOnExit = if (Test-Path variable:processesToStopOnExit) { $processesToStopOnExit } else { @('msbuild', 'dotnet', 'vbcscompiler') }

# When non-null, presumably suppresses importing a repo-specific configure-toolset script —
# the consuming code is not visible in this section; verify against the rest of tools.ps1.
$disableConfigureToolsetImport = if (Test-Path variable:disableConfigureToolsetImport) { $disableConfigureToolsetImport } else { $null }

set-strictmode -version 2.0
$ErrorActionPreference = 'Stop'
# Require TLS 1.2 for web requests made by this script.
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12

# If specified, provides an alternate path for getting .NET Core SDKs and Runtimes. This script will still try public sources first.
[string]$runtimeSourceFeed = if (Test-Path variable:runtimeSourceFeed) { $runtimeSourceFeed } else { $null }

# Base-64 encoded SAS token that has permission to the storage container described by $runtimeSourceFeed.
[string]$runtimeSourceFeedKey = if (Test-Path variable:runtimeSourceFeedKey) { $runtimeSourceFeedKey } else { $null }

# True when the build is running within the VMR.
[bool]$fromVMR = if (Test-Path variable:fromVMR) { $fromVMR } else { $false }
function Create-Directory ([string[]] $path) {
  # Create each requested directory (including missing parents); discard the
  # DirectoryInfo objects New-Item emits so nothing leaks into the caller's pipeline.
  $null = New-Item -Path $path -Force -ItemType 'Directory'
}
# Extract $zipfile into $outpath using the .NET ZipFile API (no external tools required).
function Unzip([string]$zipfile, [string]$outpath) {
  # Load the compression assembly on demand; it is not loaded by default in Windows PowerShell.
  Add-Type -AssemblyName System.IO.Compression.FileSystem
  [System.IO.Compression.ZipFile]::ExtractToDirectory($zipfile, $outpath)
}
# This will exec a process using the console and return it's exit code.
# This will not throw when the process fails.
# Returns process exit code.
function Exec-Process([string]$command, [string]$commandArgs) {
  # Launch $command with $commandArgs from the current location, without the shell,
  # so the child's stdout/stderr stay attached to this console.
  $startInfo = New-Object System.Diagnostics.ProcessStartInfo
  $startInfo.FileName = $command
  $startInfo.Arguments = $commandArgs
  $startInfo.UseShellExecute = $false
  $startInfo.WorkingDirectory = Get-Location

  $process = New-Object System.Diagnostics.Process
  $process.StartInfo = $startInfo
  $process.Start() | Out-Null

  $finished = $false
  try {
    while (-not $process.WaitForExit(100)) {
      # Non-blocking loop done to allow ctrl-c interrupts
    }

    $finished = $true
    # Mirror the exit code into $global:LASTEXITCODE (as invoking a native command
    # would) and return the same value to the caller.
    return $global:LASTEXITCODE = $process.ExitCode
  }
  finally {
    # If we didn't finish then an error occurred or the user hit ctrl-c. Either
    # way kill the process
    if (-not $finished) {
      $process.Kill()
    }
  }
}
# Take the given block, print it, print what the block probably references from the current set of
# variables using low-effort string matching, then run the block.
#
# This is intended to replace the pattern of manually copy-pasting a command, wrapping it in quotes,
# and printing it using "Write-Host". The copy-paste method is more readable in build logs, but less
# maintainable and less reliable. It is easy to make a mistake and modify the command without
# properly updating the "Write-Host" line, resulting in misleading build logs. The probability of
# this mistake makes the pattern hard to trust when it shows up in build logs. Finding the bug in
# existing source code can also be difficult, because the strings are not aligned to each other and
# the line may be 300+ columns long.
#
# By removing the need to maintain two copies of the command, Exec-BlockVerbosely avoids the issues.
#
# In Bash (or any posix-like shell), "set -x" prints usable verbose output automatically.
# "Set-PSDebug" appears to be similar at first glance, but unfortunately, it isn't very useful: it
# doesn't print any info about the variables being used by the command, which is normally the
# interesting part to diagnose.
function Exec-BlockVerbosely([scriptblock] $block) {
  # Print the script block, then a best-effort list of in-scope variables it appears to
  # reference, then execute it. Avoids the copy-paste "Write-Host the command string"
  # pattern, which drifts out of sync with the actual command.
  $blockText = $block.ToString().Trim()

  Write-Host "--- Running script block:"
  Write-Host $blockText

  Write-Host "--- List of variables that might be used:"
  # Match via simple "$name" / "@name" substring checks only. This intentionally does not
  # detect "${name}" or "$variable:name" forms — it covers the plain command-line
  # invocations this helper was written for.
  $candidates = Get-Variable | Where-Object {
    $blockText.Contains("`$$($_.Name)") -or $blockText.Contains("@$($_.Name)")
  }
  $rendered = ($candidates | Format-Table -AutoSize -HideTableHeaders -Wrap | Out-String).Trim()
  Write-Host $rendered

  Write-Host "--- Executing:"
  & $block
  Write-Host "--- Done running script block!"
}
# createSdkLocationFile parameter enables a file being generated under the toolset directory
# which writes the sdk's location into. This is only necessary for cmd --> powershell invocations
# as dot sourcing isn't possible.
#
# Resolves (and, when $install is set, installs) the .NET SDK version pinned in
# global.json and returns its installation directory. The result is cached in
# $global:_DotNetInstallDir so repeated calls are cheap.
function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) {
  # Return the cached result of a previous invocation, if any.
  if (Test-Path variable:global:_DotNetInstallDir) {
    return $global:_DotNetInstallDir
  }

  # Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism
  $env:DOTNET_MULTILEVEL_LOOKUP=0

  # Disable first run since we do not need all ASP.NET packages restored.
  $env:DOTNET_NOLOGO=1

  # Disable telemetry on CI.
  if ($ci) {
    $env:DOTNET_CLI_TELEMETRY_OPTOUT=1
  }

  # Find the first path on %PATH% that contains the dotnet.exe
  if ($useInstalledDotNetCli -and (-not $globalJsonHasRuntimes) -and ($env:DOTNET_INSTALL_DIR -eq $null)) {
    $dotnetExecutable = GetExecutableFileName 'dotnet'
    $dotnetCmd = Get-Command $dotnetExecutable -ErrorAction SilentlyContinue

    if ($dotnetCmd -ne $null) {
      $env:DOTNET_INSTALL_DIR = Split-Path $dotnetCmd.Path -Parent
    }
  }

  $dotnetSdkVersion = $GlobalJson.tools.dotnet

  # Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version,
  # otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues.
  if ((-not $globalJsonHasRuntimes) -and (-not [string]::IsNullOrEmpty($env:DOTNET_INSTALL_DIR)) -and (Test-Path(Join-Path $env:DOTNET_INSTALL_DIR "sdk\$dotnetSdkVersion"))) {
    $dotnetRoot = $env:DOTNET_INSTALL_DIR
  } else {
    $dotnetRoot = Join-Path $RepoRoot '.dotnet'

    if (-not (Test-Path(Join-Path $dotnetRoot "sdk\$dotnetSdkVersion"))) {
      if ($install) {
        InstallDotNetSdk $dotnetRoot $dotnetSdkVersion
      } else {
        Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to find dotnet with SDK version '$dotnetSdkVersion'"
        ExitWithExitCode 1
      }
    }

    $env:DOTNET_INSTALL_DIR = $dotnetRoot
  }

  # Creates a temporary file under the toolset dir.
  # The following code block is protecting against concurrent access so that this function can
  # be called in parallel.
  if ($createSdkLocationFile) {
    # Write to a uniquely-named temp file first, then atomically move it into
    # place; losing the race is harmless since every writer produces the same content.
    do {
      $sdkCacheFileTemp = Join-Path $ToolsetDir $([System.IO.Path]::GetRandomFileName())
    }
    until (!(Test-Path $sdkCacheFileTemp))
    Set-Content -Path $sdkCacheFileTemp -Value $dotnetRoot

    try {
      Move-Item -Force $sdkCacheFileTemp (Join-Path $ToolsetDir 'sdk.txt')
    } catch {
      # Somebody beat us
      Remove-Item -Path $sdkCacheFileTemp
    }
  }

  # Add dotnet to PATH. This prevents any bare invocation of dotnet in custom
  # build steps from using anything other than what we've downloaded.
  # It also ensures that VS msbuild will use the downloaded sdk targets.
  $env:PATH = "$dotnetRoot;$env:PATH"

  # Make Sure that our bootstrapped dotnet cli is available in future steps of the Azure Pipelines build
  Write-PipelinePrependPath -Path $dotnetRoot

  Write-PipelineSetVariable -Name 'DOTNET_MULTILEVEL_LOOKUP' -Value '0'
  Write-PipelineSetVariable -Name 'DOTNET_NOLOGO' -Value '1'

  return $global:_DotNetInstallDir = $dotnetRoot
}
# Invokes $downloadBlock, retrying on exception up to $maxRetries total attempts
# with exponential backoff between attempts.
function Retry($downloadBlock, $maxRetries = 5) {
  $attempt = 1
  while ($true) {
    $failed = $false
    try {
      & $downloadBlock
    }
    catch {
      $failed = $true
      Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
    }

    # Success: stop retrying.
    if (-not $failed) { break }

    $attempt++
    if ($attempt -gt $maxRetries) {
      Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to download file in $maxRetries attempts."
      break
    }

    $delayInSeconds = [math]::Pow(2, $attempt) - 1 # Exponential backoff
    Write-Host "Retrying. Waiting for $delayInSeconds seconds before next attempt ($attempt of $maxRetries)."
    Start-Sleep -Seconds $delayInSeconds
  }
}
# Returns the path of dotnet-install.ps1 under $dotnetRoot, downloading it when
# it is missing or its cached copy is older than 30 days.
function GetDotNetInstallScript([string] $dotnetRoot) {
  $installScript = Join-Path $dotnetRoot 'dotnet-install.ps1'

  $needsDownload = -not (Test-Path $installScript)
  if (-not $needsDownload) {
    # Refresh a cached script that is older than 30 days.
    $fileAge = (Get-Date) - (Get-Item $installScript).LastWriteTime
    if ($fileAge.Days -gt 30) {
      Write-Host "Existing install script is too old, re-downloading..."
      $needsDownload = $true
    }
  }

  if ($needsDownload) {
    Create-Directory $dotnetRoot
    $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
    $uri = "https://builds.dotnet.microsoft.com/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.ps1"
    Retry({
      Write-Host "GET $uri"
      Invoke-WebRequest $uri -UseBasicParsing -OutFile $installScript
    })
  }

  return $installScript
}
# Installs the .NET SDK (not a bare runtime) by delegating to InstallDotNet.
function InstallDotNetSdk([string] $dotnetRoot, [string] $version, [string] $architecture = '', [switch] $noPath) {
  InstallDotNet -dotnetRoot $dotnetRoot -version $version -architecture $architecture -runtime '' -skipNonVersionedFiles $false -runtimeSourceFeed $runtimeSourceFeed -runtimeSourceFeedKey $runtimeSourceFeedKey -noPath:$noPath
}
# Installs a .NET SDK or runtime of the given version into $dotnetRoot using the
# dotnet-install script, trying the default public location first and then any
# configured fallback feeds.
#
# Parameters:
#   dotnetRoot            - destination directory for the installation
#   version               - SDK/runtime version to install
#   architecture          - processor architecture ('' = dotnet-install default)
#   runtime               - 'dotnet', 'aspnetcore' or 'windowsdesktop' to install a
#                           runtime; '' or 'sdk' to install the SDK
#   skipNonVersionedFiles - forwarded to dotnet-install (-SkipNonVersionedFiles)
#   runtimeSourceFeed     - optional alternative Azure feed to try last
#   runtimeSourceFeedKey  - optional base64-encoded credential for runtimeSourceFeed
#   noPath                - when set, dotnet-install does not modify PATH
#
# Exits the script with code 1 when every location fails.
function InstallDotNet([string] $dotnetRoot,
  [string] $version,
  [string] $architecture = '',
  [string] $runtime = '',
  [bool] $skipNonVersionedFiles = $false,
  [string] $runtimeSourceFeed = '',
  [string] $runtimeSourceFeedKey = '',
  [switch] $noPath) {

  $dotnetVersionLabel = "'sdk v$version'"

  if ($runtime -ne '' -and $runtime -ne 'sdk') {
    $runtimePath = $dotnetRoot
    $runtimePath = $runtimePath + "\shared"
    if ($runtime -eq "dotnet") { $runtimePath = $runtimePath + "\Microsoft.NETCore.App" }
    if ($runtime -eq "aspnetcore") { $runtimePath = $runtimePath + "\Microsoft.AspNetCore.App" }
    if ($runtime -eq "windowsdesktop") { $runtimePath = $runtimePath + "\Microsoft.WindowsDesktop.App" }
    $runtimePath = $runtimePath + "\" + $version

    $dotnetVersionLabel = "runtime toolset '$runtime/$architecture v$version'"

    if (Test-Path $runtimePath) {
      Write-Host " Runtime toolset '$runtime/$architecture v$version' already installed."
      # Bug fix: this used 'Exit', which terminates the whole calling script when
      # the runtime is already present, silently skipping any remaining build
      # steps. Returning from the function is the intended behavior.
      return
    }
  }

  $installScript = GetDotNetInstallScript $dotnetRoot
  $installParameters = @{
    Version = $version
    InstallDir = $dotnetRoot
  }

  if ($architecture) { $installParameters.Architecture = $architecture }
  if ($runtime) { $installParameters.Runtime = $runtime }
  if ($skipNonVersionedFiles) { $installParameters.SkipNonVersionedFiles = $skipNonVersionedFiles }
  if ($noPath) { $installParameters.NoPath = $True }

  # Build the ordered list of parameter sets (feeds) to try.
  $variations = @()
  $variations += @($installParameters)

  # Consistent casing fix: was '$dotnetbuilds' on the second line.
  $dotnetBuilds = $installParameters.Clone()
  $dotnetBuilds.AzureFeed = "https://ci.dot.net/public"
  $variations += @($dotnetBuilds)

  if ($runtimeSourceFeed) {
    $runtimeSource = $installParameters.Clone()
    $runtimeSource.AzureFeed = $runtimeSourceFeed
    if ($runtimeSourceFeedKey) {
      # The feed credential arrives base64-encoded; decode before passing along.
      $decodedBytes = [System.Convert]::FromBase64String($runtimeSourceFeedKey)
      $decodedString = [System.Text.Encoding]::UTF8.GetString($decodedBytes)
      $runtimeSource.FeedCredential = $decodedString
    }
    $variations += @($runtimeSource)
  }

  $installSuccess = $false
  foreach ($variation in $variations) {
    if ($variation | Get-Member AzureFeed) {
      $location = $variation.AzureFeed
    } else {
      $location = "public location";
    }
    Write-Host " Attempting to install $dotnetVersionLabel from $location."
    try {
      & $installScript @variation
      $installSuccess = $true
      break
    }
    catch {
      Write-Host " Failed to install $dotnetVersionLabel from $location."
    }
  }
  if (-not $installSuccess) {
    Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Failed to install $dotnetVersionLabel from any of the specified locations."
    ExitWithExitCode 1
  }
}
#
# Locates Visual Studio MSBuild installation.
# The preference order for MSBuild to use is as follows:
#
#   1. MSBuild from an active VS command prompt
#   2. MSBuild from a compatible VS installation
#   3. MSBuild from the xcopy tool package
#
# Returns full path to msbuild.exe.
# Throws on failure.
#
function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements = $null) {
  if (-not (IsWindowsPlatform)) {
    throw "Cannot initialize Visual Studio on non-Windows"
  }

  # Return the cached result of a previous invocation, if any.
  if (Test-Path variable:global:_MSBuildExe) {
    return $global:_MSBuildExe
  }

  # Minimum VS version to require.
  $vsMinVersionReqdStr = '17.7'
  $vsMinVersionReqd = [Version]::new($vsMinVersionReqdStr)

  # If the version of msbuild is going to be xcopied,
  # use this version. Version matches a package here:
  # https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/Microsoft.DotNet.Arcade.MSBuild.Xcopy/versions/18.0.0
  $defaultXCopyMSBuildVersion = '18.0.0'

  # When no explicit requirements were passed, fall back to global.json's
  # 'tools.vs' object, or to the minimum required version.
  if (!$vsRequirements) {
    if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') {
      $vsRequirements = $GlobalJson.tools.vs
    }
    else {
      $vsRequirements = New-Object PSObject -Property @{ version = $vsMinVersionReqdStr }
    }
  }
  $vsMinVersionStr = if ($vsRequirements.version) { $vsRequirements.version } else { $vsMinVersionReqdStr }
  $vsMinVersion = [Version]::new($vsMinVersionStr)

  # Try msbuild command available in the environment (i.e. a VS developer command prompt).
  if ($env:VSINSTALLDIR -ne $null) {
    $msbuildCmd = Get-Command 'msbuild.exe' -ErrorAction SilentlyContinue
    if ($msbuildCmd -ne $null) {
      # Workaround for https://github.com/dotnet/roslyn/issues/35793
      # Due to this issue $msbuildCmd.Version returns 0.0.0.0 for msbuild.exe 16.2+
      $msbuildVersion = [Version]::new((Get-Item $msbuildCmd.Path).VersionInfo.ProductVersion.Split([char[]]@('-', '+'))[0])

      if ($msbuildVersion -ge $vsMinVersion) {
        return $global:_MSBuildExe = $msbuildCmd.Path
      }

      # Report error - the developer environment is initialized with incompatible VS version.
      throw "Developer Command Prompt for VS $($env:VisualStudioVersion) is not recent enough. Please upgrade to $vsMinVersionStr or build from a plain CMD window"
    }
  }

  # Locate Visual Studio installation or download x-copy msbuild.
  $vsInfo = LocateVisualStudio $vsRequirements
  if ($vsInfo -ne $null -and $env:ForceUseXCopyMSBuild -eq $null) {
    # Ensure vsInstallDir has a trailing slash
    $vsInstallDir = Join-Path $vsInfo.installationPath "\"
    $vsMajorVersion = $vsInfo.installationVersion.Split('.')[0]

    InitializeVisualStudioEnvironmentVariables $vsInstallDir $vsMajorVersion
  } else {
    # No suitable VS install (or xcopy forced): determine the xcopy msbuild package version.
    if (Get-Member -InputObject $GlobalJson.tools -Name 'xcopy-msbuild') {
      $xcopyMSBuildVersion = $GlobalJson.tools.'xcopy-msbuild'
      $vsMajorVersion = $xcopyMSBuildVersion.Split('.')[0]
    } else {
      #if vs version provided in global.json is incompatible (too low) then use the default version for xcopy msbuild download
      if($vsMinVersion -lt $vsMinVersionReqd){
        Write-Host "Using xcopy-msbuild version of $defaultXCopyMSBuildVersion since VS version $vsMinVersionStr provided in global.json is not compatible"
        $xcopyMSBuildVersion = $defaultXCopyMSBuildVersion
        $vsMajorVersion = $xcopyMSBuildVersion.Split('.')[0]
      }
      else{
        # If the VS version IS compatible, look for an xcopy msbuild package
        # with a version matching VS.
        # Note: If this version does not exist, then an explicit version of xcopy msbuild
        # can be specified in global.json. This will be required for pre-release versions of msbuild.
        $vsMajorVersion = $vsMinVersion.Major
        $vsMinorVersion = $vsMinVersion.Minor
        $xcopyMSBuildVersion = "$vsMajorVersion.$vsMinorVersion.0"
      }
    }

    $vsInstallDir = $null
    # 'none' opts out of the xcopy package entirely.
    if ($xcopyMSBuildVersion.Trim() -ine "none") {
      $vsInstallDir = InitializeXCopyMSBuild $xcopyMSBuildVersion $install
      if ($vsInstallDir -eq $null) {
        throw "Could not xcopy msbuild. Please check that package 'Microsoft.DotNet.Arcade.MSBuild.Xcopy @ $xcopyMSBuildVersion' exists on feed 'dotnet-eng'."
      }
    }
    if ($vsInstallDir -eq $null) {
      throw 'Unable to find Visual Studio that has required version and components installed'
    }
  }

  # VS 16+ keeps MSBuild under a version-independent 'Current' folder.
  $msbuildVersionDir = if ([int]$vsMajorVersion -lt 16) { "$vsMajorVersion.0" } else { "Current" }

  $local:BinFolder = Join-Path $vsInstallDir "MSBuild\$msbuildVersionDir\Bin"
  $local:Prefer64bit = if (Get-Member -InputObject $vsRequirements -Name 'Prefer64bit') { $vsRequirements.Prefer64bit } else { $false }
  if ($local:Prefer64bit -and (Test-Path(Join-Path $local:BinFolder "amd64"))) {
    $global:_MSBuildExe = Join-Path $local:BinFolder "amd64\msbuild.exe"
  } else {
    $global:_MSBuildExe = Join-Path $local:BinFolder "msbuild.exe"
  }

  return $global:_MSBuildExe
}
# Sets the environment variables that a VS developer command prompt would define:
# VSINSTALLDIR, VS<major>0COMNTOOLS, and (when the VSSDK folder exists) the
# VSSDK<major>0Install / VSSDKInstall variables.
function InitializeVisualStudioEnvironmentVariables([string] $vsInstallDir, [string] $vsMajorVersion) {
  $env:VSINSTALLDIR = $vsInstallDir

  $comnToolsDir = Join-Path $vsInstallDir "Common7\Tools\"
  Set-Item "env:VS$($vsMajorVersion)0COMNTOOLS" $comnToolsDir

  $vsSdkInstallDir = Join-Path $vsInstallDir "VSSDK\"
  if (Test-Path $vsSdkInstallDir) {
    Set-Item "env:VSSDK$($vsMajorVersion)0Install" $vsSdkInstallDir
    $env:VSSDKInstall = $vsSdkInstallDir
  }
}
# Downloads (if necessary) the xcopy MSBuild package and returns its tools path.
function InstallXCopyMSBuild([string]$packageVersion) {
  return InitializeXCopyMSBuild -packageVersion $packageVersion -install $true
}
# Ensures the Microsoft.DotNet.Arcade.MSBuild.Xcopy package of the given version
# is present under $ToolsDir and returns its 'tools' folder. When the package is
# missing and $install is false, returns $null instead of downloading.
function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) {
  $packageName = 'Microsoft.DotNet.Arcade.MSBuild.Xcopy'
  $packageDir = Join-Path $ToolsDir "msbuild\$packageVersion"
  $packagePath = Join-Path $packageDir "$packageName.$packageVersion.nupkg"

  # Already extracted from a previous run.
  if (Test-Path $packageDir) {
    return Join-Path $packageDir 'tools'
  }

  if (-not $install) {
    return $null
  }

  Create-Directory $packageDir

  Write-Host "Downloading $packageName $packageVersion"
  $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
  Retry({
    Invoke-WebRequest "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/flat2/$packageName/$packageVersion/$packageName.$packageVersion.nupkg" -UseBasicParsing -OutFile $packagePath
  })
  if (-not (Test-Path $packagePath)) {
    Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "See https://dev.azure.com/dnceng/internal/_wiki/wikis/DNCEng%20Services%20Wiki/1074/Updating-Microsoft.DotNet.Arcade.MSBuild.Xcopy-WAS-RoslynTools.MSBuild-(xcopy-msbuild)-generation?anchor=troubleshooting for help troubleshooting issues with XCopy MSBuild"
    throw
  }

  Unzip $packagePath $packageDir

  return Join-Path $packageDir 'tools'
}
#
# Locates Visual Studio instance that meets the minimal requirements specified by tools.vs object in global.json.
#
# The following properties of tools.vs are recognized:
#   "version": "{major}.{minor}"
#       Two part minimal VS version, e.g. "15.9", "16.0", etc.
#   "components": ["componentId1", "componentId2", ...]
#       Array of ids of workload components that must be available in the VS instance.
#       See e.g. https://docs.microsoft.com/en-us/visualstudio/install/workload-component-id-vs-enterprise?view=vs-2017
#
# Returns JSON describing the located VS instance (same format as returned by vswhere),
# or $null if no instance meeting the requirements is found on the machine.
#
function LocateVisualStudio([object]$vsRequirements = $null){
  if (-not (IsWindowsPlatform)) {
    throw "Cannot run vswhere on non-Windows platforms."
  }

  # Pick the vswhere version pinned in global.json, falling back to a default.
  if (Get-Member -InputObject $GlobalJson.tools -Name 'vswhere') {
    $vswhereVersion = $GlobalJson.tools.vswhere
  } else {
    # keep this in sync with the VSWhereVersion in DefaultVersions.props
    $vswhereVersion = '3.1.7'
  }

  $vsWhereDir = Join-Path $ToolsDir "vswhere\$vswhereVersion"
  $vsWhereExe = Join-Path $vsWhereDir 'vswhere.exe'

  # Download vswhere.exe on first use.
  if (!(Test-Path $vsWhereExe)) {
    Create-Directory $vsWhereDir
    Write-Host "Downloading vswhere $vswhereVersion"
    $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
    Retry({
      Invoke-WebRequest "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/vswhere/$vswhereVersion/vswhere.exe" -UseBasicParsing -OutFile $vswhereExe
    })
  }

  # Default requirements come from global.json's tools.vs when not supplied.
  if (!$vsRequirements) {
    if (Get-Member -InputObject $GlobalJson.tools -Name 'vs' -ErrorAction SilentlyContinue) {
      $vsRequirements = $GlobalJson.tools.vs
    } else {
      $vsRequirements = $null
    }
  }

  # Build the vswhere command line from the requirements.
  $args = @('-latest', '-format', 'json', '-requires', 'Microsoft.Component.MSBuild', '-products', '*')

  if (!$excludePrereleaseVS) {
    $args += '-prerelease'
  }

  if ($vsRequirements -and (Get-Member -InputObject $vsRequirements -Name 'version' -ErrorAction SilentlyContinue)) {
    $args += '-version'
    $args += $vsRequirements.version
  }

  if ($vsRequirements -and (Get-Member -InputObject $vsRequirements -Name 'components' -ErrorAction SilentlyContinue)) {
    foreach ($component in $vsRequirements.components) {
      $args += '-requires'
      $args += $component
    }
  }

  # Note: $lastExitCode must be checked immediately after the native call.
  $vsInfo =& $vsWhereExe $args | ConvertFrom-Json

  if ($lastExitCode -ne 0) {
    return $null
  }

  # use first matching instance
  return $vsInfo[0]
}
# Resolves the build tool to use ('dotnet msbuild' or VS msbuild) and caches the
# result in $global:_BuildTool. Returns a hashtable with Path, Command, Tool and
# Framework keys (plus ExcludePrereleaseVS when the VS engine is selected).
function InitializeBuildTool() {
  if (Test-Path variable:global:_BuildTool) {
    # If the requested msbuild parameters do not match, clear the cached variables.
    if($global:_BuildTool.Contains('ExcludePrereleaseVS') -and $global:_BuildTool.ExcludePrereleaseVS -ne $excludePrereleaseVS) {
      Remove-Item variable:global:_BuildTool
      Remove-Item variable:global:_MSBuildExe
    } else {
      return $global:_BuildTool
    }
  }

  if (-not $msbuildEngine) {
    $msbuildEngine = GetDefaultMSBuildEngine
  }

  # Initialize dotnet cli if listed in 'tools'
  $dotnetRoot = $null
  if (Get-Member -InputObject $GlobalJson.tools -Name 'dotnet') {
    $dotnetRoot = InitializeDotNetCli -install:$restore
  }

  if ($msbuildEngine -eq 'dotnet') {
    if (!$dotnetRoot) {
      Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "/global.json must specify 'tools.dotnet'."
      ExitWithExitCode 1
    }
    $dotnetPath = Join-Path $dotnetRoot (GetExecutableFileName 'dotnet')

    $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'net' }
  } elseif ($msbuildEngine -eq "vs") {
    try {
      $msbuildPath = InitializeVisualStudioMSBuild -install:$restore
    } catch {
      Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
      ExitWithExitCode 1
    }

    $buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "netframework"; ExcludePrereleaseVS = $excludePrereleaseVS }
  } else {
    Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unexpected value of -msbuildEngine: '$msbuildEngine'."
    ExitWithExitCode 1
  }

  return $global:_BuildTool = $buildTool
}
# Chooses the default msbuild engine from global.json: 'vs' when tools.vs is
# declared, otherwise 'dotnet' when tools.dotnet is declared; errors out when
# neither is present.
function GetDefaultMSBuildEngine() {
  # Presence of tools.vs indicates the repo needs to build using VS msbuild on Windows.
  if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') {
    return 'vs'
  }
  elseif (Get-Member -InputObject $GlobalJson.tools -Name 'dotnet') {
    return 'dotnet'
  }

  Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "-msbuildEngine must be specified, or /global.json must specify 'tools.dotnet' or 'tools.vs'."
  ExitWithExitCode 1
}
# Returns the NuGet package cache location, initializing $env:NUGET_PACKAGES
# on first use.
function GetNuGetPackageCachePath() {
  if ($env:NUGET_PACKAGES -eq $null) {
    # Use local cache on CI to ensure deterministic build.
    # Avoid using the http cache as workaround for https://github.com/NuGet/Home/issues/3116
    # use global cache in dev builds to avoid cost of downloading packages.
    # For directory normalization, see also: https://github.com/NuGet/Home/issues/7968
    $cachePath = if ($useGlobalNuGetCache) {
      Join-Path $env:UserProfile '.nuget\packages\'
    } else {
      Join-Path $RepoRoot '.packages\'
    }
    $env:NUGET_PACKAGES = $cachePath
  }

  return $env:NUGET_PACKAGES
}
# Returns a full path to an Arcade SDK task project file.
function GetSdkTaskProject([string]$taskName) {
  $sdkRoot = Split-Path (InitializeToolset) -Parent
  return Join-Path $sdkRoot "SdkTasks\$taskName.proj"
}
# Runs the native-tools bootstrapper when global.json declares a 'native-tools'
# section and installs have not been explicitly disabled.
function InitializeNativeTools() {
  # Skip entirely when installs are disabled or no native tools are declared.
  if (Test-Path variable:DisableNativeToolsetInstalls) { return }
  if (-Not (Get-Member -InputObject $GlobalJson -Name "native-tools")) { return }

  $nativeArgs = @{}
  if ($ci) {
    $nativeArgs = @{
      InstallDirectory = "$ToolsDir"
    }
  }
  if ($env:NativeToolsOnMachine) {
    Write-Host "Variable NativeToolsOnMachine detected, enabling native tool path promotion..."
    $nativeArgs += @{ PathPromotion = $true }
  }
  & "$PSScriptRoot/init-tools-native.ps1" @nativeArgs
}
# Reads the Microsoft.DotNet.Arcade.Sdk version pinned in global.json's
# 'msbuild-sdks' section.
function Read-ArcadeSdkVersion() {
  $msbuildSdks = $GlobalJson.'msbuild-sdks'
  return $msbuildSdks.'Microsoft.DotNet.Arcade.Sdk'
}
# Restores the Arcade SDK toolset (if needed) and returns the path to its build
# project. The result is cached in $global:_InitializeToolset and persisted in
# $ToolsetDir\<version>.txt so later invocations can skip the restore.
function InitializeToolset() {
  # For Unified Build/Source-build support, check whether the environment variable is
  # set. If it is, then use this as the toolset build project.
  if ($env:_InitializeToolset -ne $null) {
    return $global:_InitializeToolset = $env:_InitializeToolset
  }

  if (Test-Path variable:global:_InitializeToolset) {
    return $global:_InitializeToolset
  }

  $nugetCache = GetNuGetPackageCachePath

  $toolsetVersion = Read-ArcadeSdkVersion
  $toolsetLocationFile = Join-Path $ToolsetDir "$toolsetVersion.txt"

  # Reuse a previously restored toolset when its recorded location still exists.
  if (Test-Path $toolsetLocationFile) {
    $path = Get-Content $toolsetLocationFile -TotalCount 1
    if (Test-Path $path) {
      return $global:_InitializeToolset = $path
    }
  }

  if (-not $restore) {
    Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Toolset version $toolsetVersion has not been restored."
    ExitWithExitCode 1
  }

  $buildTool = InitializeBuildTool

  $proj = Join-Path $ToolsetDir 'restore.proj'
  $bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'ToolsetRestore.binlog') } else { '' }

  # Restore via an empty project: the SDK resolver pulls the toolset package and
  # the __WriteToolsetLocation target records where it landed.
  '' | Set-Content $proj

  MSBuild-Core $proj $bl /t:__WriteToolsetLocation /clp:ErrorsOnly`;NoSummary /p:__ToolsetLocationOutputFile=$toolsetLocationFile

  $path = Get-Content $toolsetLocationFile -Encoding UTF8 -TotalCount 1
  if (!(Test-Path $path)) {
    throw "Invalid toolset path: $path"
  }

  return $global:_InitializeToolset = $path
}
# Exits the script with the given code, first killing lingering build processes
# when running on a CI machine that requested cleanup.
function ExitWithExitCode([int] $exitCode) {
  if ($ci) {
    if ($prepareMachine) {
      Stop-Processes
    }
  }
  exit $exitCode
}
# Check if $LASTEXITCODE is a nonzero exit code (NZEC). If so, print an Azure
# Pipelines error for diagnostics, then exit the script with $LASTEXITCODE.
function Exit-IfNZEC([string] $category = "General") {
  Write-Host "Exit code $LASTEXITCODE"
  if ($LASTEXITCODE -eq 0) {
    return
  }
  $message = "Last command failed with exit code $LASTEXITCODE."
  Write-PipelineTelemetryError -Force -Category $category -Message $message
  ExitWithExitCode $LASTEXITCODE
}
# Terminates any still-running processes named in $processesToStopOnExit.
function Stop-Processes() {
  Write-Host 'Killing running build processes...'
  $processesToStopOnExit | ForEach-Object {
    Get-Process -Name $_ -ErrorAction SilentlyContinue | Stop-Process
  }
}
#
# Executes msbuild (or 'dotnet msbuild') with arguments passed to the function.
# The arguments are automatically quoted.
# Terminates the script if the build fails.
#
function MSBuild() {
  if ($pipelinesLog) {
    $buildTool = InitializeBuildTool

    if ($ci -and $buildTool.Tool -eq 'dotnet') {
      # Shorten NuGet plugin timeouts on CI and surface them to later steps.
      $env:NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS = 20
      $env:NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS = 20
      Write-PipelineSetVariable -Name 'NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS' -Value '20'
      Write-PipelineSetVariable -Name 'NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS' -Value '20'
    }

    Enable-Nuget-EnhancedRetry

    $toolsetBuildProject = InitializeToolset
    $basePath = Split-Path -parent $toolsetBuildProject
    $selectedPath = Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.ArcadeLogging.dll')

    # NOTE(review): Join-Path always yields a non-empty string, so this guard
    # appears unreachable; presumably a Test-Path check was intended — confirm upstream.
    if (-not $selectedPath) {
      Write-PipelineTelemetryError -Category 'Build' -Message "Unable to find arcade sdk logger assembly: $selectedPath"
      ExitWithExitCode 1
    }

    # Append the pipeline logger to the automatic $args array before forwarding.
    $args += "/logger:$selectedPath"
  }

  MSBuild-Core @args
}
#
# Executes msbuild (or 'dotnet msbuild') with arguments passed to the function.
# The arguments are automatically quoted.
# Terminates the script if the build fails.
#
function MSBuild-Core() {
  # CI sanity checks: require a binary log (unless explicitly opted out) and no
  # msbuild node reuse.
  if ($ci) {
    if (!$binaryLog -and !$excludeCIBinarylog) {
      Write-PipelineTelemetryError -Category 'Build' -Message 'Binary log must be enabled in CI build, or explicitly opted-out from with the -excludeCIBinarylog switch.'
      ExitWithExitCode 1
    }

    if ($nodeReuse) {
      Write-PipelineTelemetryError -Category 'Build' -Message 'Node reuse must be disabled in CI build.'
      ExitWithExitCode 1
    }
  }

  Enable-Nuget-EnhancedRetry

  $buildTool = InitializeBuildTool

  $cmdArgs = "$($buildTool.Command) /m /nologo /clp:Summary /v:$verbosity /nr:$nodeReuse /p:ContinuousIntegrationBuild=$ci"

  # Add -mt flag for MSBuild multithreaded mode if enabled via environment variable
  if ($env:MSBUILD_MT_ENABLED -eq "1") {
    $cmdArgs += ' -mt'
  }

  if ($warnAsError) {
    $cmdArgs += ' /warnaserror /p:TreatWarningsAsErrors=true'
  }
  else {
    $cmdArgs += ' /p:TreatWarningsAsErrors=false'
  }

  foreach ($arg in $args) {
    if ($null -ne $arg -and $arg.Trim() -ne "") {
      # A trailing backslash would escape the closing quote we add below;
      # double it so the quoted argument survives command-line parsing.
      if ($arg.EndsWith('\')) {
        $arg = $arg + "\"
      }
      $cmdArgs += " `"$arg`""
    }
  }

  # Be sure quote the path in case there are spaces in the dotnet installation location.
  $env:ARCADE_BUILD_TOOL_COMMAND = "`"$($buildTool.Path)`" $cmdArgs"

  $exitCode = Exec-Process $buildTool.Path $cmdArgs

  if ($exitCode -ne 0) {
    # We should not Write-PipelineTaskError here because that message shows up in the build summary
    # The build already logged an error, that's the reason it failed. Producing an error here only adds noise.
    Write-Host "Build failed with exit code $exitCode. Check errors above." -ForegroundColor Red

    $buildLog = GetMSBuildBinaryLogCommandLineArgument $args
    if ($null -ne $buildLog) {
      Write-Host "See log: $buildLog" -ForegroundColor DarkGray
    }

    # When running on Azure Pipelines, override the returned exit code to avoid double logging.
    # Skip this when the build is a child of the VMR build.
    if ($ci -and $env:SYSTEM_TEAMPROJECT -ne $null -and !$fromVMR) {
      Write-PipelineSetResult -Result "Failed" -Message "msbuild execution failed."
      # Exiting with an exit code causes the azure pipelines task to log yet another "noise" error
      # The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error
      ExitWithExitCode 0
    } else {
      ExitWithExitCode $exitCode
    }
  }
}
# Scans an msbuild argument list for a binary-log switch (/bl: or
# /binaryLogger:, case-insensitive) and returns the log path that follows the
# prefix, or $null when none is present.
function GetMSBuildBinaryLogCommandLineArgument($arguments) {
  foreach ($argument in $arguments) {
    if ($null -eq $argument) { continue }

    $trimmed = $argument.Trim()
    foreach ($prefix in '/bl:', '/binaryLogger:') {
      if ($trimmed.StartsWith($prefix, 'OrdinalIgnoreCase')) {
        return $trimmed.Substring($prefix.Length)
      }
    }
  }
  return $null
}
# Appends '.exe' to the base name on Windows; returns it unchanged elsewhere.
function GetExecutableFileName($baseName) {
  if (-not (IsWindowsPlatform)) {
    return $baseName
  }
  return "$baseName.exe"
}
# True when the current OS reports the Win32NT platform id (i.e. Windows).
function IsWindowsPlatform() {
  $platform = [environment]::OSVersion.Platform
  return $platform -eq [PlatformID]::Win32NT
}
# Installs the darc CLI into a unique temp folder (optionally pinning a version)
# and returns the full path to darc.exe.
function Get-Darc($version) {
  $darcPath = "$TempDir\darc\$([guid]::NewGuid())"

  $initArgs = @{ toolpath = $darcPath }
  if ($version -ne $null) {
    $initArgs.darcVersion = $version
  }
  & $PSScriptRoot\darc-init.ps1 @initArgs | Out-Host

  return "$darcPath\darc.exe"
}
# --- Script-level initialization (runs when tools.ps1 is dot-sourced) ---

. $PSScriptRoot\pipeline-logging-functions.ps1

# Well-known repository layout paths used throughout the build scripts.
$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot '..\..\')
$EngRoot = Resolve-Path (Join-Path $PSScriptRoot '..')
$ArtifactsDir = Join-Path $RepoRoot 'artifacts'
$ToolsetDir = Join-Path $ArtifactsDir 'toolset'
$ToolsDir = Join-Path $RepoRoot '.tools'
$LogDir = Join-Path (Join-Path $ArtifactsDir 'log') $configuration
$TempDir = Join-Path (Join-Path $ArtifactsDir 'tmp') $configuration
$GlobalJson = Get-Content -Raw -Path (Join-Path $RepoRoot 'global.json') | ConvertFrom-Json
# true if global.json contains a "runtimes" section
$globalJsonHasRuntimes = if ($GlobalJson.tools.PSObject.Properties.Name -Match 'runtimes') { $true } else { $false }

Create-Directory $ToolsetDir
Create-Directory $TempDir
Create-Directory $LogDir

# Surface the layout to later Azure Pipelines steps. TEMP/TMP are redirected so
# temporary files land under artifacts and are cleaned up with the build output.
Write-PipelineSetVariable -Name 'Artifacts' -Value $ArtifactsDir
Write-PipelineSetVariable -Name 'Artifacts.Toolset' -Value $ToolsetDir
Write-PipelineSetVariable -Name 'Artifacts.Log' -Value $LogDir
Write-PipelineSetVariable -Name 'TEMP' -Value $TempDir
Write-PipelineSetVariable -Name 'TMP' -Value $TempDir

# Import custom tools configuration, if present in the repo.
# Note: Import in global scope so that the script set top-level variables without qualification.
if (!$disableConfigureToolsetImport) {
  $configureToolsetScript = Join-Path $EngRoot 'configure-toolset.ps1'
  if (Test-Path $configureToolsetScript) {
    . $configureToolsetScript
    if ((Test-Path variable:failOnConfigureToolsetError) -And $failOnConfigureToolsetError) {
      if ((Test-Path variable:LastExitCode) -And ($LastExitCode -ne 0)) {
        Write-PipelineTelemetryError -Category 'Build' -Message 'configure-toolset.ps1 returned a non-zero exit code'
        ExitWithExitCode $LastExitCode
      }
    }
  }
}
#
# If $ci flag is set, turn on (and log that we did) special environment variables for improved Nuget client retry logic.
#
function Enable-Nuget-EnhancedRetry() {
  if (-not $ci) {
    return
  }

  Write-Host "Setting NUGET enhanced retry environment variables"
  $env:NUGET_ENABLE_ENHANCED_HTTP_RETRY = 'true'
  $env:NUGET_ENHANCED_MAX_NETWORK_TRY_COUNT = 6
  $env:NUGET_ENHANCED_NETWORK_RETRY_DELAY_MILLISECONDS = 1000
  $env:NUGET_RETRY_HTTP_429 = 'true'
  Write-PipelineSetVariable -Name 'NUGET_ENABLE_ENHANCED_HTTP_RETRY' -Value 'true'
  Write-PipelineSetVariable -Name 'NUGET_ENHANCED_MAX_NETWORK_TRY_COUNT' -Value '6'
  Write-PipelineSetVariable -Name 'NUGET_ENHANCED_NETWORK_RETRY_DELAY_MILLISECONDS' -Value '1000'
  Write-PipelineSetVariable -Name 'NUGET_RETRY_HTTP_429' -Value 'true'
}
================================================
FILE: eng/common/tools.sh
================================================
#!/usr/bin/env bash

# Initialize variables if they aren't already defined.

# CI mode - set to true on CI server for PR validation build or official build.
ci=${ci:-false}

# Build mode
source_build=${source_build:-false}

# Set to true to use the pipelines logger which will enable Azure logging output.
# https://github.com/Microsoft/azure-pipelines-tasks/blob/master/docs/authoring/commands.md
# This flag is meant as a temporary opt-out for the feature while we validate it across
# our consumers. It will be deleted in the future.
if [[ "$ci" == true ]]; then
  pipelines_log=${pipelines_log:-true}
else
  pipelines_log=${pipelines_log:-false}
fi

# Build configuration. Common values include 'Debug' and 'Release', but the repository may use other names.
configuration=${configuration:-'Debug'}

# Set to true to opt out of outputting binary log while running in CI
exclude_ci_binary_log=${exclude_ci_binary_log:-false}

# Binary logging defaults to on in CI unless explicitly opted out above.
if [[ "$ci" == true && "$exclude_ci_binary_log" == false ]]; then
  binary_log_default=true
else
  binary_log_default=false
fi

# Set to true to output binary log from msbuild. Note that emitting binary log slows down the build.
binary_log=${binary_log:-$binary_log_default}

# Turns on machine preparation/clean up code that changes the machine state (e.g. kills build processes).
prepare_machine=${prepare_machine:-false}

# True to restore toolsets and dependencies.
restore=${restore:-true}

# Adjusts msbuild verbosity level.
verbosity=${verbosity:-'minimal'}

# Set to true to reuse msbuild nodes. Recommended to not reuse on CI.
if [[ "$ci" == true ]]; then
  node_reuse=${node_reuse:-false}
else
  node_reuse=${node_reuse:-true}
fi

# Configures warning treatment in msbuild.
warn_as_error=${warn_as_error:-true}

# True to attempt using a .NET SDK already installed on the machine that meets the
# requirements specified in global.json, instead of downloading one.
use_installed_dotnet_cli=${use_installed_dotnet_cli:-true}

# Enable repos to use a particular version of the on-line dotnet-install scripts.
#    default URL: https://builds.dotnet.microsoft.com/dotnet/scripts/v1/dotnet-install.sh
dotnetInstallScriptVersion=${dotnetInstallScriptVersion:-'v1'}

# True to use global NuGet cache instead of restoring packages to repository-local directory.
# Keep in sync with NuGetPackageroot in Arcade SDK's RepositoryLayout.props.
if [[ "$ci" == true || "$source_build" == true ]]; then
  use_global_nuget_cache=${use_global_nuget_cache:-false}
else
  use_global_nuget_cache=${use_global_nuget_cache:-true}
fi

# Used when restoring .NET SDK from alternative feeds
runtime_source_feed=${runtime_source_feed:-''}
runtime_source_feed_key=${runtime_source_feed_key:-''}

# True when the build is running within the VMR.
from_vmr=${from_vmr:-false}
# Resolve any symlinks in the given path.
# The result is returned via the global _ResolvePath (bash functions cannot
# return strings directly without spawning a subshell).
function ResolvePath {
  local path=$1

  # Follow each symlink in turn until we reach a non-link path.
  while [[ -h $path ]]; do
    local dir="$( cd -P "$( dirname "$path" )" && pwd )"
    path="$(readlink "$path")"

    # if $path was a relative symlink, we need to resolve it relative to the path where the
    # symlink file was located
    [[ $path != /* ]] && path="$dir/$path"
  done

  # return value
  _ResolvePath="$path"
}
# ReadGlobalVersion [json key]
# Reads the value of the given key from global.json into the global
# _ReadGlobalVersion. Prefers jq when available; otherwise falls back to a
# regex scan of the raw file. Exits the script if the key cannot be found.
function ReadGlobalVersion {
  local key=$1

  if command -v jq &> /dev/null; then
    _ReadGlobalVersion="$(jq -r ".[] | select(has(\"$key\")) | .\"$key\"" "$global_json_file")"
  elif [[ "$(cat "$global_json_file")" =~ \"$key\"[[:space:]\:]*\"([^\"]+) ]]; then
    # BASH_REMATCH[1] holds the quoted value captured by the regex above.
    _ReadGlobalVersion=${BASH_REMATCH[1]}
  fi

  if [[ -z "$_ReadGlobalVersion" ]]; then
    Write-PipelineTelemetryError -category 'Build' "Error: Cannot find \"$key\" in $global_json_file"
    ExitWithExitCode 1
  fi
}
# Resolves (and, when $1 is 'true', installs) the .NET SDK version pinned in
# global.json. The resolved root is returned via the global _InitializeDotNetCli;
# DOTNET_* environment variables are exported for build determinism.
function InitializeDotNetCli {
  # Return the cached result of a previous invocation, if any.
  if [[ -n "${_InitializeDotNetCli:-}" ]]; then
    return
  fi

  local install=$1

  # Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism
  export DOTNET_MULTILEVEL_LOOKUP=0

  # Disable first run since we want to control all package sources
  export DOTNET_NOLOGO=1

  # Disable telemetry on CI
  if [[ $ci == true ]]; then
    export DOTNET_CLI_TELEMETRY_OPTOUT=1
  fi

  # LTTNG is the logging infrastructure used by Core CLR. Need this variable set
  # so it doesn't output warnings to the console.
  export LTTNG_HOME="$HOME"

  # Find the first path on $PATH that contains the dotnet.exe
  if [[ "$use_installed_dotnet_cli" == true && $global_json_has_runtimes == false && -z "${DOTNET_INSTALL_DIR:-}" ]]; then
    local dotnet_path=`command -v dotnet`
    if [[ -n "$dotnet_path" ]]; then
      ResolvePath "$dotnet_path"
      export DOTNET_INSTALL_DIR=`dirname "$_ResolvePath"`
    fi
  fi

  ReadGlobalVersion "dotnet"
  local dotnet_sdk_version=$_ReadGlobalVersion
  local dotnet_root=""

  # Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version,
  # otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues.
  if [[ $global_json_has_runtimes == false && -n "${DOTNET_INSTALL_DIR:-}" && -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then
    dotnet_root="$DOTNET_INSTALL_DIR"
  else
    dotnet_root="${repo_root}.dotnet"

    export DOTNET_INSTALL_DIR="$dotnet_root"

    if [[ ! -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then
      if [[ "$install" == true ]]; then
        InstallDotNetSdk "$dotnet_root" "$dotnet_sdk_version"
      else
        Write-PipelineTelemetryError -category 'InitializeToolset' "Unable to find dotnet with SDK version '$dotnet_sdk_version'"
        ExitWithExitCode 1
      fi
    fi
  fi

  # Add dotnet to PATH. This prevents any bare invocation of dotnet in custom
  # build steps from using anything other than what we've downloaded.
  Write-PipelinePrependPath -path "$dotnet_root"

  Write-PipelineSetVariable -name "DOTNET_MULTILEVEL_LOOKUP" -value "0"
  Write-PipelineSetVariable -name "DOTNET_NOLOGO" -value "1"

  # return value
  _InitializeDotNetCli="$dotnet_root"
}
# InstallDotNetSdk <root> <version> [<architecture>]
# Installs the given SDK version under <root> via InstallDotNet. The
# architecture defaults to the 'unset' sentinel, which InstallDotNet treats as
# "use the machine default".
# NOTE(review): $architecture is deliberately NOT declared before use in
# InstallDotNet but is read there via bash dynamic scoping for log messages;
# it is also intentionally unquoted below so an empty value would disappear.
function InstallDotNetSdk {
local root=$1
local version=$2
local architecture="unset"
if [[ $# -ge 3 ]]; then
architecture=$3
fi
InstallDotNet "$root" "$version" $architecture 'sdk' 'true' $runtime_source_feed $runtime_source_feed_key
}
# InstallDotNet <root> <version> [<architecture>] [<runtime>] [<skipNonVersionedFiles>] [<feed>] [<feedKey>]
# Downloads and runs the dotnet-install script to install either an SDK
# ($4 empty or 'sdk') or a runtime ('dotnet'/'aspnetcore'/'windowsdesktop')
# into <root>. Tries the public location first, then the ci.dot.net mirror,
# then an optional private feed ($6) with a base64-encoded credential ($7).
# Exits the build if every location fails.
function InstallDotNet {
local root=$1
local version=$2
local runtime=$4
local dotnetVersionLabel="'$runtime v$version'"
# Runtime installs (anything other than 'sdk') can be skipped entirely if the
# versioned shared-framework directory already exists under <root>/shared.
if [[ -n "${4:-}" ]] && [ "$4" != 'sdk' ]; then
runtimePath="$root"
runtimePath="$runtimePath/shared"
case "$runtime" in
dotnet)
runtimePath="$runtimePath/Microsoft.NETCore.App"
;;
aspnetcore)
runtimePath="$runtimePath/Microsoft.AspNetCore.App"
;;
windowsdesktop)
runtimePath="$runtimePath/Microsoft.WindowsDesktop.App"
;;
*)
;;
esac
runtimePath="$runtimePath/$version"
# NOTE(review): $architecture here comes from the caller's scope (set by
# InstallDotNetSdk) via bash dynamic scoping — confirm when refactoring.
dotnetVersionLabel="runtime toolset '$runtime/$architecture v$version'"
if [ -d "$runtimePath" ]; then
echo "  Runtime toolset '$runtime/$architecture v$version' already installed."
local installSuccess=1
return
fi
fi
GetDotNetInstallScript "$root"
local install_script=$_GetDotNetInstallScript
# Base parameters shared by every download location attempt.
local installParameters=(--version $version --install-dir "$root")
if [[ -n "${3:-}" ]] && [ "$3" != 'unset' ]; then
installParameters+=(--architecture $3)
fi
if [[ -n "${4:-}" ]] && [ "$4" != 'sdk' ]; then
installParameters+=(--runtime $4)
fi
if [[ "$#" -ge "5" ]] && [[ "$5" != 'false' ]]; then
installParameters+=(--skip-non-versioned-files)
fi
local variations=() # list of variable names with parameter arrays in them
local public_location=("${installParameters[@]}")
variations+=(public_location)
local dotnetbuilds=("${installParameters[@]}" --azure-feed "https://ci.dot.net/public")
variations+=(dotnetbuilds)
if [[ -n "${6:-}" ]]; then
variations+=(private_feed)
local private_feed=("${installParameters[@]}" --azure-feed $6)
if [[ -n "${7:-}" ]]; then
# The 'base64' binary on alpine uses '-d' and doesn't support '--decode'
# '-d'. To work around this, do a simple detection and switch the parameter
# accordingly.
decodeArg="--decode"
if base64 --help 2>&1 | grep -q "BusyBox"; then
decodeArg="-d"
fi
decodedFeedKey=`echo $7 | base64 $decodeArg`
private_feed+=(--feed-credential $decodedFeedKey)
fi
fi
# Try each location in order until one install succeeds; indirect expansion
# (${!name}) recovers each per-location argument array by variable name.
local installSuccess=0
for variationName in "${variations[@]}"; do
local name="$variationName[@]"
local variation=("${!name}")
echo "  Attempting to install $dotnetVersionLabel from $variationName."
bash "$install_script" "${variation[@]}" && installSuccess=1
if [[ "$installSuccess" -eq 1 ]]; then
break
fi
echo "  Failed to install $dotnetVersionLabel from $variationName."
done
if [[ "$installSuccess" -eq 0 ]]; then
Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to install $dotnetVersionLabel from any of the specified locations."
ExitWithExitCode 1
fi
}
# with_retries <command...>
# Runs the command up to 5 times with exponential backoff (3^attempt - 1
# seconds between tries). Returns 0 on the first success, 1 once all attempts
# are exhausted. Failure/progress messages go to stderr.
function with_retries {
  local max_attempts=5
  local attempt=1
  echo "Trying to run '$@' for maximum of $max_attempts attempts."
  while (( attempt <= max_attempts )); do
    # Pre-increment to match the original post-increment numbering: the
    # message after a failure reports attempt+1.
    (( attempt++ ))
    if "$@"; then
      echo "Ran '$@' successfully."
      return 0
    fi
    timeout=$((3**attempt-1))
    echo "Failed to execute '$@'. Waiting $timeout seconds before next attempt ($attempt out of $max_attempts)." 1>&2
    sleep $timeout
  done
  echo "Failed to execute '$@' for $max_attempts times." 1>&2
  return 1
}
# GetDotNetInstallScript <root>
# Ensures a fresh copy of dotnet-install.sh exists under <root> and returns its
# path in the global _GetDotNetInstallScript. The script is (re-)downloaded if
# missing, older than 30 days (per a sibling timestamp file), or untracked.
function GetDotNetInstallScript {
local root=$1
local install_script="$root/dotnet-install.sh"
local install_script_url="https://builds.dotnet.microsoft.com/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.sh"
local timestamp_file="$root/.dotnet-install.timestamp"
local should_download=false
# Decide whether a (re-)download is needed.
if [[ ! -a "$install_script" ]]; then
should_download=true
elif [[ -f "$timestamp_file" ]]; then
# Check if the script is older than 30 days using timestamp file
local download_time=$(cat "$timestamp_file" 2>/dev/null || echo "0")
local current_time=$(date +%s)
local age_seconds=$((current_time - download_time))
# 30 days = 30 * 24 * 60 * 60 = 2592000 seconds
if [[ $age_seconds -gt 2592000 ]]; then
echo "Existing install script is too old, re-downloading..."
should_download=true
fi
else
# No timestamp file exists, assume script is old and re-download
echo "No timestamp found for existing install script, re-downloading..."
should_download=true
fi
if [[ "$should_download" == true ]]; then
mkdir -p "$root"
echo "Downloading '$install_script_url'"
# Use curl if available, otherwise use wget
if command -v curl > /dev/null; then
# first, try directly, if this fails we will retry with verbose logging
curl "$install_script_url" -sSL --retry 10 --create-dirs -o "$install_script" || {
# On failure, capture TLS diagnostics before the verbose retry to help
# investigate transient certificate/connectivity issues on build agents.
if command -v openssl &> /dev/null; then
echo "Curl failed; dumping some information about dotnet.microsoft.com for later investigation"
echo | openssl s_client -showcerts -servername dotnet.microsoft.com -connect dotnet.microsoft.com:443 || true
fi
echo "Will now retry the same URL with verbose logging."
with_retries curl "$install_script_url" -sSL --verbose --retry 10 --create-dirs -o "$install_script" || {
local exit_code=$?
Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')."
ExitWithExitCode $exit_code
}
}
else
with_retries wget -v -O "$install_script" "$install_script_url" || {
local exit_code=$?
Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')."
ExitWithExitCode $exit_code
}
fi
# Create timestamp file to track download time in seconds from epoch
date +%s > "$timestamp_file"
fi
# return value
_GetDotNetInstallScript="$install_script"
}
# Lazily resolves the build tool: the repo-local dotnet host and the MSBuild
# command it runs. Results are cached in the globals _InitializeBuildTool and
# _InitializeBuildToolCommand; repeat calls are no-ops.
function InitializeBuildTool {
  # Already resolved on a previous call.
  if [[ -n "${_InitializeBuildTool:-}" ]]; then
    return
  fi
  InitializeDotNetCli $restore
  # return values
  _InitializeBuildToolCommand="msbuild"
  _InitializeBuildTool="$_InitializeDotNetCli/dotnet"
}
# Determines the NuGet package cache directory, honoring a pre-set
# NUGET_PACKAGES environment variable, and returns it via
# _GetNuGetPackageCachePath. Exports NUGET_PACKAGES when it was unset.
function GetNuGetPackageCachePath {
  if [[ -z ${NUGET_PACKAGES:-} ]]; then
    # Not preset: choose the machine-wide cache or a repo-local one based on
    # the use_global_nuget_cache setting (disabled by default on CI).
    local cache_dir="$repo_root/.packages/"
    if [[ "$use_global_nuget_cache" == true ]]; then
      cache_dir="$HOME/.nuget/packages/"
    fi
    export NUGET_PACKAGES="$cache_dir"
  fi
  # return value
  _GetNuGetPackageCachePath=$NUGET_PACKAGES
}
# Runs the repo's native-tool bootstrapper when global.json mentions a
# "native-tools" section. DisableNativeToolsetInstalls acts as an opt-out.
function InitializeNativeTools() {
if [[ -n "${DisableNativeToolsetInstalls:-}" ]]; then
return
fi
# Quote the path so a repo root containing spaces does not word-split into
# multiple grep arguments (previously unquoted).
if grep -Fq "native-tools" "$global_json_file"
then
local nativeArgs=""
if [[ "$ci" == true ]]; then
nativeArgs="--installDirectory $tools_dir"
fi
# $nativeArgs is intentionally unquoted so it expands to separate
# '--installDirectory <dir>' arguments (or to nothing when empty).
"$_script_dir/init-tools-native.sh" $nativeArgs
fi
}
# Restores (or locates a previously restored) Arcade SDK toolset matching the
# version pinned in global.json and returns the toolset project path via the
# global _InitializeToolset. Exits with distinct codes: 2 when restore is
# disabled but needed, 3 when the recorded toolset path is invalid.
function InitializeToolset {
# Cached from a previous call.
if [[ -n "${_InitializeToolset:-}" ]]; then
return
fi
GetNuGetPackageCachePath
ReadGlobalVersion "Microsoft.DotNet.Arcade.Sdk"
local toolset_version=$_ReadGlobalVersion
# A marker file per toolset version records where the toolset was restored.
local toolset_location_file="$toolset_dir/$toolset_version.txt"
if [[ -a "$toolset_location_file" ]]; then
local path=`cat "$toolset_location_file"`
if [[ -a "$path" ]]; then
# return value
_InitializeToolset="$path"
return
fi
fi
if [[ "$restore" != true ]]; then
Write-PipelineTelemetryError -category 'InitializeToolset' "Toolset version $toolset_version has not been restored."
ExitWithExitCode 2
fi
local proj="$toolset_dir/restore.proj"
local bl=""
if [[ "$binary_log" == true ]]; then
bl="/bl:$log_dir/ToolsetRestore.binlog"
fi
# NOTE(review): this looks like it should write a minimal MSBuild project
# (e.g. '<Project Sdk="Microsoft.DotNet.Arcade.Sdk"/>') — the XML content
# appears stripped by the repo extraction; confirm against upstream Arcade.
echo '' > "$proj"
# Restoring the project causes the SDK to write its location into the file.
MSBuild-Core "$proj" $bl /t:__WriteToolsetLocation /clp:ErrorsOnly\;NoSummary /p:__ToolsetLocationOutputFile="$toolset_location_file"
local toolset_build_proj=`cat "$toolset_location_file"`
if [[ ! -a "$toolset_build_proj" ]]; then
Write-PipelineTelemetryError -category 'Build' "Invalid toolset path: $toolset_build_proj"
ExitWithExitCode 3
fi
# return value
_InitializeToolset="$toolset_build_proj"
}
# Exits the script with the given code; on a CI machine that will be recycled
# (prepare_machine), stray build processes are killed first.
function ExitWithExitCode {
  if [[ "$ci" == true ]] && [[ "$prepare_machine" == true ]]; then
    StopProcesses
  fi
  exit $1
}
# Force-kills lingering build processes (dotnet hosts and the compiler server)
# so a pooled CI machine is left clean. Always returns 0; pkill failures
# (e.g. no matching process) are deliberately ignored.
function StopProcesses {
  echo "Killing running build processes..."
  local proc
  for proc in dotnet vbcscompiler; do
    pkill -9 "$proc" || true
  done
  return 0
}
# MSBuild <args...>
# Wrapper over MSBuild-Core that, when pipeline logging is enabled, attaches
# the Arcade pipeline logger from the restored toolset and tightens NuGet
# credential-plugin timeouts on CI agents.
function MSBuild {
local args=( "$@" )
if [[ "$pipelines_log" == true ]]; then
InitializeBuildTool
InitializeToolset
if [[ "$ci" == true ]]; then
# Avoid long hangs when the NuGet credential plugin is unresponsive.
export NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS=20
export NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS=20
Write-PipelineSetVariable -name "NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS" -value "20"
Write-PipelineSetVariable -name "NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS" -value "20"
fi
local toolset_dir="${_InitializeToolset%/*}"
local selectedPath="$toolset_dir/net/Microsoft.DotNet.ArcadeLogging.dll"
# Fail early if the logger assembly is missing from the restored toolset.
# (The previous '[[ -z ... ]]' test could never fire because selectedPath is
# always assigned a non-empty string above; an existence check is intended.)
if [[ ! -f "$selectedPath" ]]; then
Write-PipelineTelemetryError -category 'Build' "Unable to find arcade sdk logger assembly: $selectedPath"
ExitWithExitCode 1
fi
args+=( "-logger:$selectedPath" )
fi
MSBuild-Core "${args[@]}"
}
# MSBuild-Core <args...>
# Low-level MSBuild invocation: enforces CI invariants (binary log on unless
# explicitly opted out, node reuse off), then runs 'dotnet msbuild' with the
# standard switch set plus the caller's arguments. On failure, exit-code
# handling differs between Azure Pipelines and local/VMR-child builds.
function MSBuild-Core {
if [[ "$ci" == true ]]; then
if [[ "$binary_log" != true && "$exclude_ci_binary_log" != true ]]; then
Write-PipelineTelemetryError -category 'Build' "Binary log must be enabled in CI build, or explicitly opted-out from with the -noBinaryLog switch."
ExitWithExitCode 1
fi
if [[ "$node_reuse" == true ]]; then
Write-PipelineTelemetryError -category 'Build' "Node reuse must be disabled in CI build."
ExitWithExitCode 1
fi
fi
InitializeBuildTool
local warnaserror_switch=""
if [[ $warn_as_error == true ]]; then
warnaserror_switch="/warnaserror"
fi
# Nested helper: runs the resolved build tool and handles failure reporting.
# Defined inside MSBuild-Core so it sees the locals computed above.
function RunBuildTool {
export ARCADE_BUILD_TOOL_COMMAND="$_InitializeBuildTool $@"
"$_InitializeBuildTool" "$@" || {
local exit_code=$?
# We should not Write-PipelineTaskError here because that message shows up in the build summary
# The build already logged an error, that's the reason it failed. Producing an error here only adds noise.
echo "Build failed with exit code $exit_code. Check errors above."
# When running on Azure Pipelines, override the returned exit code to avoid double logging.
# Skip this when the build is a child of the VMR build.
if [[ "$ci" == true && -n ${SYSTEM_TEAMPROJECT:-} && "$from_vmr" != true ]]; then
Write-PipelineSetResult -result "Failed" -message "msbuild execution failed."
# Exiting with an exit code causes the azure pipelines task to log yet another "noise" error
# The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error
ExitWithExitCode 0
else
ExitWithExitCode $exit_code
fi
}
}
# Add -mt flag for MSBuild multithreaded mode if enabled via environment variable
local mt_switch=""
if [[ "${MSBUILD_MT_ENABLED:-}" == "1" ]]; then
mt_switch="-mt"
fi
RunBuildTool "$_InitializeBuildToolCommand" /m /nologo /clp:Summary /v:$verbosity /nr:$node_reuse $warnaserror_switch $mt_switch /p:TreatWarningsAsErrors=$warn_as_error /p:ContinuousIntegrationBuild=$ci "$@"
}
# GetDarc [version]
# Installs the darc tool into the repo temp directory via darc-init.sh and
# records the tool path in the global darc_tool. An optional version argument
# pins the darc build to install.
function GetDarc {
darc_path="$temp_dir/darc"
version="$1"
if [[ -n "$version" ]]; then
version="--darcversion $version"
fi
# $version is intentionally unquoted: it word-splits into the
# '--darcversion <value>' option pair, or disappears entirely when empty.
"$eng_root/common/darc-init.sh" --toolpath "$darc_path" $version
darc_tool="$darc_path/darc"
}
# Returns (echoes) a full path to an Arcade SDK task project file.
# Requires _InitializeToolset to be populated (see InitializeToolset).
function GetSdkTaskProject {
taskName=$1
# Quote the toolset path inside the substitution so directories containing
# spaces do not word-split the dirname argument (previously unquoted).
echo "$(dirname "$_InitializeToolset")/SdkTasks/$taskName.proj"
}
# ---- Script body: compute repo layout, probe global.json, prepare dirs. ----
ResolvePath "${BASH_SOURCE[0]}"
_script_dir=`dirname "$_ResolvePath"`
# Bring in the Write-Pipeline* helpers used throughout this file.
. "$_script_dir/pipeline-logging-functions.sh"
eng_root=`cd -P "$_script_dir/.." && pwd`
repo_root=`cd -P "$_script_dir/../.." && pwd`
# repo_root keeps a trailing slash; paths below concatenate without one.
repo_root="${repo_root}/"
artifacts_dir="${repo_root}artifacts"
toolset_dir="$artifacts_dir/toolset"
tools_dir="${repo_root}.tools"
log_dir="$artifacts_dir/log/$configuration"
temp_dir="$artifacts_dir/tmp/$configuration"
global_json_file="${repo_root}global.json"
# determine if global.json contains a "runtimes" entry
global_json_has_runtimes=false
if command -v jq &> /dev/null; then
if jq -e '.tools | has("runtimes")' "$global_json_file" &> /dev/null; then
global_json_has_runtimes=true
fi
elif [[ "$(cat "$global_json_file")" =~ \"runtimes\"[[:space:]\:]*\{ ]]; then
global_json_has_runtimes=true
fi
# HOME may not be defined in some scenarios, but it is required by NuGet
if [[ -z $HOME ]]; then
export HOME="${repo_root}artifacts/.home/"
mkdir -p "$HOME"
fi
mkdir -p "$toolset_dir"
mkdir -p "$temp_dir"
mkdir -p "$log_dir"
# Surface the layout to Azure Pipelines as pipeline variables.
Write-PipelineSetVariable -name "Artifacts" -value "$artifacts_dir"
Write-PipelineSetVariable -name "Artifacts.Toolset" -value "$toolset_dir"
Write-PipelineSetVariable -name "Artifacts.Log" -value "$log_dir"
Write-PipelineSetVariable -name "Temp" -value "$temp_dir"
Write-PipelineSetVariable -name "TMP" -value "$temp_dir"
# Import custom tools configuration, if present in the repo.
if [ -z "${disable_configure_toolset_import:-}" ]; then
configure_toolset_script="$eng_root/configure-toolset.sh"
if [[ -a "$configure_toolset_script" ]]; then
. "$configure_toolset_script"
fi
fi
# TODO: https://github.com/dotnet/arcade/issues/1468
# Temporary workaround to avoid breaking change.
# Remove once repos are updated.
if [[ -n "${useInstalledDotNetCli:-}" ]]; then
use_installed_dotnet_cli="$useInstalledDotNetCli"
fi
================================================
FILE: eng/common/vmr-sync.ps1
================================================
<#
.SYNOPSIS
This script is used for synchronizing the current repository into a local VMR.
It pulls the current repository's code into the specified VMR directory for local testing or
Source-Build validation.
.DESCRIPTION
The tooling used for synchronization will clone the VMR repository into a temporary folder if
it does not already exist. These clones can be reused in future synchronizations, so it is
recommended to dedicate a folder for this to speed up re-runs.
.EXAMPLE
Synchronize current repository into a local VMR:
./vmr-sync.ps1 -vmrDir "$HOME/repos/dotnet" -tmpDir "$HOME/repos/tmp"
.PARAMETER tmpDir
Required. Path to the temporary folder where repositories will be cloned
.PARAMETER vmrBranch
Optional. Branch of the 'dotnet/dotnet' repo to synchronize. The VMR will be checked out to this branch
.PARAMETER azdevPat
Optional. Azure DevOps PAT to use for cloning private repositories.
.PARAMETER vmrDir
Optional. Path to the dotnet/dotnet repository. When null, gets cloned to the temporary folder
.PARAMETER debugOutput
Optional. Enables debug logging in the darc vmr command.
.PARAMETER ci
Optional. Denotes that the script is running in a CI environment.
#>
# Script parameters; only -tmpDir is mandatory. See the comment-based help
# above for descriptions of each argument.
param (
[Parameter(Mandatory=$true, HelpMessage="Path to the temporary folder where repositories will be cloned")]
[string][Alias('t', 'tmp')]$tmpDir,
[string][Alias('b', 'branch')]$vmrBranch,
# NOTE(review): -remote is declared but not referenced in the visible script
# body — presumably consumed elsewhere or vestigial; confirm before removing.
[string]$remote,
[string]$azdevPat,
[string][Alias('v', 'vmr')]$vmrDir,
[switch]$ci,
[switch]$debugOutput
)
# Writes an error-style message (red, '>' prefix) to the host. Does not exit;
# callers are expected to follow up with their own 'exit'.
function Fail {
    param([string]$Message)
    Write-Host "> $Message" -ForegroundColor 'Red'
}
# Writes an informational message (cyan, '>' prefix) to the host.
function Highlight {
    param([string]$Message)
    Write-Host "> $Message" -ForegroundColor 'Cyan'
}
# darc logging verbosity; bumped to 'debug' when -debugOutput is passed.
$verbosity = 'verbose'
if ($debugOutput) {
$verbosity = 'debug'
}
# Validation
if (-not $tmpDir) {
Fail "Missing -tmpDir argument. Please specify the path to the temporary folder where the repositories will be cloned"
exit 1
}
# Sanitize the input
# Default the VMR location to <tmpDir>/dotnet when not supplied.
if (-not $vmrDir) {
$vmrDir = Join-Path $tmpDir 'dotnet'
}
if (-not (Test-Path -Path $tmpDir -PathType Container)) {
New-Item -ItemType Directory -Path $tmpDir | Out-Null
}
# Prepare the VMR
# Fresh clone when the VMR directory does not exist yet; otherwise require a
# clean working tree and optionally move to the requested branch.
if (-not (Test-Path -Path $vmrDir -PathType Container)) {
Highlight "Cloning 'dotnet/dotnet' into $vmrDir.."
git clone https://github.com/dotnet/dotnet $vmrDir
if ($vmrBranch) {
git -C $vmrDir switch -c $vmrBranch
}
}
else {
if ((git -C $vmrDir diff --quiet) -eq $false) {
Fail "There are changes in the working tree of $vmrDir. Please commit or stash your changes"
exit 1
}
if ($vmrBranch) {
Highlight "Preparing $vmrDir"
git -C $vmrDir checkout $vmrBranch
git -C $vmrDir pull
}
}
Set-StrictMode -Version Latest
# Prepare darc
Highlight 'Installing .NET, preparing the tooling..'
# Dot-source the shared Arcade helpers (InitializeDotNetCli, Get-Darc, etc.).
. .\eng\common\tools.ps1
$dotnetRoot = InitializeDotNetCli -install:$true
$env:DOTNET_ROOT = $dotnetRoot
$darc = Get-Darc
Highlight "Starting the synchronization of VMR.."
# Synchronize the VMR
# The repo's VMR mapping name comes from the <Source> element of Version.Details.xml.
$versionDetailsPath = Resolve-Path (Join-Path $PSScriptRoot '..\Version.Details.xml') | Select-Object -ExpandProperty Path
[xml]$versionDetails = Get-Content -Path $versionDetailsPath
$repoName = $versionDetails.SelectSingleNode('//Source').Mapping
if (-not $repoName) {
Fail "Failed to resolve repo mapping from $versionDetailsPath"
exit 1
}
$darcArgs = (
"vmr", "forwardflow",
"--tmp", $tmpDir,
"--$verbosity",
$vmrDir
)
if ($ci) {
$darcArgs += ("--ci")
}
if ($azdevPat) {
$darcArgs += ("--azdev-pat", $azdevPat)
}
& "$darc" $darcArgs
if ($LASTEXITCODE -eq 0) {
Highlight "Synchronization succeeded"
}
else {
# Forward flow failed: fall back to hard-resetting the VMR contents to the
# current repo state via 'darc vmr reset'.
Highlight "Failed to flow code into the local VMR. Falling back to resetting the VMR to match repo contents..."
git -C $vmrDir reset --hard
# NOTE(review): $repoRoot is not assigned in this script; presumably it is
# defined by the dot-sourced tools.ps1 (PowerShell variable names are
# case-insensitive) — confirm before refactoring.
$resetArgs = (
"vmr", "reset",
"${repoName}:HEAD",
"--vmr", $vmrDir,
"--tmp", $tmpDir,
"--additional-remotes", "${repoName}:${repoRoot}"
)
& "$darc" $resetArgs
if ($LASTEXITCODE -eq 0) {
Highlight "Successfully reset the VMR using 'darc vmr reset'"
}
else {
Fail "Synchronization of repo to VMR failed!"
Fail "'$vmrDir' is left in its last state (re-run of this script will reset it)."
Fail "Please inspect the logs which contain path to the failing patch file (use -debugOutput to get all the details)."
Fail "Once you make changes to the conflicting VMR patch, commit it locally and re-run this script."
exit 1
}
}
================================================
FILE: eng/common/vmr-sync.sh
================================================
#!/bin/bash
### This script is used for synchronizing the current repository into a local VMR.
### It pulls the current repository's code into the specified VMR directory for local testing or
### Source-Build validation.
###
### The tooling used for synchronization will clone the VMR repository into a temporary folder if
### it does not already exist. These clones can be reused in future synchronizations, so it is
### recommended to dedicate a folder for this to speed up re-runs.
###
### USAGE:
### Synchronize current repository into a local VMR:
### ./vmr-sync.sh --tmp "$HOME/repos/tmp" "$HOME/repos/dotnet"
###
### Options:
### -t, --tmp, --tmp-dir PATH
### Required. Path to the temporary folder where repositories will be cloned
###
### -b, --branch, --vmr-branch BRANCH_NAME
### Optional. Branch of the 'dotnet/dotnet' repo to synchronize. The VMR will be checked out to this branch
###
### --debug
### Optional. Turns on the most verbose logging for the VMR tooling
###
### --remote name:URI
### Optional. Additional remote to use during the synchronization
### This can be used to synchronize to a commit from a fork of the repository
### Example: 'runtime:https://github.com/yourfork/runtime'
###
### --azdev-pat
### Optional. Azure DevOps PAT to use for cloning private repositories.
###
### -v, --vmr, --vmr-dir PATH
### Optional. Path to the dotnet/dotnet repository. When null, gets cloned to the temporary folder
# Resolve this script's real location (following symlinks) so relative paths
# work no matter how the script was invoked. $source is also read later by
# print_help to extract the '### ' usage block above.
source="${BASH_SOURCE[0]}"
# resolve $source until the file is no longer a symlink
while [[ -h "$source" ]]; do
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
source="$(readlink "$source")"
# if $source was a relative symlink, we need to resolve it relative to the path where the
# symlink file was located
[[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
# Prints the usage text: every '### ' line from the top of this script (from
# the first such line to the first blank line), with the '### ' prefix cut off.
function print_help () {
sed -n '/^### /,/^$/p' "$source" | cut -b 5-
}
# Terminal colors; tput failures (e.g. no TTY) degrade to empty strings.
COLOR_RED=$(tput setaf 1 2>/dev/null || true)
COLOR_CYAN=$(tput setaf 6 2>/dev/null || true)
COLOR_CLEAR=$(tput sgr0 2>/dev/null || true)
# Sentinel token callers may embed in messages; fail/highlight substitute it
# with their own color so multi-colored messages re-colorize correctly.
COLOR_RESET=uniquesearchablestring
FAILURE_PREFIX='> '
# Error-style message to stderr (red, '>' prefix).
function fail () {
echo "${COLOR_RED}$FAILURE_PREFIX${1//${COLOR_RESET}/${COLOR_RED}}${COLOR_CLEAR}" >&2
}
# Informational message to stdout (cyan, '>' prefix).
function highlight () {
echo "${COLOR_CYAN}$FAILURE_PREFIX${1//${COLOR_RESET}/${COLOR_CYAN}}${COLOR_CLEAR}"
}
# Option defaults; overridden by the argument loop below.
tmp_dir=''
vmr_dir=''
vmr_branch=''
additional_remotes=''
verbosity=verbose
azdev_pat=''
ci=false
# Parse command-line options (case-insensitively; see the '### ' help above).
while [[ $# -gt 0 ]]; do
opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
case "$opt" in
-t|--tmp|--tmp-dir)
tmp_dir=$2
shift
;;
-v|--vmr|--vmr-dir)
vmr_dir=$2
shift
;;
-b|--branch|--vmr-branch)
vmr_branch=$2
shift
;;
--remote)
# May be passed multiple times; values accumulate space-separated.
additional_remotes="$additional_remotes $2"
shift
;;
--azdev-pat)
azdev_pat=$2
shift
;;
--ci)
ci=true
;;
-d|--debug)
verbosity=debug
;;
-h|--help)
print_help
exit 0
;;
*)
fail "Invalid argument: $1"
print_help
exit 1
;;
esac
shift
done
# Validation
if [[ -z "$tmp_dir" ]]; then
fail "Missing --tmp-dir argument. Please specify the path to the temporary folder where the repositories will be cloned"
exit 1
fi
# Sanitize the input
# Default the VMR location to <tmp_dir>/dotnet when not supplied.
if [[ -z "$vmr_dir" ]]; then
vmr_dir="$tmp_dir/dotnet"
fi
if [[ ! -d "$tmp_dir" ]]; then
mkdir -p "$tmp_dir"
fi
# In debug mode, trace every command for easier diagnosis.
if [[ "$verbosity" == "debug" ]]; then
set -x
fi
# Prepare the VMR
# Fresh clone when the VMR directory does not exist yet; otherwise require a
# clean working tree and optionally move to the requested branch.
if [[ ! -d "$vmr_dir" ]]; then
highlight "Cloning 'dotnet/dotnet' into $vmr_dir.."
git clone https://github.com/dotnet/dotnet "$vmr_dir"
if [[ -n "$vmr_branch" ]]; then
git -C "$vmr_dir" switch -c "$vmr_branch"
fi
else
if ! git -C "$vmr_dir" diff --quiet; then
fail "There are changes in the working tree of $vmr_dir. Please commit or stash your changes"
exit 1
fi
if [[ -n "$vmr_branch" ]]; then
highlight "Preparing $vmr_dir"
git -C "$vmr_dir" checkout "$vmr_branch"
git -C "$vmr_dir" pull
fi
fi
set -e
# Prepare darc
highlight 'Installing .NET, preparing the tooling..'
# Pull in the shared Arcade helpers (InitializeDotNetCli, GetDarc, ...).
source "./eng/common/tools.sh"
InitializeDotNetCli true
GetDarc
dotnetDir=$( cd ./.dotnet/; pwd -P )
dotnet=$dotnetDir/dotnet
highlight "Starting the synchronization of VMR.."
# Allow the darc invocation below to fail without aborting the script.
set +e
# Turn accumulated values into complete option strings (intentionally left
# word-splittable when passed to darc).
if [[ -n "$additional_remotes" ]]; then
additional_remotes="--additional-remotes $additional_remotes"
fi
if [[ -n "$azdev_pat" ]]; then
azdev_pat="--azdev-pat $azdev_pat"
fi
ci_arg=''
if [[ "$ci" == "true" ]]; then
ci_arg="--ci"
fi
# Synchronize the VMR
version_details_path=$(cd "$scriptroot/.."; pwd -P)/Version.Details.xml
repo_name=$(grep -m 1 ' EndpointOption { get; }
internal Option AccountOption { get; }
internal Option CertificateProfileOption { get; }
internal AzureCredentialOptions AzureCredentialOptions { get; } = new();
internal Argument?> FilesArgument { get; }
// Builds the 'artifact-signing' CLI command: declares its required options
// (endpoint, account, certificate profile), the files argument, and the
// action that wires up DI for the Artifact Signing service before delegating
// to CodeCommand.HandleAsync.
// NOTE(review): generic type arguments (e.g. Option<Uri>, Argument<List<...>>)
// appear stripped by the repository extraction throughout this file — confirm
// against the original source before compiling.
internal ArtifactSigningCommand(CodeCommand codeCommand, IServiceProviderFactory serviceProviderFactory)
: base("artifact-signing", ArtifactSigningResources.CommandDescription)
{
ArgumentNullException.ThrowIfNull(codeCommand, nameof(codeCommand));
ArgumentNullException.ThrowIfNull(serviceProviderFactory, nameof(serviceProviderFactory));
// Endpoint must be an HTTPS URL; CodeCommand.ParseHttpsUrl enforces that.
EndpointOption = new Option("--artifact-signing-endpoint", "-ase")
{
CustomParser = CodeCommand.ParseHttpsUrl,
Description = ArtifactSigningResources.EndpointOptionDescription,
Required = true
};
AccountOption = new Option("--artifact-signing-account", "-asa")
{
Description = ArtifactSigningResources.AccountOptionDescription,
Required = true
};
CertificateProfileOption = new Option("--artifact-signing-certificate-profile", "-ascp")
{
Description = ArtifactSigningResources.CertificateProfileOptionDescription,
Required = true
};
// At least one file to sign is required.
FilesArgument = new Argument?>("file(s)")
{
Description = Resources.FilesArgumentDescription,
Arity = ArgumentArity.OneOrMore
};
Options.Add(EndpointOption);
Options.Add(AccountOption);
Options.Add(CertificateProfileOption);
AzureCredentialOptions.AddOptionsToCommand(this);
Arguments.Add(FilesArgument);
SetAction((ParseResult parseResult, CancellationToken cancellationToken) =>
{
List? filesArgument = parseResult.GetValue(FilesArgument);
if (filesArgument is not { Count: > 0 })
{
Console.Error.WriteLine(Resources.MissingFileValue);
return Task.FromResult(ExitCode.InvalidOptions);
}
// Credential creation reports its own errors; a null result means failure.
TokenCredential? credential = AzureCredentialOptions.CreateTokenCredential(parseResult);
if (credential is null)
{
return Task.FromResult(ExitCode.Failed);
}
// Some of the options are required and that is why we can safely use
// the null-forgiving operator (!) to simplify the code.
Uri endpointUrl = parseResult.GetValue(EndpointOption)!;
string accountName = parseResult.GetValue(AccountOption)!;
string certificateProfileName = parseResult.GetValue(CertificateProfileOption)!;
// Register the Azure certificate-profile client (with exponential retry)
// and the signing service that consumes it.
serviceProviderFactory.AddServices(services =>
{
services.AddAzureClients(builder =>
{
builder.AddCertificateProfileClient(endpointUrl);
builder.UseCredential(credential);
builder.ConfigureDefaults(options => options.Retry.Mode = RetryMode.Exponential);
});
services.AddSingleton(serviceProvider =>
{
return new ArtifactSigningService(
serviceProvider.GetRequiredService(),
accountName,
certificateProfileName,
serviceProvider.GetRequiredService>());
});
});
ArtifactSigningServiceProvider trustedSigningServiceProvider = new();
return codeCommand.HandleAsync(parseResult, serviceProviderFactory, trustedSigningServiceProvider, filesArgument);
});
}
}
}
================================================
FILE: src/Sign.Cli/ArtifactSigningResources.Designer.cs
================================================
//------------------------------------------------------------------------------
//
// This code was generated by a tool.
// Runtime Version:4.0.30319.42000
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
//
//------------------------------------------------------------------------------
namespace Sign.Cli {
using System;
///
/// A strongly-typed resource class, for looking up localized strings, etc.
///
// This class was auto-generated by the StronglyTypedResourceBuilder
// class via a tool like ResGen or Visual Studio.
// To add or remove a member, edit your .ResX file then rerun ResGen
// with the /str option, or rebuild your VS project.
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "17.0.0.0")]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
// Tool-generated strongly-typed accessor over ArtifactSigningResources.resx.
// Do not hand-edit members: regenerate via ResGen/Visual Studio instead.
internal class ArtifactSigningResources {
private static global::System.Resources.ResourceManager resourceMan;
private static global::System.Globalization.CultureInfo resourceCulture;
[global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
internal ArtifactSigningResources() {
}
///
/// Returns the cached ResourceManager instance used by this class.
///
[global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
internal static global::System.Resources.ResourceManager ResourceManager {
get {
// Lazy initialization; benign under races since any instance is equivalent.
if (object.ReferenceEquals(resourceMan, null)) {
global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("Sign.Cli.ArtifactSigningResources", typeof(ArtifactSigningResources).Assembly);
resourceMan = temp;
}
return resourceMan;
}
}
///
/// Overrides the current thread's CurrentUICulture property for all
/// resource lookups using this strongly typed resource class.
///
[global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
internal static global::System.Globalization.CultureInfo Culture {
get {
return resourceCulture;
}
set {
resourceCulture = value;
}
}
///
/// Looks up a localized string similar to The Artifact Signing Account name..
///
internal static string AccountOptionDescription {
get {
return ResourceManager.GetString("AccountOptionDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to The Certificate Profile name..
///
internal static string CertificateProfileOptionDescription {
get {
return ResourceManager.GetString("CertificateProfileOptionDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to Use Artifact Signing..
///
internal static string CommandDescription {
get {
return ResourceManager.GetString("CommandDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to The Artifact Signing Account endpoint. The value must be a URI that aligns to the region that your Artifact Signing Account and Certificate Profile were created in..
///
internal static string EndpointOptionDescription {
get {
return ResourceManager.GetString("EndpointOptionDescription", resourceCulture);
}
}
}
}
================================================
FILE: src/Sign.Cli/ArtifactSigningResources.resx
================================================
text/microsoft-resx2.0System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089The Artifact Signing Account name.The Certificate Profile name.Use Artifact Signing.The Artifact Signing Account endpoint. The value must be a URI that aligns to the region that your Artifact Signing Account and Certificate Profile were created in.
================================================
FILE: src/Sign.Cli/AzureCredentialOptions.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.CommandLine;
using Azure.Core;
using Azure.Identity;
namespace Sign.Cli
{
/// <summary>
/// Command-line options controlling how the Sign CLI authenticates to Azure,
/// including hidden, obsolete Azure Key Vault credential options kept for
/// backward compatibility with older invocations.
/// </summary>
internal sealed class AzureCredentialOptions
{
    // NOTE(review): the generic type arguments below were reconstructed — the extracted
    // text had them stripped (e.g. "Option<string>" had become "Option"). Each was
    // inferred from how ParseResult.GetValue(...) results are consumed (string?/bool?
    // locals). Confirm against the original source.
    internal Option<string> CredentialTypeOption { get; }
    internal Option<string> ManagedIdentityClientIdOption { get; }
    internal Option<string> ManagedIdentityResourceIdOption { get; }
    internal Option<bool?> ObsoleteManagedIdentityOption { get; }
    internal Option<string> ObsoleteTenantIdOption { get; }
    internal Option<string> ObsoleteClientIdOption { get; }
    internal Option<string> ObsoleteClientSecretOption { get; }

    internal AzureCredentialOptions()
    {
        CredentialTypeOption = new Option<string>("--azure-credential-type", "-act")
        {
            Description = Resources.CredentialTypeOptionDescription
        };
        // Restrict the credential type to the known values; parsing fails for anything else.
        CredentialTypeOption.AcceptOnlyFromAmong(
            AzureCredentialType.AzureCli,
            AzureCredentialType.AzurePowerShell,
            AzureCredentialType.ManagedIdentity,
            AzureCredentialType.WorkloadIdentity);

        ManagedIdentityClientIdOption = new Option<string>("--managed-identity-client-id", "-mici")
        {
            Description = Resources.ManagedIdentityClientIdOptionDescription
        };
        ManagedIdentityResourceIdOption = new Option<string>("--managed-identity-resource-id", "-miri")
        {
            Description = Resources.ManagedIdentityResourceIdOptionDescription
        };

        // Legacy Key Vault credential options: hidden from help but still parsed so
        // existing command lines keep working while emitting obsolescence messages.
        ObsoleteManagedIdentityOption = new Option<bool?>("--azure-key-vault-managed-identity", "-kvm")
        {
            Description = Resources.ManagedIdentityOptionDescription,
            Hidden = true
        };
        ObsoleteTenantIdOption = new Option<string>("--azure-key-vault-tenant-id", "-kvt")
        {
            Description = Resources.TenantIdOptionDescription,
            Hidden = true
        };
        ObsoleteClientIdOption = new Option<string>("--azure-key-vault-client-id", "-kvi")
        {
            Description = Resources.ClientIdOptionDescription,
            Hidden = true
        };
        ObsoleteClientSecretOption = new Option<string>("--azure-key-vault-client-secret", "-kvs")
        {
            Description = Resources.ClientSecretOptionDescription,
            Hidden = true
        };
    }

    /// <summary>
    /// Registers every credential-related option on <paramref name="command"/>.
    /// </summary>
    internal void AddOptionsToCommand(Command command)
    {
        command.Options.Add(CredentialTypeOption);
        command.Options.Add(ManagedIdentityClientIdOption);
        command.Options.Add(ManagedIdentityResourceIdOption);
        command.Options.Add(ObsoleteManagedIdentityOption);
        command.Options.Add(ObsoleteTenantIdOption);
        command.Options.Add(ObsoleteClientIdOption);
        command.Options.Add(ObsoleteClientSecretOption);
    }

    /// <summary>
    /// Builds <see cref="DefaultAzureCredentialOptions"/> from the parsed
    /// managed-identity client-id/resource-id options, if either was supplied.
    /// </summary>
    internal DefaultAzureCredentialOptions CreateDefaultAzureCredentialOptions(ParseResult parseResult)
    {
        DefaultAzureCredentialOptions options = new();

        string? managedIdentityClientId = parseResult.GetValue(ManagedIdentityClientIdOption);
        if (managedIdentityClientId is not null)
        {
            options.ManagedIdentityClientId = managedIdentityClientId;
        }

        string? managedIdentityResourceId = parseResult.GetValue(ManagedIdentityResourceIdOption);
        if (managedIdentityResourceId is not null)
        {
            options.ManagedIdentityResourceId = new ResourceIdentifier(managedIdentityResourceId);
        }

        return options;
    }

    /// <summary>
    /// Creates the <see cref="TokenCredential"/> used to authenticate to Azure.
    /// Precedence: legacy client-secret options (all three required), then the explicit
    /// <c>--azure-credential-type</c> value, and finally <see cref="DefaultAzureCredential"/>.
    /// </summary>
    internal TokenCredential? CreateTokenCredential(ParseResult parseResult)
    {
        // Warn (on stdout) whenever the obsolete managed-identity switch was supplied at all.
        bool? useManagedIdentity = parseResult.GetValue(ObsoleteManagedIdentityOption);
        if (useManagedIdentity is not null)
        {
            Console.Out.WriteLine(Resources.ManagedIdentityOptionObsolete);
        }

        // Legacy client-secret authentication is honored only when tenant id, client id,
        // and secret are all present.
        string? tenantId = parseResult.GetValue(ObsoleteTenantIdOption);
        string? clientId = parseResult.GetValue(ObsoleteClientIdOption);
        string? secret = parseResult.GetValue(ObsoleteClientSecretOption);
        if (!string.IsNullOrEmpty(tenantId) &&
            !string.IsNullOrEmpty(clientId) &&
            !string.IsNullOrEmpty(secret))
        {
            Console.Out.WriteLine(Resources.ClientSecretOptionsObsolete);
            return new ClientSecretCredential(tenantId, clientId, secret);
        }

        switch (parseResult.GetValue(CredentialTypeOption))
        {
            case AzureCredentialType.AzureCli:
                return new AzureCliCredential();

            case AzureCredentialType.AzurePowerShell:
                return new AzurePowerShellCredential();

            case AzureCredentialType.ManagedIdentity:
                // Prefer a user-assigned identity selected by client id, then one selected
                // by resource id, and fall back to the system-assigned identity.
                string? managedIdentityClientId = parseResult.GetValue(ManagedIdentityClientIdOption);
                if (managedIdentityClientId is not null)
                {
                    ManagedIdentityId managedIdentityId = ManagedIdentityId.FromUserAssignedClientId(managedIdentityClientId);
                    return new ManagedIdentityCredential(managedIdentityId);
                }

                string? managedIdentityResourceId = parseResult.GetValue(ManagedIdentityResourceIdOption);
                if (managedIdentityResourceId is not null)
                {
                    ResourceIdentifier resourceIdentifier = new(managedIdentityResourceId);
                    ManagedIdentityId managedIdentityId = ManagedIdentityId.FromUserAssignedResourceId(resourceIdentifier);
                    return new ManagedIdentityCredential(managedIdentityId);
                }

                return new ManagedIdentityCredential(ManagedIdentityId.SystemAssigned);

            case AzureCredentialType.WorkloadIdentity:
                return new WorkloadIdentityCredential();

            default:
                DefaultAzureCredentialOptions options = CreateDefaultAzureCredentialOptions(parseResult);

                // CodeQL [SM05137] Sign CLI is not a production service.
                return new DefaultAzureCredential(options);
        }
    }
}
}
================================================
FILE: src/Sign.Cli/AzureCredentialType.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
namespace Sign.Cli
{
/// <summary>
/// Well-known string values accepted by the <c>--azure-credential-type</c> option
/// (see <c>AzureCredentialOptions.CredentialTypeOption</c>).
/// </summary>
internal static class AzureCredentialType
{
    /// <summary>Authenticate using the Azure CLI (AzureCliCredential).</summary>
    public const string AzureCli = "azure-cli";

    /// <summary>Authenticate using Azure PowerShell (AzurePowerShellCredential).</summary>
    public const string AzurePowerShell = "azure-powershell";

    /// <summary>Authenticate using a managed identity (ManagedIdentityCredential).</summary>
    public const string ManagedIdentity = "managed-identity";

    /// <summary>Authenticate using a workload identity (WorkloadIdentityCredential).</summary>
    public const string WorkloadIdentity = "workload-identity";
}
}
================================================
FILE: src/Sign.Cli/AzureKeyVaultCommand.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.CommandLine;
using System.CommandLine.Parsing;
using Azure.Core;
using Azure.Security.KeyVault.Certificates;
using Azure.Security.KeyVault.Keys.Cryptography;
using Microsoft.Extensions.Azure;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Sign.Core;
using Sign.SignatureProviders.KeyVault;
namespace Sign.Cli
{
/// <summary>
/// CLI command ("azure-key-vault") that signs files using a certificate held in
/// Azure Key Vault, delegating the actual signing work to <see cref="CodeCommand"/>.
/// </summary>
// NOTE(review): generic type arguments (e.g. Option<Uri>, Argument<List<string>?>)
// appear to have been stripped from this text during extraction; declarations are
// preserved byte-for-byte as found. Confirm against the original source.
internal sealed class AzureKeyVaultCommand : Command
{
    internal Option UrlOption { get; }
    internal Option CertificateOption { get; }
    internal AzureCredentialOptions AzureCredentialOptions { get; } = new();
    internal Argument?> FilesArgument { get; }

    /// <summary>
    /// Wires up the Key Vault URL/certificate options, the shared Azure credential
    /// options, the file(s) argument, and the command action.
    /// </summary>
    internal AzureKeyVaultCommand(CodeCommand codeCommand, IServiceProviderFactory serviceProviderFactory)
        : base("azure-key-vault", AzureKeyVaultResources.CommandDescription)
    {
        ArgumentNullException.ThrowIfNull(codeCommand, nameof(codeCommand));
        ArgumentNullException.ThrowIfNull(serviceProviderFactory, nameof(serviceProviderFactory));

        // Must be an absolute HTTPS URL; enforced by ParseUrl below.
        UrlOption = new Option("--azure-key-vault-url", "-kvu")
        {
            Description = AzureKeyVaultResources.UrlOptionDescription,
            Required = true,
            CustomParser = ParseUrl
        };
        CertificateOption = new Option("--azure-key-vault-certificate", "-kvc")
        {
            Description = AzureKeyVaultResources.CertificateOptionDescription,
            Required = true
        };
        FilesArgument = new Argument?>("file(s)")
        {
            Description = Resources.FilesArgumentDescription,
            Arity = ArgumentArity.OneOrMore
        };

        Options.Add(UrlOption);
        Options.Add(CertificateOption);
        AzureCredentialOptions.AddOptionsToCommand(this);
        Arguments.Add(FilesArgument);

        SetAction((ParseResult parseResult, CancellationToken cancellationToken) =>
        {
            // At least one file must be supplied; otherwise fail with InvalidOptions.
            List? filesArgument = parseResult.GetValue(FilesArgument);
            if (filesArgument is not { Count: > 0 })
            {
                Console.Error.WriteLine(Resources.MissingFileValue);
                return Task.FromResult(ExitCode.InvalidOptions);
            }

            // this check exists as a courtesy to users who may have been signing .clickonce files via the old workaround.
            // at some point we should remove this check, probably once we hit v1.0
            if (filesArgument.Any(x => x.EndsWith(".clickonce", StringComparison.OrdinalIgnoreCase)))
            {
                Console.Error.WriteLine(AzureKeyVaultResources.ClickOnceExtensionNotSupported);
                return Task.FromResult(ExitCode.InvalidOptions);
            }

            TokenCredential? credential = AzureCredentialOptions.CreateTokenCredential(parseResult);
            if (credential is null)
            {
                return Task.FromResult(ExitCode.Failed);
            }

            // Some of the options are required and that is why we can safely use
            // the null-forgiving operator (!) to simplify the code.
            Uri url = parseResult.GetValue(UrlOption)!;
            string certificateId = parseResult.GetValue(CertificateOption)!;

            // Construct the URI for the certificate and the key from user parameters. We'll validate those with the SDK
            var certUri = new Uri($"{url.Scheme}://{url.Authority}/certificates/{certificateId}");
            if (!KeyVaultCertificateIdentifier.TryCreate(certUri, out var certId))
            {
                Console.Error.WriteLine(AzureKeyVaultResources.InvalidKeyVaultUrl);
                return Task.FromResult(ExitCode.InvalidOptions);
            }

            // The key uri is similar and the key name matches the certificate name
            var keyUri = new Uri($"{url.Scheme}://{url.Authority}/keys/{certificateId}");

            // Register the Key Vault certificate/cryptography clients and the
            // KeyVaultService with DI before handing off to the code command.
            serviceProviderFactory.AddServices(services =>
            {
                services.AddAzureClients(builder =>
                {
                    builder.AddCertificateClient(certId.VaultUri);
                    builder.AddCryptographyClient(keyUri);
                    builder.UseCredential(credential);
                    // Retry transient Key Vault failures with exponential backoff.
                    builder.ConfigureDefaults(options => options.Retry.Mode = RetryMode.Exponential);
                });
                services.AddSingleton(serviceProvider =>
                {
                    return new KeyVaultService(
                        serviceProvider.GetRequiredService(),
                        serviceProvider.GetRequiredService(),
                        certId.Name,
                        serviceProvider.GetRequiredService>());
                });
            });

            KeyVaultServiceProvider keyVaultServiceProvider = new();

            return codeCommand.HandleAsync(parseResult, serviceProviderFactory, keyVaultServiceProvider, filesArgument);
        });
    }

    /// <summary>
    /// Parses the --azure-key-vault-url value: accepts exactly one token that is an
    /// absolute HTTPS URI; anything else records a parse error and returns null.
    /// </summary>
    private static Uri? ParseUrl(ArgumentResult result)
    {
        if (result.Tokens.Count != 1 ||
            !Uri.TryCreate(result.Tokens[0].Value, UriKind.Absolute, out Uri? uri)
            || !string.Equals(uri.Scheme, Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase))
        {
            result.AddError(AzureKeyVaultResources.InvalidUrlValue);
            return null;
        }

        return uri;
    }
}
}
================================================
FILE: src/Sign.Cli/AzureKeyVaultResources.Designer.cs
================================================
//------------------------------------------------------------------------------
//
// This code was generated by a tool.
// Runtime Version:4.0.30319.42000
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
//
//------------------------------------------------------------------------------
namespace Sign.Cli {
using System;
/// <summary>
///   A strongly-typed resource class, for looking up localized strings, etc.
/// </summary>
// This class was auto-generated by the StronglyTypedResourceBuilder
// class via a tool like ResGen or Visual Studio.
// To add or remove a member, edit your .ResX file then rerun ResGen
// with the /str option, or rebuild your VS project.
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "18.0.0.0")]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
internal class AzureKeyVaultResources {

    private static global::System.Resources.ResourceManager resourceMan;

    private static global::System.Globalization.CultureInfo resourceCulture;

    [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
    internal AzureKeyVaultResources() {
    }

    /// <summary>
    ///   Returns the cached ResourceManager instance used by this class.
    /// </summary>
    [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
    internal static global::System.Resources.ResourceManager ResourceManager {
        get {
            if (object.ReferenceEquals(resourceMan, null)) {
                global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("Sign.Cli.AzureKeyVaultResources", typeof(AzureKeyVaultResources).Assembly);
                resourceMan = temp;
            }
            return resourceMan;
        }
    }

    /// <summary>
    ///   Overrides the current thread's CurrentUICulture property for all
    ///   resource lookups using this strongly typed resource class.
    /// </summary>
    [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
    internal static global::System.Globalization.CultureInfo Culture {
        get {
            return resourceCulture;
        }
        set {
            resourceCulture = value;
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to Name of the certificate in Azure Key Vault..
    /// </summary>
    internal static string CertificateOptionDescription {
        get {
            return ResourceManager.GetString("CertificateOptionDescription", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to ClickOnce signing via the legacy .clickonce ZIP workaround is no longer supported. See documentation..
    /// </summary>
    internal static string ClickOnceExtensionNotSupported {
        get {
            return ResourceManager.GetString("ClickOnceExtensionNotSupported", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to Use Azure Key Vault..
    /// </summary>
    internal static string CommandDescription {
        get {
            return ResourceManager.GetString("CommandDescription", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to URL must only contain the protocol and host. (e.g.: https://&lt;vault-name&gt;.vault.azure.net/).
    /// </summary>
    internal static string InvalidKeyVaultUrl {
        get {
            return ResourceManager.GetString("InvalidKeyVaultUrl", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to URL must be an absolute HTTPS URL to an Azure Key Vault..
    /// </summary>
    internal static string InvalidUrlValue {
        get {
            return ResourceManager.GetString("InvalidUrlValue", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to URL to an Azure Key Vault..
    /// </summary>
    internal static string UrlOptionDescription {
        get {
            return ResourceManager.GetString("UrlOptionDescription", resourceCulture);
        }
    }
}
}
================================================
FILE: src/Sign.Cli/AzureKeyVaultResources.resx
================================================
text/microsoft-resx2.0System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089Name of the certificate in Azure Key Vault.ClickOnce signing via the legacy .clickonce ZIP workaround is no longer supported. See documentation.Use Azure Key Vault.URL must only contain the protocol and host. (e.g.: https://<vault-name>.vault.azure.net/)URL must be an absolute HTTPS URL to an Azure Key Vault.URL to an Azure Key Vault.
================================================
FILE: src/Sign.Cli/CertificateStoreCommand.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.CommandLine;
using System.CommandLine.Parsing;
using System.Globalization;
using System.Security.Cryptography;
using Sign.Core;
using Sign.SignatureProviders.CertificateStore;
namespace Sign.Cli
{
/// <summary>
/// CLI command ("certificate-store") that signs files using a certificate identified
/// by its SHA fingerprint, optionally loaded from a certificate file or accessed via a
/// CSP/private-key-container pair, delegating the signing work to <see cref="CodeCommand"/>.
/// </summary>
// NOTE(review): generic type arguments (e.g. Option<string>, Argument<List<string>?>)
// appear to have been stripped from this text during extraction; declarations are
// preserved byte-for-byte as found. Confirm against the original source.
internal sealed class CertificateStoreCommand : Command
{
    internal Option CertificateFingerprintOption { get; }
    internal Option CertificateFileOption { get; }
    internal Option CertificatePasswordOption { get; }
    internal Option CryptoServiceProviderOption { get; }
    internal Option PrivateKeyContainerOption { get; }
    internal Option UseMachineKeyContainerOption { get; }
    internal Option InteractiveOption { get; }
    internal Argument?> FilesArgument { get; }

    /// <summary>
    /// Wires up all certificate-store options, the file(s) argument, and the command
    /// action that validates the option combination and hands off to the code command.
    /// </summary>
    internal CertificateStoreCommand(CodeCommand codeCommand, IServiceProviderFactory serviceProviderFactory)
        : base("certificate-store", Resources.CertificateStoreCommandDescription)
    {
        ArgumentNullException.ThrowIfNull(codeCommand, nameof(codeCommand));
        ArgumentNullException.ThrowIfNull(serviceProviderFactory, nameof(serviceProviderFactory));

        // Fingerprint is required and validated (hex + recognized length) at parse time.
        CertificateFingerprintOption = new Option("--certificate-fingerprint", "-cfp")
        {
            CustomParser = ParseCertificateFingerprint,
            Description = CertificateStoreResources.CertificateFingerprintOptionDescription,
            Required = true
        };
        CertificateFileOption = new Option("--certificate-file", "-cf")
        {
            Description = CertificateStoreResources.CertificateFileOptionDescription
        };
        CertificatePasswordOption = new Option("--password", "-p")
        {
            Description = CertificateStoreResources.CertificatePasswordOptionDescription
        };
        CryptoServiceProviderOption = new Option("--crypto-service-provider", "-csp")
        {
            Description = CertificateStoreResources.CspOptionDescription
        };
        PrivateKeyContainerOption = new Option("--key-container", "-k")
        {
            Description = CertificateStoreResources.KeyContainerOptionDescription
        };
        UseMachineKeyContainerOption = new Option("--use-machine-key-container", "-km")
        {
            DefaultValueFactory = _ => false,
            Description = CertificateStoreResources.UseMachineKeyContainerOptionDescription
        };
        InteractiveOption = new Option("--interactive", "-i")
        {
            DefaultValueFactory = _ => false,
            Description = CertificateStoreResources.InteractiveDescription
        };
        FilesArgument = new Argument?>("file(s)")
        {
            Description = Resources.FilesArgumentDescription,
            Arity = ArgumentArity.OneOrMore
        };

        Options.Add(CertificateFingerprintOption);
        Options.Add(CertificateFileOption);
        Options.Add(CertificatePasswordOption);
        Options.Add(CryptoServiceProviderOption);
        Options.Add(PrivateKeyContainerOption);
        Options.Add(UseMachineKeyContainerOption);
        Options.Add(InteractiveOption);
        Arguments.Add(FilesArgument);

        SetAction((ParseResult parseResult, CancellationToken cancellationToken) =>
        {
            // At least one file must be supplied; otherwise fail with InvalidOptions.
            List? filesArgument = parseResult.GetValue(FilesArgument);
            if (filesArgument is not { Count: > 0 })
            {
                Console.Error.WriteLine(Resources.MissingFileValue);
                return Task.FromResult(ExitCode.InvalidOptions);
            }

            // Some of the options are required and that is why we can safely use
            // the null-forgiving operator (!) to simplify the code.
            string certificateFingerprint = parseResult.GetValue(CertificateFingerprintOption)!;
            string? certificatePath = parseResult.GetValue(CertificateFileOption);
            string? certificatePassword = parseResult.GetValue(CertificatePasswordOption);
            string? cryptoServiceProvider = parseResult.GetValue(CryptoServiceProviderOption);
            string? privateKeyContainer = parseResult.GetValue(PrivateKeyContainerOption);
            bool useMachineKeyContainer = parseResult.GetValue(UseMachineKeyContainerOption);
            bool isInteractive = parseResult.GetValue(InteractiveOption);

            // Certificate fingerprint is required in case the provided certificate container contains multiple certificates.
            if (string.IsNullOrEmpty(certificateFingerprint))
            {
                Console.Error.WriteFormattedLine(
                    Resources.InvalidCertificateFingerprintValue,
                    CertificateFingerprintOption);
                return Task.FromResult(ExitCode.InvalidOptions);
            }

            // The fingerprint's length determines the hash algorithm (see TryDeduceHashAlgorithm).
            if (!TryDeduceHashAlgorithm(certificateFingerprint, out HashAlgorithmName certificateFingerprintAlgorithm))
            {
                Console.Error.WriteFormattedLine(
                    Resources.InvalidCertificateFingerprintValue,
                    CertificateFingerprintOption);
                return Task.FromResult(ExitCode.InvalidOptions);
            }

            // CSP requires a private key container to function.
            if (string.IsNullOrEmpty(cryptoServiceProvider) != string.IsNullOrEmpty(privateKeyContainer))
            {
                if (string.IsNullOrEmpty(privateKeyContainer))
                {
                    Console.Error.WriteLine(CertificateStoreResources.MissingPrivateKeyContainerError);
                    return Task.FromResult(ExitCode.InvalidOptions);
                }
                else
                {
                    Console.Error.WriteLine(CertificateStoreResources.MissingCspError);
                    return Task.FromResult(ExitCode.InvalidOptions);
                }
            }

            CertificateStoreServiceProvider certificateStoreServiceProvider = new(
                certificateFingerprint,
                certificateFingerprintAlgorithm,
                cryptoServiceProvider,
                privateKeyContainer,
                certificatePath,
                certificatePassword,
                useMachineKeyContainer,
                isInteractive);

            return codeCommand.HandleAsync(parseResult, serviceProviderFactory, certificateStoreServiceProvider, filesArgument);
        });
    }

    /// <summary>
    /// Parses the --certificate-fingerprint value: accepts exactly one token that is
    /// valid hexadecimal with a length matching a supported SHA digest; otherwise
    /// records a parse error. Returns the raw token (possibly null) either way.
    /// </summary>
    private static string? ParseCertificateFingerprint(ArgumentResult result)
    {
        string? token = null;

        if (result.Tokens.Count == 1)
        {
            token = result.Tokens[0].Value;

            if (!HexHelpers.IsHex(token))
            {
                result.AddError(FormatMessage(
                    Resources.InvalidCertificateFingerprintValue,
                    result.Argument));
            }
            else if (!TryDeduceHashAlgorithm(token, out HashAlgorithmName hashAlgorithmName))
            {
                result.AddError(FormatMessage(
                    Resources.InvalidCertificateFingerprintValue,
                    result.Argument));
            }
        }
        else
        {
            result.AddError(FormatMessage(
                Resources.InvalidCertificateFingerprintValue,
                result.Argument));
        }

        return token;
    }

    /// <summary>
    /// Formats an error message, substituting the argument's name into the template.
    /// </summary>
    private static string FormatMessage(string format, Argument argument)
    {
        return string.Format(CultureInfo.CurrentCulture, format, argument.Name);
    }

    /// <summary>
    /// Deduces the SHA hash algorithm from a fingerprint's hex length
    /// (64 chars = SHA-256, 96 = SHA-384, 128 = SHA-512); false for any other length.
    /// </summary>
    private static bool TryDeduceHashAlgorithm(
        string certificateFingerprint,
        out HashAlgorithmName hashAlgorithmName)
    {
        hashAlgorithmName = HashAlgorithmName.SHA256;

        if (string.IsNullOrEmpty(certificateFingerprint))
        {
            return false;
        }

        // One hexadecimal character is 4 bits.
        switch (certificateFingerprint.Length)
        {
            case 64: // 64 characters * 4 bits/character = 256 bits
                hashAlgorithmName = HashAlgorithmName.SHA256;
                return true;

            case 96: // 96 characters * 4 bits/character = 384 bits
                hashAlgorithmName = HashAlgorithmName.SHA384;
                return true;

            case 128: // 128 characters * 4 bits/character = 512 bits
                hashAlgorithmName = HashAlgorithmName.SHA512;
                return true;

            default:
                return false;
        }
    }
}
}
================================================
FILE: src/Sign.Cli/CertificateStoreResources.Designer.cs
================================================
//------------------------------------------------------------------------------
//
// This code was generated by a tool.
// Runtime Version:4.0.30319.42000
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
//
//------------------------------------------------------------------------------
namespace Sign.Cli {
using System;
/// <summary>
///   A strongly-typed resource class, for looking up localized strings, etc.
/// </summary>
// This class was auto-generated by the StronglyTypedResourceBuilder
// class via a tool like ResGen or Visual Studio.
// To add or remove a member, edit your .ResX file then rerun ResGen
// with the /str option, or rebuild your VS project.
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "17.0.0.0")]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
internal class CertificateStoreResources {

    private static global::System.Resources.ResourceManager resourceMan;

    private static global::System.Globalization.CultureInfo resourceCulture;

    [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
    internal CertificateStoreResources() {
    }

    /// <summary>
    ///   Returns the cached ResourceManager instance used by this class.
    /// </summary>
    [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
    internal static global::System.Resources.ResourceManager ResourceManager {
        get {
            if (object.ReferenceEquals(resourceMan, null)) {
                global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("Sign.Cli.CertificateStoreResources", typeof(CertificateStoreResources).Assembly);
                resourceMan = temp;
            }
            return resourceMan;
        }
    }

    /// <summary>
    ///   Overrides the current thread's CurrentUICulture property for all
    ///   resource lookups using this strongly typed resource class.
    /// </summary>
    [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
    internal static global::System.Globalization.CultureInfo Culture {
        get {
            return resourceCulture;
        }
        set {
            resourceCulture = value;
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to PFX, P7B, or CER file containing a certificate and potentially a private key..
    /// </summary>
    internal static string CertificateFileOptionDescription {
        get {
            return ResourceManager.GetString("CertificateFileOptionDescription", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to SHA fingerprint used to identify a certificate..
    /// </summary>
    internal static string CertificateFingerprintOptionDescription {
        get {
            return ResourceManager.GetString("CertificateFingerprintOptionDescription", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to Password for certificate file..
    /// </summary>
    internal static string CertificatePasswordOptionDescription {
        get {
            return ResourceManager.GetString("CertificatePasswordOptionDescription", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to Sign container contents.
    /// </summary>
    internal static string ContainersDescription {
        get {
            return ResourceManager.GetString("ContainersDescription", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to Cryptographic Service Provider containing the private key container. Requires /k and optionally /km..
    /// </summary>
    internal static string CspOptionDescription {
        get {
            return ResourceManager.GetString("CspOptionDescription", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to Allow user interactions (such as a dialog box) when a private key is accessed..
    /// </summary>
    internal static string InteractiveDescription {
        get {
            return ResourceManager.GetString("InteractiveDescription", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to Private key container name..
    /// </summary>
    internal static string KeyContainerOptionDescription {
        get {
            return ResourceManager.GetString("KeyContainerOptionDescription", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to Cryptographic Service Provider missing. Use /csp to specify a CSP..
    /// </summary>
    internal static string MissingCspError {
        get {
            return ResourceManager.GetString("MissingCspError", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to Private key container name missing. Use /k to specify a key container name..
    /// </summary>
    internal static string MissingPrivateKeyContainerError {
        get {
            return ResourceManager.GetString("MissingPrivateKeyContainerError", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to Use a machine-level private key container. (The default is user-level.).
    /// </summary>
    internal static string UseMachineKeyContainerOptionDescription {
        get {
            return ResourceManager.GetString("UseMachineKeyContainerOptionDescription", resourceCulture);
        }
    }
}
}
================================================
FILE: src/Sign.Cli/CertificateStoreResources.resx
================================================
text/microsoft-resx2.0System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089Allow user interactions (such as a dialog box) when a private key is accessed.PFX, P7B, or CER file containing a certificate and potentially a private key.{Locked="PFX", "P7B", "CER"} are file extensions.SHA fingerprint used to identify a certificate.{Locked="SHA"} is a cryptographic algorithm.Password for certificate file.Cryptographic Service Provider containing the private key container. Requires /k and optionally /km.{Locked="/k", "/km"} are command line options.Private key container name.Cryptographic Service Provider missing. Use /csp to specify a CSP.{Locked="/csp"} is a command line option.Private key container name missing. Use /k to specify a key container name.{Locked="/k"} is a command line option.Use a machine-level private key container. (The default is user-level.)Sign container contents.
================================================
FILE: src/Sign.Cli/CodeCommand.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.CommandLine;
using System.CommandLine.Parsing;
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.FileSystemGlobbing;
using Microsoft.Extensions.FileSystemGlobbing.Abstractions;
using Microsoft.Extensions.Logging;
using Sign.Core;
namespace Sign.Cli
{
internal sealed class CodeCommand : Command
{
// Signing options shared by the "code" command and all of its subcommands; each is
// created with Recursive = true in the constructor so it applies everywhere.
// NOTE(review): generic type arguments (e.g. Option<string>, Option<Uri>) appear to
// have been stripped during extraction; declarations preserved as found.
internal Option ApplicationNameOption { get; }
internal Option BaseDirectoryOption { get; }
internal Option DescriptionOption { get; }
internal Option DescriptionUrlOption { get; }
internal Option FileDigestOption { get; }
internal Option FileListOption { get; }
internal Option RecurseContainersOption { get; }
internal Option MaxConcurrencyOption { get; }
internal Option OutputOption { get; }
internal Option PublisherNameOption { get; }
internal Option TimestampDigestOption { get; }
internal Option TimestampUrlOption { get; }
internal Option VerbosityOption { get; }
/// <summary>
/// Builds the "code" command, creating every shared option (all Recursive so they are
/// inherited by subcommands) and registering them in the order they should appear in help.
/// </summary>
internal CodeCommand()
    : base("code", Resources.CodeCommandDescription)
{
    ApplicationNameOption = new Option("--application-name", "-an")
    {
        Description = Resources.ApplicationNameOptionDescription,
        Recursive = true
    };
    // Defaults to the current working directory; HandleAsync rejects non-rooted paths.
    BaseDirectoryOption = new Option("--base-directory", "-b")
    {
        CustomParser = ParseBaseDirectoryOption,
        DefaultValueFactory = _ => new DirectoryInfo(Environment.CurrentDirectory),
        Description = Resources.BaseDirectoryOptionDescription,
        Recursive = true
    };
    DescriptionOption = new Option("--description", "-d")
    {
        Description = Resources.DescriptionOptionDescription,
        Recursive = true
    };
    DescriptionUrlOption = new Option("--description-url", "-u")
    {
        CustomParser = ParseUrl,
        Description = Resources.DescriptionUrlOptionDescription,
        Recursive = true
    };
    // File digest defaults to SHA-256.
    FileDigestOption = new Option("--file-digest", "-fd")
    {
        CustomParser = HashAlgorithmParser.ParseHashAlgorithmName,
        DefaultValueFactory = _ => HashAlgorithmName.SHA256,
        Description = Resources.FileDigestOptionDescription,
        Recursive = true
    };
    FileListOption = new Option("--file-list", "-fl")
    {
        Description = Resources.FileListOptionDescription,
        Recursive = true
    };
    // Containers (e.g. archives) are recursed into by default.
    RecurseContainersOption = new Option("--recurse-containers", "-rc")
    {
        DefaultValueFactory = _ => true,
        Description = CertificateStoreResources.ContainersDescription,
        Recursive = true
    };
    MaxConcurrencyOption = new Option("--max-concurrency", "-m")
    {
        CustomParser = ParseMaxConcurrencyOption,
        DefaultValueFactory = _ => 4,
        Description = Resources.MaxConcurrencyOptionDescription,
        Recursive = true
    };
    OutputOption = new Option("--output", "-o")
    {
        Description = Resources.OutputOptionDescription,
        Recursive = true
    };
    PublisherNameOption = new Option("--publisher-name", "-pn")
    {
        Description = Resources.PublisherNameOptionDescription,
        Recursive = true
    };
    // Timestamp digest defaults to SHA-256.
    TimestampDigestOption = new Option("--timestamp-digest", "-td")
    {
        CustomParser = HashAlgorithmParser.ParseHashAlgorithmName,
        DefaultValueFactory = _ => HashAlgorithmName.SHA256,
        Description = Resources.TimestampDigestOptionDescription,
        Recursive = true
    };
    // Defaults to Microsoft's public RFC 3161 timestamping endpoint.
    TimestampUrlOption = new Option("--timestamp-url", "-t")
    {
        CustomParser = ParseUrl,
        DefaultValueFactory = _ => new Uri("http://timestamp.acs.microsoft.com"),
        Description = Resources.TimestampUrlOptionDescription,
        Recursive = true
    };
    VerbosityOption = new Option("--verbosity", "-v")
    {
        Description = Resources.VerbosityOptionDescription,
        Recursive = true
    };

    // These options are available on the "code" command and all subcommands.
    // Order here is significant as it represents the order in which options are
    // displayed in help.
    Options.Add(ApplicationNameOption);
    Options.Add(DescriptionOption);
    Options.Add(DescriptionUrlOption);
    Options.Add(BaseDirectoryOption);
    Options.Add(OutputOption);
    Options.Add(PublisherNameOption);
    Options.Add(FileListOption);
    Options.Add(RecurseContainersOption);
    Options.Add(FileDigestOption);
    Options.Add(TimestampUrlOption);
    Options.Add(TimestampDigestOption);
    Options.Add(MaxConcurrencyOption);
    Options.Add(VerbosityOption);
}
// Executes the shared "code" signing flow for all provider subcommands:
// reads the parsed option values, validates the base directory, resolves the
// input file arguments (expanding globs via the file-list matcher), then hands
// everything to the registered ISigner and returns its exit code.
//
// NOTE(review): several generic type arguments (e.g. Task<int>, IEnumerable<string>,
// List<FileInfo>, GetRequiredService<T>()) appear to have been lost in extraction;
// confirm against the upstream file before relying on the exact signatures here.
internal async Task HandleAsync(ParseResult parseResult, IServiceProviderFactory serviceProviderFactory, ISignatureProvider signatureProvider, IEnumerable filesArgument)
{
    // Some of the options have a default value and that is why we can safely use
    // the null-forgiving operator (!) to simplify the code.
    DirectoryInfo baseDirectory = parseResult.GetValue(BaseDirectoryOption)!;
    string? applicationName = parseResult.GetValue(ApplicationNameOption);
    string? publisherName = parseResult.GetValue(PublisherNameOption);
    string? description = parseResult.GetValue(DescriptionOption);
    Uri? descriptionUrl = parseResult.GetValue(DescriptionUrlOption);
    string? fileListFilePath = parseResult.GetValue(FileListOption);
    bool recurseContainers = parseResult.GetValue(RecurseContainersOption);
    HashAlgorithmName fileHashAlgorithmName = parseResult.GetValue(FileDigestOption);
    HashAlgorithmName timestampHashAlgorithmName = parseResult.GetValue(TimestampDigestOption);
    Uri timestampUrl = parseResult.GetValue(TimestampUrlOption)!;
    LogLevel verbosity = parseResult.GetValue(VerbosityOption);
    string? output = parseResult.GetValue(OutputOption);
    int maxConcurrency = parseResult.GetValue(MaxConcurrencyOption);

    // Make sure this is rooted
    if (!Path.IsPathRooted(baseDirectory.FullName))
    {
        Console.Error.WriteFormattedLine(
            Resources.InvalidBaseDirectoryValue,
            BaseDirectoryOption);
        return ExitCode.InvalidOptions;
    }

    // Build the service provider at the requested verbosity, registering the
    // provider-specific signature-algorithm and certificate services supplied
    // by the active subcommand.
    IServiceProvider serviceProvider = serviceProviderFactory.Create(
        verbosity,
        addServices: (IServiceCollection services) =>
        {
            services.AddSingleton(
                (IServiceProvider serviceProvider) => signatureProvider.GetSignatureAlgorithmProvider(serviceProvider));
            services.AddSingleton(
                (IServiceProvider serviceProvider) => signatureProvider.GetCertificateProvider(serviceProvider));
        });

    List inputFiles = [];

    // Resolve each file argument: glob patterns are expanded relative to the
    // base directory; plain paths are rooted against it.
    foreach (string fileArgument in filesArgument)
    {
        // If we're going to glob, we can't be fully rooted currently (fix me later)
        bool isGlob = fileArgument.Contains('*');
        if (isGlob)
        {
            if (Path.IsPathRooted(fileArgument))
            {
                Console.Error.WriteLine(Resources.InvalidFileValue);
                return ExitCode.InvalidOptions;
            }

            IFileListReader fileListReader = serviceProvider.GetRequiredService();
            IFileMatcher fileMatcher = serviceProvider.GetRequiredService();

            // Reuse the file-list parser to turn the glob text into include and
            // (optional) exclude matchers, then enumerate matches under the
            // base directory, subtracting any anti-matches.
            using (MemoryStream stream = new(Encoding.UTF8.GetBytes(fileArgument)))
            using (StreamReader reader = new(stream))
            {
                fileListReader.Read(reader, out Matcher? matcher, out Matcher? antiMatcher);
                DirectoryInfoBase directory = new DirectoryInfoWrapper(baseDirectory);
                IEnumerable matches = fileMatcher.EnumerateMatches(directory, matcher);
                if (antiMatcher is not null)
                {
                    IEnumerable antiMatches = fileMatcher.EnumerateMatches(directory, antiMatcher);
                    matches = matches.Except(antiMatches, FileInfoComparer.Instance);
                }
                inputFiles.AddRange(matches);
            }
        }
        else
        {
            inputFiles.Add(new FileInfo(ExpandFilePath(baseDirectory, fileArgument)));
        }
    }

    // The optional file list (include/exclude paths) is also rooted against
    // the base directory when it is relative.
    FileInfo? fileList = null;
    if (!string.IsNullOrEmpty(fileListFilePath))
    {
        if (Path.IsPathRooted(fileListFilePath))
        {
            fileList = new FileInfo(fileListFilePath);
        }
        else
        {
            fileList = new FileInfo(ExpandFilePath(baseDirectory, fileListFilePath));
        }
    }

    if (inputFiles.Count == 0)
    {
        Console.Error.WriteLine(Resources.NoFilesToSign);
        return ExitCode.NoInputsFound;
    }

    // Fail fast with the list of missing files rather than letting the signer
    // discover them one at a time.
    if (inputFiles.Any(file => !file.Exists))
    {
        Console.Error.WriteFormattedLine(
            Resources.SomeFilesDoNotExist,
            BaseDirectoryOption);
        foreach (FileInfo file in inputFiles.Where(file => !file.Exists))
        {
            Console.Error.WriteLine($" {file.FullName}");
        }
        return ExitCode.NoInputsFound;
    }

    ISigner signer = serviceProvider.GetRequiredService();

    int exitCode = await signer.SignAsync(
        inputFiles,
        output,
        fileList,
        recurseContainers,
        baseDirectory,
        applicationName,
        publisherName,
        description,
        descriptionUrl,
        timestampUrl,
        maxConcurrency,
        fileHashAlgorithmName,
        timestampHashAlgorithmName);

    return exitCode;
}
// Resolves a possibly-relative file path against the given base directory.
// Already-rooted paths are returned untouched; anything else is joined onto
// the base directory's full path.
private static string ExpandFilePath(DirectoryInfo baseDirectory, string file)
    => Path.IsPathRooted(file) ? file : Path.Combine(baseDirectory.FullName, file);
// Validates the base-directory option: it must consist of exactly one
// non-blank token that is a fully rooted path. On any failure an error is
// reported on the result and the current working directory is returned as a
// placeholder so parsing can continue.
private static DirectoryInfo ParseBaseDirectoryOption(ArgumentResult result)
{
    string? value = result.Tokens.Count == 1 ? result.Tokens[0].Value : null;

    if (!string.IsNullOrWhiteSpace(value) && Path.IsPathRooted(value))
    {
        return new DirectoryInfo(value);
    }

    result.AddError(FormatMessage(Resources.InvalidBaseDirectoryValue, result.Argument));

    return new DirectoryInfo(Environment.CurrentDirectory);
}
// Validates the max-concurrency option: exactly one token parseable as an
// integer >= 1. Reports an error and returns the type default (0) otherwise.
private static int ParseMaxConcurrencyOption(ArgumentResult result)
{
    if (result.Tokens.Count == 1 &&
        int.TryParse(result.Tokens[0].Value, out int value) &&
        value >= 1)
    {
        return value;
    }

    result.AddError(FormatMessage(Resources.InvalidMaxConcurrencyValue, result.Argument));

    return default;
}
// Validates an option that must be a single absolute HTTPS URL.
// Reports an error on the result and returns null for anything else.
internal static Uri? ParseHttpsUrl(ArgumentResult result)
{
    if (result.Tokens.Count == 1 &&
        Uri.TryCreate(result.Tokens[0].Value, UriKind.Absolute, out Uri? uri) &&
        string.Equals(Uri.UriSchemeHttps, uri.Scheme, StringComparison.OrdinalIgnoreCase))
    {
        return uri;
    }

    result.AddError(FormatMessage(Resources.InvalidHttpsUrlValue, result.Argument));

    return null;
}
// Validates an option that must be a single absolute HTTP or HTTPS URL
// (timestamp servers commonly use plain HTTP, so both schemes are allowed).
// Reports an error on the result and returns null for anything else.
internal static Uri? ParseUrl(ArgumentResult result)
{
    if (result.Tokens.Count == 1 &&
        Uri.TryCreate(result.Tokens[0].Value, UriKind.Absolute, out Uri? uri))
    {
        bool isHttpOrHttps =
            string.Equals(Uri.UriSchemeHttp, uri.Scheme, StringComparison.OrdinalIgnoreCase) ||
            string.Equals(Uri.UriSchemeHttps, uri.Scheme, StringComparison.OrdinalIgnoreCase);

        if (isHttpOrHttps)
        {
            return uri;
        }
    }

    result.AddError(FormatMessage(Resources.InvalidUrlValue, result.Argument));

    return null;
}
// Substitutes the argument's name into a localized, user-facing message
// template (hence CurrentCulture).
private static string FormatMessage(string format, Argument argument)
    => string.Format(CultureInfo.CurrentCulture, format, argument.Name);
}
}
================================================
FILE: src/Sign.Cli/Helpers/HashAlgorithmParser.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.CommandLine.Parsing;
using System.Globalization;
using System.Security.Cryptography;
namespace Sign.Cli
{
internal static class HashAlgorithmParser
{
    /// <summary>
    /// Parses a digest-option token ("sha256", "sha384", or "sha512",
    /// case-insensitive) into the corresponding <see cref="HashAlgorithmName"/>.
    /// A missing token yields the SHA-256 default; an unrecognized token
    /// reports a parse error and falls back to SHA-256.
    /// </summary>
    public static HashAlgorithmName ParseHashAlgorithmName(ArgumentResult result)
    {
        if (result.Tokens.Count == 0)
        {
            return HashAlgorithmName.SHA256;
        }

        // Single() intentionally rejects multiple tokens.
        switch (result.Tokens.Single().Value.ToLowerInvariant())
        {
            case "sha256":
                return HashAlgorithmName.SHA256;
            case "sha384":
                return HashAlgorithmName.SHA384;
            case "sha512":
                return HashAlgorithmName.SHA512;
        }

        result.AddError(string.Format(CultureInfo.CurrentCulture, Resources.InvalidDigestValue, result.Argument.Name));

        return HashAlgorithmName.SHA256;
    }
}
}
================================================
FILE: src/Sign.Cli/Kernel32.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.ComponentModel;
using System.Runtime.InteropServices;
namespace Sign.Cli
{
#pragma warning disable IDE1006 // Naming Styles
// P/Invoke wrappers for the kernel32.dll APIs this tool uses for DLL
// search-path control and Win32 activation contexts (side-by-side loading).
static class Kernel32
{
    // Adds a directory to the process DLL search path.
    [DllImport("kernel32.dll", SetLastError = true, PreserveSig = true)]
    [return: MarshalAs(UnmanagedType.Bool)]
    public static extern bool SetDllDirectoryW(
        [MarshalAs(UnmanagedType.LPWStr)] string lpPathName);

    // Loads the named native module into the process.
    [DllImport("kernel32.dll", SetLastError = true, PreserveSig = true)]
    public static extern IntPtr LoadLibraryW(
        [MarshalAs(UnmanagedType.LPWStr)] string path);

    // Creates an activation context from the supplied ACTCTX description;
    // returns INVALID_HANDLE_VALUE (-1) on failure.
    [DllImport("kernel32.dll", SetLastError = true, PreserveSig = true)]
    public static extern IntPtr CreateActCtxW(ref ACTCTX pActCtx);

    // Activates the context; lpCookie is required to deactivate it later.
    [DllImport("kernel32.dll", SetLastError = true, PreserveSig = true)]
    [return: MarshalAs(UnmanagedType.Bool)]
    public static extern bool ActivateActCtx(IntPtr hActCtx, out IntPtr lpCookie);

    // Deactivates the context identified by the cookie from ActivateActCtx.
    [DllImport("kernel32.dll", SetLastError = true, PreserveSig = true)]
    [return: MarshalAs(UnmanagedType.Bool)]
    public static extern bool DeactivateActCtx(int dwFlags, IntPtr lpCookie);

    // Releases the context handle returned by CreateActCtxW.
    [DllImport("kernel32.dll", PreserveSig = true)]
    public static extern void ReleaseActCtx(IntPtr hActCtx);

    // Managed mirror of the native ACTCTXW structure.
    // NOTE(review): this declares fewer members than the full native ACTCTXW;
    // presumably only the fields this tool populates are mirrored — confirm the
    // layout and cbSize expectations against the Win32 definition.
    [StructLayout(LayoutKind.Sequential, Pack = 4, CharSet = CharSet.Unicode)]
    public struct ACTCTX
    {
        public int cbSize;
        public ActivationContextFlags dwFlags;
        public string lpSource;
        public ushort wProcessorArchitecture;
        public ushort wLangId;
        public string lpAssemblyDirectory;
        public string lpResourceName;
        public string lpApplicationName;
        public IntPtr hModule;
    }

    // Subset of the ACTCTX_FLAG_* constants used when populating ACTCTX.dwFlags.
    [Flags]
    public enum ActivationContextFlags : uint
    {
        ACTCTX_FLAG_RESOURCE_NAME_VALID = 0x008,
        ACTCTX_FLAG_APPLICATION_NAME_VALID = 0x020
    }

    // Disposable wrapper: the constructor creates and activates an activation
    // context for the given manifest source; Dispose deactivates and releases it.
    public sealed class ActivationContext : IDisposable
    {
        readonly IntPtr INVALID_HANDLE_VALUE = new(-1);
        // Starts at INVALID_HANDLE_VALUE until CreateActCtxW succeeds.
        IntPtr activationContext = new(-1);
        IntPtr activationContextCookie;

        // Throws Win32Exception if the context cannot be created or activated.
        public ActivationContext(string assemblyName)
        {
            var requestedActivationContext = new ACTCTX
            {
                // NOTE(review): the generic type argument appears to have been
                // lost in extraction; likely Marshal.SizeOf<ACTCTX>() — confirm
                // against the upstream file.
                cbSize = Marshal.SizeOf(),
                lpSource = assemblyName
            };

            activationContext = CreateActCtxW(ref requestedActivationContext);

            if (activationContext != INVALID_HANDLE_VALUE)
            {
                if (!ActivateActCtx(activationContext, out activationContextCookie))
                {
                    throw new Win32Exception(Marshal.GetLastWin32Error());
                }
            }
            else
            {
                throw new Win32Exception(Marshal.GetLastWin32Error());
            }
        }

        // Deactivates and releases the context. Safe to call more than once:
        // both fields are reset to their "nothing held" values on success.
        // NOTE(review): throwing from Dispose is generally discouraged — it can
        // mask an in-flight exception when used in a using block; consider
        // logging the failure instead.
        public void Dispose()
        {
            if (activationContextCookie != IntPtr.Zero)
            {
                if (!DeactivateActCtx(dwFlags: 0, activationContextCookie))
                {
                    throw new Win32Exception(Marshal.GetLastWin32Error());
                }
                activationContextCookie = IntPtr.Zero;
            }

            if (activationContext != INVALID_HANDLE_VALUE)
            {
                ReleaseActCtx(activationContext);
                activationContext = INVALID_HANDLE_VALUE;
            }
        }
    }
}
#pragma warning restore IDE1006 // Naming Styles
}
================================================
FILE: src/Sign.Cli/PACKAGE.md
================================================
## About
Sign CLI is a .NET tool that provides digital signing for .NET assemblies, packages, and other files.
The tool signs files inside-out, starting with the most nested files and then the outer files, ensuring everything is signed in the correct order.
## Prerequisites
- An up-to-date x64-based version of Windows currently in [mainstream support](https://learn.microsoft.com/lifecycle/products/)
- [.NET 8 SDK or later](https://dotnet.microsoft.com/download)
- [Microsoft Visual C++ 14 runtime](https://aka.ms/vs/17/release/vc_redist.x64.exe)
- For ClickOnce and VSTO signing: A recent — ideally, the latest — version of [.NET Framework](https://dotnet.microsoft.com/download/dotnet-framework)
## Usage
- For help with...
- Azure Key Vault: `sign code azure-key-vault --help`
- Artifact Signing: `sign code artifact-signing --help`
- Local signing: `sign code certificate-store --help`
- Version information: `sign --version`
See the [GitHub repository](https://github.com/dotnet/sign) for additional information and samples.
## License
This package is released as open source under the [MIT license](https://licenses.nuget.org/MIT).
## Feedback
Bug reports, feedback, and contributions are welcome at the [GitHub repository](https://github.com/dotnet/sign).
Happy signing! 🚀
================================================
FILE: src/Sign.Cli/Program.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using Sign.Core;
namespace Sign.Cli
{
internal static class Program
{
    // CLI entry point: enforces the 64-bit requirement, initializes the app,
    // warns if the VC++ runtime is missing, then parses and invokes the root
    // command, converting any unhandled exception into a failure exit code.
    // NOTE(review): the value-returning return statements suggest this was
    // declared as async Task<int>; the generic argument appears lost in
    // extraction — confirm against the upstream file.
    internal static async Task Main(string[] args)
    {
        // Force UTF-8 console I/O for the duration of the run; the previous
        // encodings are restored when this scope is disposed.
        using (new TemporaryConsoleEncoding())
        {
            if (!Environment.Is64BitProcess)
            {
                Console.Error.WriteLine(Resources.x86NotSupported);
                return ExitCode.Failed;
            }

            AppInitializer.Initialize();

            string systemDirectoryPath = Environment.GetFolderPath(Environment.SpecialFolder.System);
            // NavSip.dll has a dependency on this.
            string vcRuntime140FilePath = Path.Combine(systemDirectoryPath, "vcruntime140.dll");

            if (!File.Exists(vcRuntime140FilePath))
            {
                // Warn but continue; the run is not aborted on a missing runtime.
                WriteWarning(Resources.MsvcrtNotDetected);
            }

            try
            {
                SignCommand rootCommand = CreateCommand(serviceProviderFactory: null);
                return await rootCommand.Parse(args).InvokeAsync();
            }
            catch (Exception ex)
            {
                // Last-chance handler: dump the exception and report failure.
                Console.WriteLine(ex);
                return ExitCode.Failed;
            }
        }
    }

    // Writes the message to stdout in yellow, then restores the console color.
    private static void WriteWarning(string warning)
    {
        Console.ForegroundColor = ConsoleColor.Yellow;
        Console.WriteLine(warning);
        Console.ResetColor();
    }

    // Builds the root command, optionally with a caller-supplied
    // service-provider factory (null means the command uses its own default).
    internal static SignCommand CreateCommand(IServiceProviderFactory? serviceProviderFactory = null)
    {
        return new SignCommand(serviceProviderFactory);
    }
}
}
================================================
FILE: src/Sign.Cli/Properties/launchSettings.json
================================================
{
"profiles": {
"Sign.Cli": {
"commandName": "Project",
"commandLineArgs": "code certificate-store -b C:\\Trash -v trace -cf C:\\git\\Entropy\\MakeTestCert\\af994810f3d0d01b5f6f37e8be085e1a537d40c9.pfx -cfp 0695cf4875ae67f2194ea4c2cbcdcb1327c6874d5949321c4d8028bed308b7a6 MakeTestCert.dll -o MakeTestCert.signed.dll"
}
}
}
================================================
FILE: src/Sign.Cli/Resources.Designer.cs
================================================
//------------------------------------------------------------------------------
//
// This code was generated by a tool.
// Runtime Version:4.0.30319.42000
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
//
//------------------------------------------------------------------------------
namespace Sign.Cli {
using System;
///
/// A strongly-typed resource class, for looking up localized strings, etc.
///
// This class was auto-generated by the StronglyTypedResourceBuilder
// class via a tool like ResGen or Visual Studio.
// To add or remove a member, edit your .ResX file then rerun ResGen
// with the /str option, or rebuild your VS project.
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "17.0.0.0")]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
internal class Resources {
private static global::System.Resources.ResourceManager resourceMan;
private static global::System.Globalization.CultureInfo resourceCulture;
[global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
internal Resources() {
}
///
/// Returns the cached ResourceManager instance used by this class.
///
[global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
internal static global::System.Resources.ResourceManager ResourceManager {
get {
if (object.ReferenceEquals(resourceMan, null)) {
global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("Sign.Cli.Resources", typeof(Resources).Assembly);
resourceMan = temp;
}
return resourceMan;
}
}
///
/// Overrides the current thread's CurrentUICulture property for all
/// resource lookups using this strongly typed resource class.
///
[global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
internal static global::System.Globalization.CultureInfo Culture {
get {
return resourceCulture;
}
set {
resourceCulture = value;
}
}
///
/// Looks up a localized string similar to Application name (ClickOnce)..
///
internal static string ApplicationNameOptionDescription {
get {
return ResourceManager.GetString("ApplicationNameOptionDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to Base directory for files. Overrides the current working directory..
///
internal static string BaseDirectoryOptionDescription {
get {
return ResourceManager.GetString("BaseDirectoryOptionDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to Use Windows Certificate Store or a local certificate file..
///
internal static string CertificateStoreCommandDescription {
get {
return ResourceManager.GetString("CertificateStoreCommandDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to Client ID to authenticate to Azure..
///
internal static string ClientIdOptionDescription {
get {
return ResourceManager.GetString("ClientIdOptionDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to Client secret to authenticate to Azure..
///
internal static string ClientSecretOptionDescription {
get {
return ResourceManager.GetString("ClientSecretOptionDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to The client secret options are obsolete and should no longer be specified..
///
internal static string ClientSecretOptionsObsolete {
get {
return ResourceManager.GetString("ClientSecretOptionsObsolete", resourceCulture);
}
}
///
/// Looks up a localized string similar to Sign binaries and containers..
///
internal static string CodeCommandDescription {
get {
return ResourceManager.GetString("CodeCommandDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to Azure credential type that will be used. This defaults to DefaultAzureCredential..
///
internal static string CredentialTypeOptionDescription {
get {
return ResourceManager.GetString("CredentialTypeOptionDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to Description of the signing certificate..
///
internal static string DescriptionOptionDescription {
get {
return ResourceManager.GetString("DescriptionOptionDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to Description URL of the signing certificate..
///
internal static string DescriptionUrlOptionDescription {
get {
return ResourceManager.GetString("DescriptionUrlOptionDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to Digest algorithm to hash files with. Allowed values are 'sha256', 'sha384', and 'sha512'..
///
internal static string FileDigestOptionDescription {
get {
return ResourceManager.GetString("FileDigestOptionDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to Path to file containing paths of files to sign or to exclude from signing..
///
internal static string FileListOptionDescription {
get {
return ResourceManager.GetString("FileListOptionDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to File(s) to sign..
///
internal static string FilesArgumentDescription {
get {
return ResourceManager.GetString("FilesArgumentDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to Invalid value for {0}. The value must be a fully rooted directory path..
///
internal static string InvalidBaseDirectoryValue {
get {
return ResourceManager.GetString("InvalidBaseDirectoryValue", resourceCulture);
}
}
///
/// Looks up a localized string similar to Invalid value for {0}. The value must be a SHA-256, SHA-384, or SHA-512 certificate fingerprint (in hexadecimal)..
///
internal static string InvalidCertificateFingerprintValue {
get {
return ResourceManager.GetString("InvalidCertificateFingerprintValue", resourceCulture);
}
}
///
/// Looks up a localized string similar to Invalid value for {0}. The value must be 'sha256', 'sha384', or 'sha512'..
///
internal static string InvalidDigestValue {
get {
return ResourceManager.GetString("InvalidDigestValue", resourceCulture);
}
}
///
/// Looks up a localized string similar to The file path cannot be rooted when using a glob. Use a path relative to the working directory (or base directory, if used)..
///
internal static string InvalidFileValue {
get {
return ResourceManager.GetString("InvalidFileValue", resourceCulture);
}
}
///
/// Looks up a localized string similar to Invalid value for {0}. The value must be a number value greater than or equal to 1..
///
internal static string InvalidMaxConcurrencyValue {
get {
return ResourceManager.GetString("InvalidMaxConcurrencyValue", resourceCulture);
}
}
///
/// Looks up a localized string similar to Invalid value for {0}. The value must be an absolute HTTPS URL..
///
internal static string InvalidHttpsUrlValue {
get {
return ResourceManager.GetString("InvalidHttpsUrlValue", resourceCulture);
}
}
///
/// Looks up a localized string similar to Invalid value for {0}. The value must be an absolute HTTP or HTTPS URL..
///
internal static string InvalidUrlValue {
get {
return ResourceManager.GetString("InvalidUrlValue", resourceCulture);
}
}
///
/// Looks up a localized string similar to The client id of a user assigned ManagedIdentity..
///
internal static string ManagedIdentityClientIdOptionDescription {
get {
return ResourceManager.GetString("ManagedIdentityClientIdOptionDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to Managed identity to authenticate to Azure Key. (obsolete).
///
internal static string ManagedIdentityOptionDescription {
get {
return ResourceManager.GetString("ManagedIdentityOptionDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to The -kvm and --azure-key-vault-managed-identity options are obsolete and should no longer be specified..
///
internal static string ManagedIdentityOptionObsolete {
get {
return ResourceManager.GetString("ManagedIdentityOptionObsolete", resourceCulture);
}
}
///
/// Looks up a localized string similar to The resource id of a user assigned ManagedIdentity..
///
internal static string ManagedIdentityResourceIdOptionDescription {
get {
return ResourceManager.GetString("ManagedIdentityResourceIdOptionDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to Maximum concurrency..
///
internal static string MaxConcurrencyOptionDescription {
get {
return ResourceManager.GetString("MaxConcurrencyOptionDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to A file or glob is required..
///
internal static string MissingFileValue {
get {
return ResourceManager.GetString("MissingFileValue", resourceCulture);
}
}
///
/// Looks up a localized string similar to Warning: The Microsoft Visual C++ 14 runtime is required but was not detected on your system. Download and install from https://aka.ms/vs/17/release/vc_redist.x64.exe.
///
internal static string MsvcrtNotDetected {
get {
return ResourceManager.GetString("MsvcrtNotDetected", resourceCulture);
}
}
///
/// Looks up a localized string similar to No inputs found to sign..
///
internal static string NoFilesToSign {
get {
return ResourceManager.GetString("NoFilesToSign", resourceCulture);
}
}
///
/// Looks up a localized string similar to Output file or directory. If omitted, input files will be overwritten..
///
internal static string OutputOptionDescription {
get {
return ResourceManager.GetString("OutputOptionDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to Publisher name (ClickOnce)..
///
internal static string PublisherNameOptionDescription {
get {
return ResourceManager.GetString("PublisherNameOptionDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to Sign CLI.
///
internal static string SignCommandDescription {
get {
return ResourceManager.GetString("SignCommandDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to Some files do not exist. Try using a different {0} value or a fully qualified file path..
///
internal static string SomeFilesDoNotExist {
get {
return ResourceManager.GetString("SomeFilesDoNotExist", resourceCulture);
}
}
///
/// Looks up a localized string similar to Tenant ID to authenticate to Azure..
///
internal static string TenantIdOptionDescription {
get {
return ResourceManager.GetString("TenantIdOptionDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to Digest algorithm for the RFC 3161 timestamp server. Allowed values are sha256, sha384, and sha512..
///
internal static string TimestampDigestOptionDescription {
get {
return ResourceManager.GetString("TimestampDigestOptionDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to RFC 3161 timestamp server URL..
///
internal static string TimestampUrlOptionDescription {
get {
return ResourceManager.GetString("TimestampUrlOptionDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to Sets the verbosity level. Allowed values are 'none', 'critical', 'error', 'warning', 'information', 'debug', and 'trace'..
///
internal static string VerbosityOptionDescription {
get {
return ResourceManager.GetString("VerbosityOptionDescription", resourceCulture);
}
}
///
/// Looks up a localized string similar to The trusted-signing command is obsolete. Use the artifact-signing command instead..
///
internal static string TrustedSigningCommandObsolete {
get {
return ResourceManager.GetString("TrustedSigningCommandObsolete", resourceCulture);
}
}
///
/// Looks up a localized string similar to Only Windows x64 is supported at this time. See https://github.com/dotnet/sign/issues/474 regarding Windows x86 support..
///
internal static string x86NotSupported {
get {
return ResourceManager.GetString("x86NotSupported", resourceCulture);
}
}
}
}
================================================
FILE: src/Sign.Cli/Resources.resx
================================================
text/microsoft-resx2.0System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089Application name (ClickOnce).Base directory for files. Overrides the current working directory.Use Windows Certificate Store or a local certificate file.Client ID to authenticate to Azure.Client secret to authenticate to Azure.The client secret options are obsolete and should no longer be specified.Sign binaries and containers.Description of the signing certificate.Description URL of the signing certificate.Digest algorithm to hash files with. Allowed values are 'sha256', 'sha384', and 'sha512'.{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized.Path to file containing paths of files to sign or to exclude from signing.File(s) to sign.Invalid value for {0}. The value must be a fully rooted directory path.{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Invalid value for {0}. The value must be a SHA-256, SHA-384, or SHA-512 certificate fingerprint (in hexadecimal).{NumberedPlaceholder="{0}"} is the option name (e.g.: --certificate-fingerprint). {Locked="SHA-256", "SHA-384", "SHA-512"} are cryptographic hash algorithms.Invalid value for {0}. The value must be 'sha256', 'sha384', or 'sha512'.{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized. {NumberedPlaceholder="{0}"} is an option name (e.g.: --file-digest) and should not be localized.The file path cannot be rooted when using a glob. Use a path relative to the working directory (or base directory, if used).Invalid value for {0}. 
The value must be a number value greater than or equal to 1.{NumberedPlaceholder="{0}"} is an option name (e.g.: --max-concurrency) and should not be localized.Invalid value for {0}. The value must be an absolute HTTPS URL.{NumberedPlaceholder="{0}"} is an option name (e.g.: --artifact-signing-endpoint) and should not be localized.Invalid value for {0}. The value must be an absolute HTTP or HTTPS URL.{NumberedPlaceholder="{0}"} is an option name (e.g.: --timestamp-url) and should not be localized.The client id of a user assigned ManagedIdentity.Managed identity to authenticate to Azure Key. (obsolete)The -kvm and --azure-key-vault-managed-identity options are obsolete and should no longer be specified.The resource id of a user assigned ManagedIdentity.Maximum concurrency.A file or glob is required.No inputs found to sign.Output file or directory. If omitted, input files will be overwritten.Publisher name (ClickOnce).ClickOnce is a Microsoft deployment technology.Sign CLISome files do not exist. Try using a different {0} value or a fully qualified file path.{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Tenant ID to authenticate to Azure.Digest algorithm for the RFC 3161 timestamp server. Allowed values are sha256, sha384, and sha512.{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161), and {Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names should not be localized.RFC 3161 timestamp server URL.{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161) and should not be localized.Sets the verbosity level. Allowed values are 'none', 'critical', 'error', 'warning', 'information', 'debug', and 'trace'.{Locked="none", "critical", "error", "warning", "information", "debug", "trace"} are option values and should not be localized.Only Windows x64 is supported at this time. 
See https://github.com/dotnet/sign/issues/474 regarding Windows x86 support.Warning: The Microsoft Visual C++ 14 runtime is required but was not detected on your system. Download and install from https://aka.ms/vs/17/release/vc_redist.x64.exe{Locked="https://aka.ms/vs/17/release/vc_redist.x64.exe"} is a URL.Azure credential type that will be used. This defaults to DefaultAzureCredential.The trusted-signing command is obsolete. Use the artifact-signing command instead.
================================================
FILE: src/Sign.Cli/Sign.Cli.csproj
================================================
signtruetrueExeSign CLIPACKAGE.mdtrueMajorSign.Clifalsesign%WINDIR%\System32\WindowsPowerShell\v1.0\powershell.exe$(RepositoryRootDirectory)\scripts\VerifyNuGetPackage.ps1true\PreserveNewesttrue\true\TrustedSigningResources.resxTrueTrueTrueTrueAzureKeyVaultResources.resxTrueTrueCertificateStoreResources.resxTrueTrueResources.resxTrueTrueArtifactSigningResources.resxTrustedSigningResources.Designer.csResXFileCodeGeneratorResXFileCodeGeneratorAzureKeyVaultResources.Designer.csResXFileCodeGeneratorCertificateStoreResources.Designer.csResXFileCodeGeneratorResources.Designer.csResXFileCodeGeneratorArtifactSigningResources.Designer.cs
================================================
FILE: src/Sign.Cli/SignCommand.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.CommandLine;
using Sign.Core;
namespace Sign.Cli
{
internal sealed class SignCommand : RootCommand
{
    /// <summary>
    /// Root "sign" command. Adds the "code" subcommand and, beneath it, each
    /// signing backend (Azure Key Vault, certificate store, Trusted Signing,
    /// Artifact Signing) in that order.
    /// </summary>
    /// <param name="serviceProviderFactory">
    /// Optional factory for the DI container; a default factory is created
    /// when none is supplied.
    /// </param>
    internal SignCommand(IServiceProviderFactory? serviceProviderFactory = null)
        : base(Resources.SignCommandDescription)
    {
        serviceProviderFactory ??= new ServiceProviderFactory();

        CodeCommand codeCommand = new();

        Subcommands.Add(codeCommand);

        codeCommand.Subcommands.Add(new AzureKeyVaultCommand(codeCommand, serviceProviderFactory));
        codeCommand.Subcommands.Add(new CertificateStoreCommand(codeCommand, serviceProviderFactory));
        codeCommand.Subcommands.Add(new TrustedSigningCommand(codeCommand, serviceProviderFactory));
        codeCommand.Subcommands.Add(new ArtifactSigningCommand(codeCommand, serviceProviderFactory));
    }
}
}
================================================
FILE: src/Sign.Cli/StandardStreamWriterExtensions.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.Globalization;
namespace Sign.Cli
{
internal static class StandardStreamWriterExtensions
{
    /// <summary>
    /// Writes a formatted line in which each format argument is replaced by the
    /// <c>Name</c> property of the corresponding object (late-bound via
    /// <c>dynamic</c> — e.g. System.CommandLine option/argument instances).
    /// </summary>
    /// <param name="writer">Destination writer.</param>
    /// <param name="format">Composite format string with {0}, {1}, … placeholders.</param>
    /// <param name="options">Objects expected to expose a public <c>Name</c> property.</param>
    internal static void WriteFormattedLine(this TextWriter writer, string format, params object[] options)
    {
        string[] names = new string[options.Length];

        for (int i = 0; i < options.Length; i++)
        {
            // Interpolation tolerates a non-string Name by calling ToString().
            names[i] = $"{((dynamic)options[i]).Name}";
        }

        // InvariantCulture: these are CLI plumbing messages, not user-locale text.
        writer.WriteLine(string.Format(CultureInfo.InvariantCulture, format, names));
    }
}
}
================================================
FILE: src/Sign.Cli/TemporaryConsoleEncoding.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.Text;
namespace Sign.Cli
{
internal sealed class TemporaryConsoleEncoding : IDisposable
{
    // Encodings captured at construction time so Dispose() can restore them.
    private readonly Encoding _originalInput;
    private readonly Encoding _originalOutput;

    /// <summary>
    /// Switches the console's input and output encodings to UTF-8 for the
    /// lifetime of this instance; Dispose() restores the previous encodings.
    /// </summary>
    internal TemporaryConsoleEncoding()
    {
        // Capture the current encodings before overwriting them.
        _originalInput = Console.InputEncoding;
        _originalOutput = Console.OutputEncoding;

        Console.InputEncoding = Encoding.UTF8;
        Console.OutputEncoding = Encoding.UTF8;
    }

    public void Dispose()
    {
        Console.InputEncoding = _originalInput;
        Console.OutputEncoding = _originalOutput;

        GC.SuppressFinalize(this);
    }
}
}
================================================
FILE: src/Sign.Cli/TrustedSigningCommand.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.CommandLine;
using Azure.CodeSigning;
using Azure.CodeSigning.Extensions;
using Azure.Core;
using Microsoft.Extensions.Azure;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Sign.Core;
using Sign.SignatureProviders.ArtifactSigning;
namespace Sign.Cli
{
internal sealed class TrustedSigningCommand : Command
{
    // Restored generic type arguments below: they were stripped during extraction
    // (e.g. "Option EndpointOption", "Argument?>"), leaving non-compiling code.

    // Required HTTPS endpoint of the signing account (validated by ParseHttpsUrl).
    internal Option<Uri> EndpointOption { get; }
    internal Option<string> AccountOption { get; }
    internal Option<string> CertificateProfileOption { get; }
    internal AzureCredentialOptions AzureCredentialOptions { get; } = new();
    internal Argument<List<string>?> FilesArgument { get; }

    /// <summary>
    /// Obsolete "trusted-signing" subcommand. It prints an obsolescence notice and
    /// then delegates to the Artifact Signing implementation with the same options.
    /// </summary>
    /// <param name="codeCommand">Parent "code" command that performs the signing.</param>
    /// <param name="serviceProviderFactory">Factory used to register signing services.</param>
    internal TrustedSigningCommand(CodeCommand codeCommand, IServiceProviderFactory serviceProviderFactory)
        : base("trusted-signing", TrustedSigningResources.CommandDescription)
    {
        ArgumentNullException.ThrowIfNull(codeCommand, nameof(codeCommand));
        ArgumentNullException.ThrowIfNull(serviceProviderFactory, nameof(serviceProviderFactory));

        EndpointOption = new Option<Uri>("--trusted-signing-endpoint", "-tse")
        {
            CustomParser = CodeCommand.ParseHttpsUrl,
            Description = TrustedSigningResources.EndpointOptionDescription,
            Required = true
        };
        AccountOption = new Option<string>("--trusted-signing-account", "-tsa")
        {
            Description = TrustedSigningResources.AccountOptionDescription,
            Required = true
        };
        CertificateProfileOption = new Option<string>("--trusted-signing-certificate-profile", "-tscp")
        {
            Description = TrustedSigningResources.CertificateProfileOptionDescription,
            Required = true
        };
        FilesArgument = new Argument<List<string>?>("file(s)")
        {
            Description = Resources.FilesArgumentDescription,
            Arity = ArgumentArity.OneOrMore
        };

        Options.Add(EndpointOption);
        Options.Add(AccountOption);
        Options.Add(CertificateProfileOption);
        AzureCredentialOptions.AddOptionsToCommand(this);
        Arguments.Add(FilesArgument);

        SetAction((ParseResult parseResult, CancellationToken cancellationToken) =>
        {
            // Always warn: "artifact-signing" is the supported replacement.
            Console.Out.WriteLine(Resources.TrustedSigningCommandObsolete);

            List<string>? filesArgument = parseResult.GetValue(FilesArgument);

            if (filesArgument is not { Count: > 0 })
            {
                Console.Error.WriteLine(Resources.MissingFileValue);

                return Task.FromResult(ExitCode.InvalidOptions);
            }

            TokenCredential? credential = AzureCredentialOptions.CreateTokenCredential(parseResult);
            if (credential is null)
            {
                return Task.FromResult(ExitCode.Failed);
            }

            // Some of the options are required and that is why we can safely use
            // the null-forgiving operator (!) to simplify the code.
            Uri endpointUrl = parseResult.GetValue(EndpointOption)!;
            string accountName = parseResult.GetValue(AccountOption)!;
            string certificateProfileName = parseResult.GetValue(CertificateProfileOption)!;

            serviceProviderFactory.AddServices(services =>
            {
                services.AddAzureClients(builder =>
                {
                    // Registers CertificateProfileClient for the given endpoint.
                    builder.AddCertificateProfileClient(endpointUrl);
                    builder.UseCredential(credential);
                    builder.ConfigureDefaults(options => options.Retry.Mode = RetryMode.Exponential);
                });

                services.AddSingleton(serviceProvider =>
                {
                    return new ArtifactSigningService(
                        serviceProvider.GetRequiredService<CertificateProfileClient>(),
                        accountName,
                        certificateProfileName,
                        serviceProvider.GetRequiredService<ILogger<ArtifactSigningService>>());
                });
            });

            ArtifactSigningServiceProvider trustedSigningServiceProvider = new();

            return codeCommand.HandleAsync(parseResult, serviceProviderFactory, trustedSigningServiceProvider, filesArgument);
        });
    }
}
}
================================================
FILE: src/Sign.Cli/TrustedSigningResources.Designer.cs
================================================
//------------------------------------------------------------------------------
//
// This code was generated by a tool.
// Runtime Version:4.0.30319.42000
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
//
//------------------------------------------------------------------------------
namespace Sign.Cli {
using System;
/// <summary>
///   A strongly-typed resource class, for looking up localized strings, etc.
/// </summary>
// This class was auto-generated by the StronglyTypedResourceBuilder
// class via a tool like ResGen or Visual Studio.
// To add or remove a member, edit your .ResX file then rerun ResGen
// with the /str option, or rebuild your VS project.
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "17.0.0.0")]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
internal class TrustedSigningResources {

    private static global::System.Resources.ResourceManager resourceMan;

    private static global::System.Globalization.CultureInfo resourceCulture;

    [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
    internal TrustedSigningResources() {
    }

    /// <summary>
    ///   Returns the cached ResourceManager instance used by this class.
    /// </summary>
    [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
    internal static global::System.Resources.ResourceManager ResourceManager {
        get {
            if (object.ReferenceEquals(resourceMan, null)) {
                global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("Sign.Cli.TrustedSigningResources", typeof(TrustedSigningResources).Assembly);
                resourceMan = temp;
            }
            return resourceMan;
        }
    }

    /// <summary>
    ///   Overrides the current thread's CurrentUICulture property for all
    ///   resource lookups using this strongly typed resource class.
    /// </summary>
    [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
    internal static global::System.Globalization.CultureInfo Culture {
        get {
            return resourceCulture;
        }
        set {
            resourceCulture = value;
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to The Trusted Signing Account name..
    /// </summary>
    internal static string AccountOptionDescription {
        get {
            return ResourceManager.GetString("AccountOptionDescription", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to The Certificate Profile name..
    /// </summary>
    internal static string CertificateProfileOptionDescription {
        get {
            return ResourceManager.GetString("CertificateProfileOptionDescription", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to Use Trusted Signing. (obsolete, use artifact-signing instead).
    /// </summary>
    internal static string CommandDescription {
        get {
            return ResourceManager.GetString("CommandDescription", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to The Trusted Signing Account endpoint. The value must be a URI that aligns to the region that your Trusted Signing Account and Certificate Profile were created in..
    /// </summary>
    internal static string EndpointOptionDescription {
        get {
            return ResourceManager.GetString("EndpointOptionDescription", resourceCulture);
        }
    }
}
}
================================================
FILE: src/Sign.Cli/TrustedSigningResources.resx
================================================
text/microsoft-resx2.0System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089The Trusted Signing Account name.The Certificate Profile name.Use Trusted Signing. (obsolete, use artifact-signing instead)The Trusted Signing Account endpoint. The value must be a URI that aligns to the region that your Trusted Signing Account and Certificate Profile were created in.
================================================
FILE: src/Sign.Cli/appsettings.json
================================================
{
"Logging": {
"LogLevel": {
"Azure.Core": "Error",
"Azure.Identity": "Error"
}
}
}
================================================
FILE: src/Sign.Cli/xlf/ArtifactSigningResources.cs.xlf
================================================
The Artifact Signing Account name.Název účtu Artifact SigningThe Certificate Profile name.Název profilu certifikátuUse Artifact Signing.Použijte Artifact Signing.The Artifact Signing Account endpoint. The value must be a URI that aligns to the region that your Artifact Signing Account and Certificate Profile were created in.Koncový bod účtu Artifact Signing. Hodnota musí být identifikátor URI, který odpovídá oblasti, ve které jste vytvořili účet Artifact Signing a profil certifikátu.
================================================
FILE: src/Sign.Cli/xlf/ArtifactSigningResources.de.xlf
================================================
The Artifact Signing Account name.Der Name des Artefaktsignatur-Kontos.The Certificate Profile name.Der Zertifikatprofilname.Use Artifact Signing.Artefaktsignatur verwendenThe Artifact Signing Account endpoint. The value must be a URI that aligns to the region that your Artifact Signing Account and Certificate Profile were created in.Der Endpunkt des Kontos der Artefaktsignatur. Der Wert muss ein URI sein, der der Region entspricht, in der Ihr Konto der Artefaktsignatur und das Zertifikatprofil erstellt wurden.
================================================
FILE: src/Sign.Cli/xlf/ArtifactSigningResources.es.xlf
================================================
The Artifact Signing Account name.Nombre de la cuenta de firma de artefactos.The Certificate Profile name.Nombre del perfil de certificado.Use Artifact Signing.Use la firma de artefactos.The Artifact Signing Account endpoint. The value must be a URI that aligns to the region that your Artifact Signing Account and Certificate Profile were created in.Punto de conexión de la cuenta de firma de artefactos. El valor debe ser un URI que se alinee con la región en la que se crearon la cuenta de firma de artefactos y el perfil de certificado.
================================================
FILE: src/Sign.Cli/xlf/ArtifactSigningResources.fr.xlf
================================================
The Artifact Signing Account name.Nom du compte de Signature d'artefacts.The Certificate Profile name.Nom du profil de certificat.Use Artifact Signing.Utilisez la Signature d'artefacts.The Artifact Signing Account endpoint. The value must be a URI that aligns to the region that your Artifact Signing Account and Certificate Profile were created in.Point de terminaison du compte de Signature d'artefacts. La valeur doit être un URI correspondant à la région dans laquelle votre compte de Signature d'artefacts et votre profil de certificat ont été créés.
================================================
FILE: src/Sign.Cli/xlf/ArtifactSigningResources.it.xlf
================================================
The Artifact Signing Account name.Nome dell'account Firma artefatti.The Certificate Profile name.Nome profilo certificato.Use Artifact Signing.Usare Firma artefatti.The Artifact Signing Account endpoint. The value must be a URI that aligns to the region that your Artifact Signing Account and Certificate Profile were created in.Endpoint dell'account di Firma artefatti. Il valore deve essere un URI allineato all'area in cui sono stati creati l'account Firma artefatti e il profilo del certificato.
================================================
FILE: src/Sign.Cli/xlf/ArtifactSigningResources.ja.xlf
================================================
The Artifact Signing Account name.Artifact Signing アカウント名です。The Certificate Profile name.証明書プロファイル名。Use Artifact Signing.Artifact Signing を使用します。The Artifact Signing Account endpoint. The value must be a URI that aligns to the region that your Artifact Signing Account and Certificate Profile were created in.Artifact Signing アカウントのエンドポイントです。この値は、Artifact Signing アカウントと証明書プロファイルが作成されたリージョンに合った URI である必要があります。
================================================
FILE: src/Sign.Cli/xlf/ArtifactSigningResources.ko.xlf
================================================
The Artifact Signing Account name.Artifact Signing 계정 이름입니다.The Certificate Profile name.인증서 프로필 이름입니다.Use Artifact Signing.Artifact Signing을 사용합니다.The Artifact Signing Account endpoint. The value must be a URI that aligns to the region that your Artifact Signing Account and Certificate Profile were created in.Artifact Signing 계정 엔드포인트입니다. 값은 Artifact Signing 계정 및 인증서 프로필을 만든 지역에 맞는 URI여야 합니다.
================================================
FILE: src/Sign.Cli/xlf/ArtifactSigningResources.pl.xlf
================================================
The Artifact Signing Account name.Nazwa konta podpisywania artefaktu.The Certificate Profile name.Nazwa profilu certyfikatu.Use Artifact Signing.Użyj podpisywania artefaktu.The Artifact Signing Account endpoint. The value must be a URI that aligns to the region that your Artifact Signing Account and Certificate Profile were created in.Punkt końcowy konta podpisywania artefaktu. Wartość musi być identyfikatorem URI, który jest zgodny z regionem, w ramach którego utworzono konto podpisywania artefaktu i profil certyfikatu.
================================================
FILE: src/Sign.Cli/xlf/ArtifactSigningResources.pt-BR.xlf
================================================
The Artifact Signing Account name.O nome da Conta de Assinatura de Artefatos.The Certificate Profile name.O nome do Perfil de Certificado.Use Artifact Signing.Use a Assinatura de Artefatos.The Artifact Signing Account endpoint. The value must be a URI that aligns to the region that your Artifact Signing Account and Certificate Profile were created in.O ponto de extremidade da Conta de Assinatura de Artefatos. O valor deve ser um URI que se alinhe à região em que sua Conta de Assinatura de Artefatos e o Perfil de Certificado foram criados.
================================================
FILE: src/Sign.Cli/xlf/ArtifactSigningResources.ru.xlf
================================================
The Artifact Signing Account name.Имя учетной записи для подписания артефактов.The Certificate Profile name.Имя профиля сертификата.Use Artifact Signing.Использование подписания артефактов.The Artifact Signing Account endpoint. The value must be a URI that aligns to the region that your Artifact Signing Account and Certificate Profile were created in.Конечная точка учетной записи для подписания артефактов. Значение — универсальный код ресурса (URI), соответствующий региону, в котором созданы учетная запись для подписания артефактов и профиль сертификата.
================================================
FILE: src/Sign.Cli/xlf/ArtifactSigningResources.tr.xlf
================================================
The Artifact Signing Account name.Yapıt İmzalama Hesabı adı.The Certificate Profile name.Sertifika Profili adı.Use Artifact Signing.Yapıt İmzalama'yı kullanın.The Artifact Signing Account endpoint. The value must be a URI that aligns to the region that your Artifact Signing Account and Certificate Profile were created in.Yapıt İmzalama Hesabı uç noktası. Değer, Yapıt İmzalama Hesabınızın ve Sertifika Profilinizin oluşturulduğu bölgeyle uyumlu bir URI olmalıdır.
================================================
FILE: src/Sign.Cli/xlf/ArtifactSigningResources.zh-Hans.xlf
================================================
The Artifact Signing Account name.工件签名帐户名。The Certificate Profile name.证书配置文件名称。Use Artifact Signing.使用工件签名。The Artifact Signing Account endpoint. The value must be a URI that aligns to the region that your Artifact Signing Account and Certificate Profile were created in.工件签名帐户终结点。该值必须是与创建工件签名帐户和证书配置文件的区域相对应的 URI。
================================================
FILE: src/Sign.Cli/xlf/ArtifactSigningResources.zh-Hant.xlf
================================================
The Artifact Signing Account name.Artifact Signing 帳戶名稱。The Certificate Profile name.憑證設定檔名稱。Use Artifact Signing.使用 Artifact Signing。The Artifact Signing Account endpoint. The value must be a URI that aligns to the region that your Artifact Signing Account and Certificate Profile were created in.Artifact Signing 帳戶端點。值必須是 URI,且與您的 Artifact Signing 帳戶和憑證設定檔建立區域一致。
================================================
FILE: src/Sign.Cli/xlf/AzureKeyVaultResources.cs.xlf
================================================
Name of the certificate in Azure Key Vault.Název certifikátu v Azure Key Vault.ClickOnce signing via the legacy .clickonce ZIP workaround is no longer supported. See documentation.Podepisování ClickOnce prostřednictvím starší verze alternativního řešení .clickonce ZIP se už nepodporuje. Projděte si dokumentaci.Use Azure Key Vault.Použijte Azure Key Vault.URL must only contain the protocol and host. (e.g.: https://<vault-name>.vault.azure.net/)Adresa URL musí obsahovat pouze protokol a hostitele. (např. https://<vault-name>.vault.azure.net/)URL must be an absolute HTTPS URL to an Azure Key Vault.Adresa URL musí být absolutní adresa URL protokolu HTTPS pro Azure Key Vault.URL to an Azure Key Vault.Adresa URL Azure Key Vault.
================================================
FILE: src/Sign.Cli/xlf/AzureKeyVaultResources.de.xlf
================================================
Name of the certificate in Azure Key Vault.Name des Zertifikats in Azure Key Vault.ClickOnce signing via the legacy .clickonce ZIP workaround is no longer supported. See documentation.Die ClickOnce-Signierung über die Legacy-.clickonce-ZIP-Problemumgehung wird nicht mehr unterstützt. Siehe Dokumentation.Use Azure Key Vault.Verwenden Sie Azure Key Vault.URL must only contain the protocol and host. (e.g.: https://<vault-name>.vault.azure.net/)Die URL darf nur das Protokoll und den Host enthalten. (Beispiel: https://<vault-name>.vault.azure.net/)URL must be an absolute HTTPS URL to an Azure Key Vault.Die URL muss eine absolute HTTPS-URL zu einem Azure Key Vault sein.URL to an Azure Key Vault.URL zu einem Azure Key Vault.
================================================
FILE: src/Sign.Cli/xlf/AzureKeyVaultResources.es.xlf
================================================
Name of the certificate in Azure Key Vault.Nombre del certificado en Azure Key Vault.ClickOnce signing via the legacy .clickonce ZIP workaround is no longer supported. See documentation.Ya no se admite la firma ClickOnce a través de la solución zip heredada .clickonce. Consulte la documentación.Use Azure Key Vault.Use Azure Key Vault.URL must only contain the protocol and host. (e.g.: https://<vault-name>.vault.azure.net/)La dirección URL solo debe contener el protocolo y el host. (por ejemplo: https://<vault-name>.vault.azure.net/)URL must be an absolute HTTPS URL to an Azure Key Vault.La dirección URL debe ser una dirección URL HTTPS absoluta a un Azure Key Vault.URL to an Azure Key Vault.Dirección URL a un Azure Key Vault.
================================================
FILE: src/Sign.Cli/xlf/AzureKeyVaultResources.fr.xlf
================================================
Name of the certificate in Azure Key Vault.Nom du certificat dans Azure Key Vault.ClickOnce signing via the legacy .clickonce ZIP workaround is no longer supported. See documentation.La signature ClickOnce via la solution de contournement zip .clickonce héritée n’est plus prise en charge. Consulter la documentation.Use Azure Key Vault.Utilisez Azure Key Vault.URL must only contain the protocol and host. (e.g.: https://<vault-name>.vault.azure.net/)L’URL doit contenir uniquement le protocole et l’hôte. (par exemple : https://<vault-name>.vault.azure.net/)URL must be an absolute HTTPS URL to an Azure Key Vault.L’URL doit être une URL HTTPS absolue vers un coffre-fort Azure.URL to an Azure Key Vault.URL d’un Azure Key Vault.
================================================
FILE: src/Sign.Cli/xlf/AzureKeyVaultResources.it.xlf
================================================
Name of the certificate in Azure Key Vault.Nome del certificato in Azure Key Vault.ClickOnce signing via the legacy .clickonce ZIP workaround is no longer supported. See documentation.La firma ClickOnce tramite la soluzione alternativa legacy .clickonce ZIP non è più supportata. Vedere la documentazione.Use Azure Key Vault.Usare Azure Key Vault.URL must only contain the protocol and host. (e.g.: https://<vault-name>.vault.azure.net/)L'URL deve contenere solo il protocollo e l'host. (ad esempio: https://<vault-name>.vault.azure.net/)URL must be an absolute HTTPS URL to an Azure Key Vault.L'URL deve essere un URL HTTPS assoluto per Azure Key Vault.URL to an Azure Key Vault.URL a un Azure Key Vault.
================================================
FILE: src/Sign.Cli/xlf/AzureKeyVaultResources.ja.xlf
================================================
Name of the certificate in Azure Key Vault.Azure Key Vault 内の証明書の名前。ClickOnce signing via the legacy .clickonce ZIP workaround is no longer supported. See documentation.従来の .clickonce ZIP 回避策による ClickOnce 署名はサポートされなくなりました。ドキュメントを参照してください。Use Azure Key Vault.Azure Key Vault を使用してください。URL must only contain the protocol and host. (e.g.: https://<vault-name>.vault.azure.net/)URL にはプロトコルとホストのみを含めなければなりません。(例: https://<vault-name>.vault.azure.net/)URL must be an absolute HTTPS URL to an Azure Key Vault.URL は、Azure Key Vault への絶対 HTTPS URL である必要があります。URL to an Azure Key Vault.Azure Key Vault への URL。
================================================
FILE: src/Sign.Cli/xlf/AzureKeyVaultResources.ko.xlf
================================================
Name of the certificate in Azure Key Vault.Azure Key Vault의 인증서 이름입니다.ClickOnce signing via the legacy .clickonce ZIP workaround is no longer supported. See documentation.레거시 .clickonce ZIP 해결 방법을 통한 ClickOnce 서명은 더 이상 지원되지 않습니다. 설명서를 참조하세요.Use Azure Key Vault.Azure Key Vault를 사용합니다.URL must only contain the protocol and host. (e.g.: https://<vault-name>.vault.azure.net/)URL은 프로토콜과 호스트만 포함해야 합니다. (예: https://<vault-name>.vault.azure.net/)URL must be an absolute HTTPS URL to an Azure Key Vault.URL은 Azure Key Vault의 절대 HTTPS URL이어야 합니다.URL to an Azure Key Vault.Azure Key Vault에 대한 URL입니다.
================================================
FILE: src/Sign.Cli/xlf/AzureKeyVaultResources.pl.xlf
================================================
Name of the certificate in Azure Key Vault.Nazwa certyfikatu w usłudze Azure Key Vault.ClickOnce signing via the legacy .clickonce ZIP workaround is no longer supported. See documentation.Podpisywanie clickOnce za pośrednictwem starszego obejścia .clickonce ZIP nie jest już obsługiwane. Zobacz dokumentację.Use Azure Key Vault.Użyj usługi Azure Key Vault.URL must only contain the protocol and host. (e.g.: https://<vault-name>.vault.azure.net/)Adres URL może zawierać tylko protokół i hosta. (np. https://<vault-name>.vault.azure.net/)URL must be an absolute HTTPS URL to an Azure Key Vault.Adres URL musi być bezwzględnym adresem URL HTTPS do usługi Azure Key Vault.URL to an Azure Key Vault.Adres URL do usługi Azure Key Vault.
================================================
FILE: src/Sign.Cli/xlf/AzureKeyVaultResources.pt-BR.xlf
================================================
Name of the certificate in Azure Key Vault.Nome do certificado no Azure Key Vault.ClickOnce signing via the legacy .clickonce ZIP workaround is no longer supported. See documentation.Não há mais suporte para a assinatura do ClickOnce por meio da solução alternativa .clickonce ZIP herdada. Consulte a documentação.Use Azure Key Vault.Use o Azure Key Vault.URL must only contain the protocol and host. (e.g.: https://<vault-name>.vault.azure.net/)A URL deve conter somente o protocolo e o host. (por exemplo: https://<vault-name>.vault.azure.net/)URL must be an absolute HTTPS URL to an Azure Key Vault.A URL deve ser uma URL HTTPS absoluta para um Azure Key Vault.URL to an Azure Key Vault.URL para um Azure Key Vault.
================================================
FILE: src/Sign.Cli/xlf/AzureKeyVaultResources.ru.xlf
================================================
Name of the certificate in Azure Key Vault.Имя сертификата в Azure Key Vault.ClickOnce signing via the legacy .clickonce ZIP workaround is no longer supported. See documentation.Подписание ClickOnce с помощью устаревшего обходного пути .clickonce ZIP больше не поддерживается. См. документацию.Use Azure Key Vault.Использование Azure Key Vault.URL must only contain the protocol and host. (e.g.: https://<vault-name>.vault.azure.net/)URL-адрес должен содержать только протокол и хост. (например, https://<vault-name>.vault.azure.net/)URL must be an absolute HTTPS URL to an Azure Key Vault.URL-адрес должен быть абсолютным URL-адресом HTTPS для Azure Key Vault.URL to an Azure Key Vault.URL-адрес для Azure Key Vault.
================================================
FILE: src/Sign.Cli/xlf/AzureKeyVaultResources.tr.xlf
================================================
Name of the certificate in Azure Key Vault.Azure Key Vault’taki sertifikanın adı.ClickOnce signing via the legacy .clickonce ZIP workaround is no longer supported. See documentation.Eski .clickonce ZIP geçici çözümü aracılığıyla ClickOnce imzalama işlemi artık desteklenmiyor. Belgelere bakın.Use Azure Key Vault.Azure Key Vault’u kullanın.URL must only contain the protocol and host. (e.g.: https://<vault-name>.vault.azure.net/)URL yalnızca protokolü ve ana bilgisayarı içermeli. (ör. https://<vault-name>.vault.azure.net/)URL must be an absolute HTTPS URL to an Azure Key Vault.URL, bir Azure Key Vault'un mutlak HTTPS URL'si olmalıdır.URL to an Azure Key Vault.Azure Key Vault URL’si.
================================================
FILE: src/Sign.Cli/xlf/AzureKeyVaultResources.zh-Hans.xlf
================================================
Name of the certificate in Azure Key Vault.Azure Key Vault 中的证书名称。ClickOnce signing via the legacy .clickonce ZIP workaround is no longer supported. See documentation.不再支持通过旧版 .clickonce ZIP 解决方法进行 ClickOnce 签名。参阅文档。Use Azure Key Vault.使用 Azure Key Vault。URL must only contain the protocol and host. (e.g.: https://<vault-name>.vault.azure.net/)URL 只能包含协议和主机。(,例如: https://<vault-name>.vault.azure.net/)URL must be an absolute HTTPS URL to an Azure Key Vault.URL 必须是指向 Azure Key Vault 的绝对 HTTPS URL。URL to an Azure Key Vault.指向 Azure Key Vault 的 URL。
================================================
FILE: src/Sign.Cli/xlf/AzureKeyVaultResources.zh-Hant.xlf
================================================
Name of the certificate in Azure Key Vault.Azure Key Vault 中的憑證名稱。ClickOnce signing via the legacy .clickonce ZIP workaround is no longer supported. See documentation.已不再支援透過舊版 .clickonce ZIP 因應措施進行 ClickOnce 簽署。請參閱文件。Use Azure Key Vault.使用 Azure Key Vault。URL must only contain the protocol and host. (e.g.: https://<vault-name>.vault.azure.net/)URL 只能包含通訊協定和主機。(例如: https://<vault-name>.vault.azure.net/)URL must be an absolute HTTPS URL to an Azure Key Vault.URL 必須是指向 Azure Key Vault 的絕對 HTTPS URL。URL to an Azure Key Vault.Azure Key Vault 的 URL。
================================================
FILE: src/Sign.Cli/xlf/CertManagerResources.cs.xlf
================================================
Cryptographic Service Provider containing the private key container. Requires /k or /kmZprostředkovatel kryptografických služeb obsahující kontejner privátního klíče. Vyžaduje se /k nebo /km.Private key container in the user storeKontejner privátního klíče v úložišti uživatele.Private key container in the machine storeKontejner privátního klíče v úložišti počítače.Multiple private key containers provided. Use either /k for user stores or /km for machine storesPoskytlo se několik kontejnerů privátních klíčů. Pro úložiště uživatele použijte /k a pro úložiště počítače použijte /km.Private key container missing while using /csp. Use /k or /km to provide a key container.Při použití /csp chybí kontejner privátního klíče. K poskytnutí kontejneru klíčů použijte /k nebo /km.SHA1 thumprint used to access a certificate from a certificate store (VSIX)Kryptografický otisk SHA1 použitý pro přístup k certifikátu z úložiště certifikátů (VSIX).
================================================
FILE: src/Sign.Cli/xlf/CertManagerResources.de.xlf
================================================
Cryptographic Service Provider containing the private key container. Requires /k or /kmKryptografiedienstanbieter, der den privaten Schlüsselcontainer enthält. Erfordert /k oder /kmPrivate key container in the user storeContainer mit privatem Schlüssel im BenutzerspeicherPrivate key container in the machine storeContainer mit privatem Schlüssel im ComputerspeicherMultiple private key containers provided. Use either /k for user stores or /km for machine storesEs wurden mehrere Container mit privatem Schlüssel bereitgestellt. Verwenden Sie entweder "/k" für Benutzerspeicher oder "/km" für Computerspeicher.Private key container missing while using /csp. Use /k or /km to provide a key container.Der Container für den privaten Schlüssel fehlt bei Verwendung von "/csp". Verwenden Sie "/k" oder "/km", um einen Schlüsselcontainer bereitzustellen.SHA1 thumprint used to access a certificate from a certificate store (VSIX)SHA1-Fingerabdruck für den Zugriff auf ein Zertifikat aus einem Zertifikatspeicher (VSIX)
================================================
FILE: src/Sign.Cli/xlf/CertManagerResources.es.xlf
================================================
Cryptographic Service Provider containing the private key container. Requires /k or /kmProveedor de servicios criptográficos que contiene el contenedor de claves privadas. Requiere /k o /kmPrivate key container in the user storeContenedor de claves privadas en el almacén de usuariosPrivate key container in the machine storeContenedor de claves privadas en el almacén de máquinasMultiple private key containers provided. Use either /k for user stores or /km for machine storesSe proporcionaron varios contenedores de clave privada. Use /k para almacenes de usuarios o /km para almacenes de máquinasPrivate key container missing while using /csp. Use /k or /km to provide a key container.Falta el contenedor de clave privada al usar /csp. Use /k o /km para proporcionar un contenedor de claves.SHA1 thumprint used to access a certificate from a certificate store (VSIX)Huella digital SHA1 usada para obtener acceso a un certificado desde un almacén de certificados (VSIX)
================================================
FILE: src/Sign.Cli/xlf/CertManagerResources.fr.xlf
================================================
Cryptographic Service Provider containing the private key container. Requires /k or /kmFournisseur de services de chiffrement contenant le conteneur de clé privée. Nécessite /k ou /kmPrivate key container in the user storeConteneur de clés privées dans le magasin d’utilisateursPrivate key container in the machine storeConteneur de clés privées dans le magasin d’ordinateursMultiple private key containers provided. Use either /k for user stores or /km for machine storesPlusieurs conteneurs de clé privée ont été fournis. Utiliser /k pour les magasins d’utilisateurs ou /km pour les magasins d’ordinateursPrivate key container missing while using /csp. Use /k or /km to provide a key container.Le conteneur de clé privée est manquant lors de l’utilisation de /csp. Utilisez /k ou /km pour fournir un conteneur de clé.SHA1 thumprint used to access a certificate from a certificate store (VSIX)Empreinte SHA-1 utilisée pour accéder à un certificat à partir d’un magasin de certificats (VSIX)
================================================
FILE: src/Sign.Cli/xlf/CertManagerResources.it.xlf
================================================
Cryptographic Service Provider containing the private key container. Requires /k or /kmProvider del servizio di crittografia contenente il contenitore di chiavi private. Richiede /k o /kmPrivate key container in the user storeContenitore di chiavi private nell'archivio utentiPrivate key container in the machine storeContenitore di chiavi private nell'archivio computerMultiple private key containers provided. Use either /k for user stores or /km for machine storesSono stati specificati più contenitori di chiavi private. Utilizzare /k per gli archivi utente o /km per gli archivi computerPrivate key container missing while using /csp. Use /k or /km to provide a key container.Contenitore di chiavi private mancante durante l'utilizzo di /csp. Usare /k o /km per specificare un contenitore di chiavi.SHA1 thumprint used to access a certificate from a certificate store (VSIX)Identificazione personale SHA1 usata per accedere a un certificato da un archivio certificati (VSIX)
================================================
FILE: src/Sign.Cli/xlf/CertManagerResources.ja.xlf
================================================
Cryptographic Service Provider containing the private key container. Requires /k or /km秘密キー コンテナーを含む暗号化サービス プロバイダー。/k または /km が必要ですPrivate key container in the user storeユーザー ストア内の秘密キー コンテナーPrivate key container in the machine storeコンピューター ストア内の秘密キー コンテナーMultiple private key containers provided. Use either /k for user stores or /km for machine stores複数の秘密キー コンテナーが提供されています。ユーザー ストアには /k、コンピューター ストアの場合は /km を使用しますPrivate key container missing while using /csp. Use /k or /km to provide a key container./csp の使用中に秘密キー コンテナーが見つかりません。キー コンテナーを指定するには、/k または /km を使用します。SHA1 thumprint used to access a certificate from a certificate store (VSIX)証明書ストア (VSIX) から証明書にアクセスするために使用される SHA1 拇印
================================================
FILE: src/Sign.Cli/xlf/CertManagerResources.ko.xlf
================================================
Cryptographic Service Provider containing the private key container. Requires /k or /km프라이빗 키 컨테이너를 포함하는 암호화 서비스 공급자입니다. /k 또는 /km 필요Private key container in the user store사용자 저장소의 프라이빗 키 컨테이너Private key container in the machine store컴퓨터 저장소의 프라이빗 키 컨테이너Multiple private key containers provided. Use either /k for user stores or /km for machine stores여러 프라이빗 키 컨테이너가 제공되었습니다. 사용자 저장소에는 /k를, 컴퓨터 저장소에는 /km 사용Private key container missing while using /csp. Use /k or /km to provide a key container./csp를 사용하는 동안 프라이빗 키 컨테이너가 없습니다. /k 또는 /km를 사용하여 키 컨테이너를 제공합니다.SHA1 thumprint used to access a certificate from a certificate store (VSIX)VSIX(인증서 저장소)에서 인증서에 액세스하는 데 사용되는 SHA1 지문
================================================
FILE: src/Sign.Cli/xlf/CertManagerResources.pl.xlf
================================================
Cryptographic Service Provider containing the private key container. Requires /k or /kmDostawca usług kryptograficznych zawierający kontener kluczy prywatnych. Wymaga opcji /k lub /kmPrivate key container in the user storeKontener kluczy prywatnych w magazynie użytkownikówPrivate key container in the machine storeKontener kluczy prywatnych w magazynie komputerówMultiple private key containers provided. Use either /k for user stores or /km for machine storesPodano wiele kontenerów kluczy prywatnych. Użyj opcji /k dla magazynów użytkowników lub opcji /km dla magazynów komputerówPrivate key container missing while using /csp. Use /k or /km to provide a key container.Brak kontenera kluczy prywatnych podczas używania opcji /csp. Użyj opcji /k lub /km, aby podać kontener kluczy.SHA1 thumprint used to access a certificate from a certificate store (VSIX)Odcisk palca SHA1 używany do uzyskiwania dostępu do certyfikatu z magazynu certyfikatów (VSIX)
================================================
FILE: src/Sign.Cli/xlf/CertManagerResources.pt-BR.xlf
================================================
Cryptographic Service Provider containing the private key container. Requires /k or /kmProvedor de Serviços Criptográficos que contém o contêiner de chave privada. Requer /k ou /kmPrivate key container in the user storeContêiner de chave privada no repositório do usuárioPrivate key container in the machine storeContêiner de chave privada no repositório do computadorMultiple private key containers provided. Use either /k for user stores or /km for machine storesVários contêineres de chave privada fornecidos. Usar /k para repositórios de usuários ou /km para repositórios de computadoresPrivate key container missing while using /csp. Use /k or /km to provide a key container.Contêiner de chave privada ausente ao usar /csp. Use /k ou /km para fornecer um contêiner de chave.SHA1 thumprint used to access a certificate from a certificate store (VSIX)Impressão digital SHA1 usada para acessar um certificado de um repositório de certificados (VSIX)
================================================
FILE: src/Sign.Cli/xlf/CertManagerResources.ru.xlf
================================================
Cryptographic Service Provider containing the private key container. Requires /k or /kmПоставщик служб шифрования, содержащий контейнер закрытого ключа. Требуется /k или /kmPrivate key container in the user storeКонтейнер закрытого ключа в хранилище пользователяPrivate key container in the machine storeКонтейнер закрытого ключа в хранилище компьютераMultiple private key containers provided. Use either /k for user stores or /km for machine storesПредоставлено несколько контейнеров закрытых ключей. Используйте /k для хранилищ пользователей или /km для хранилищ компьютеровPrivate key container missing while using /csp. Use /k or /km to provide a key container.Отсутствует контейнер закрытого ключа при использовании /csp. Используйте /k или /km для предоставления контейнера ключей.SHA1 thumprint used to access a certificate from a certificate store (VSIX)Отпечаток SHA1, используемый для доступа к сертификату из хранилища сертификатов (VSIX)
================================================
FILE: src/Sign.Cli/xlf/CertManagerResources.tr.xlf
================================================
Cryptographic Service Provider containing the private key container. Requires /k or /kmÖzel anahtar kapsayıcısını içeren Şifreleme Hizmeti Sağlayıcısı. /k veya /km gerektirirPrivate key container in the user storeKullanıcı deposundaki özel anahtar kapsayıcısıPrivate key container in the machine storeMakine deposundaki özel anahtar kapsayıcısıMultiple private key containers provided. Use either /k for user stores or /km for machine storesBirden çok özel anahtar kapsayıcısı sağlandı. Kullanıcı depoları için /k veya makine depoları için /km kullanınPrivate key container missing while using /csp. Use /k or /km to provide a key container./csp kullanılırken özel anahtar kapsayıcısı eksik. Anahtar kapsayıcısı sağlamak için /k veya /km kullanın.SHA1 thumprint used to access a certificate from a certificate store (VSIX)Sertifika deposundan (VSIX) bir sertifikaya erişmek için SHA1 parmak izi kullanıldı
================================================
FILE: src/Sign.Cli/xlf/CertManagerResources.zh-Hans.xlf
================================================
Cryptographic Service Provider containing the private key container. Requires /k or /km包含私钥容器的加密服务提供程序。需要 /k 或 /kmPrivate key container in the user store用户存储中的私钥容器Private key container in the machine store计算机存储中的私钥容器Multiple private key containers provided. Use either /k for user stores or /km for machine stores提供了多个私钥容器。对用户存储使用 /k,或者对计算机存储使用 /kmPrivate key container missing while using /csp. Use /k or /km to provide a key container.使用 /csp 时缺少私钥容器。使用 /k 或 /km 提供密钥容器。SHA1 thumprint used to access a certificate from a certificate store (VSIX)用于访问证书存储(VSIX)中的证书的 SHA1 指纹
================================================
FILE: src/Sign.Cli/xlf/CertManagerResources.zh-Hant.xlf
================================================
Cryptographic Service Provider containing the private key container. Requires /k or /km包含私密金鑰容器的加密服務提供者。需要 /k 或 /kmPrivate key container in the user store使用者存放區中的私密金鑰Private key container in the machine store機器存放區中的私密金鑰Multiple private key containers provided. Use either /k for user stores or /km for machine stores已提供多個私密金鑰。使用者存放區使用 /k,機器存放區則使用 /kmPrivate key container missing while using /csp. Use /k or /km to provide a key container.使用 /csp 時遺漏私密金鑰容器。使用 /k 或 /km 來提供金鑰容器。SHA1 thumprint used to access a certificate from a certificate store (VSIX)用以從憑證存放區 (VSIX) 存取憑證的 SHA1 指紋
================================================
FILE: src/Sign.Cli/xlf/CertificateStoreResources.cs.xlf
================================================
PFX, P7B, or CER file containing a certificate and potentially a private key.Soubor PFX, P7B nebo CER obsahující certifikát a potenciálně privátní klíč.{Locked="PFX", "P7B", "CER"} are file extensions.SHA fingerprint used to identify a certificate.Otisk SHA sloužící k identifikaci certifikátu{Locked="SHA"} is a cryptographic algorithm.Password for certificate file.Heslo pro soubor certifikátu.Sign container contents.Podepsat obsah kontejneruCryptographic Service Provider containing the private key container. Requires /k and optionally /km.Zprostředkovatel kryptografických služeb obsahující kontejner privátního klíče. Vyžaduje /k a volitelně /km.{Locked="/k", "/km"} are command line options.Allow user interactions (such as a dialog box) when a private key is accessed.Povolit interakce uživatelů (například dialogové okno) při přístupu k privátnímu klíči.Private key container name.Název kontejneru privátního klíče.Cryptographic Service Provider missing. Use /csp to specify a CSP.Chybí zprostředkovatel kryptografických služeb. K určení zprostředkovatele kryptografických služeb použijte /csp.{Locked="/csp"} is a command line option.Private key container name missing. Use /k to specify a key container name.Chybí název kontejneru privátního klíče. Název kontejneru klíče zadejte pomocí /k.{Locked="/k"} is a command line option.Use a machine-level private key container. (The default is user-level.)Použijte kontejner privátního klíče na úrovni počítače. (Výchozí hodnota je na úrovni uživatele.)
================================================
FILE: src/Sign.Cli/xlf/CertificateStoreResources.de.xlf
================================================
PFX, P7B, or CER file containing a certificate and potentially a private key.PFX-, P7B- oder CER-Datei, die ein Zertifikat und möglicherweise einen privaten Schlüssel enthält.{Locked="PFX", "P7B", "CER"} are file extensions.SHA fingerprint used to identify a certificate.SHA-Fingerabdruck zum Identifizieren eines Zertifikats.{Locked="SHA"} is a cryptographic algorithm.Password for certificate file.Kennwort für Zertifikatdatei.Sign container contents.Inhalt des Containers signieren.Cryptographic Service Provider containing the private key container. Requires /k and optionally /km.Kryptografiedienstanbieter, der den privaten Schlüsselcontainer enthält. Erfordert /k und optional /km.{Locked="/k", "/km"} are command line options.Allow user interactions (such as a dialog box) when a private key is accessed.Benutzerinteraktionen (z. B. ein Dialogfeld) zulassen, wenn auf einen privaten Schlüssel zugegriffen wird.Private key container name.Name des privaten Schlüsselcontainers.Cryptographic Service Provider missing. Use /csp to specify a CSP.Kryptografiedienstanbieter fehlt. Verwenden Sie "/csp", um einen CSP anzugeben.{Locked="/csp"} is a command line option.Private key container name missing. Use /k to specify a key container name.Der Name des privaten Schlüsselcontainers fehlt. Verwenden Sie /k, um einen Schlüsselcontainernamen anzugeben.{Locked="/k"} is a command line option.Use a machine-level private key container. (The default is user-level.)Verwenden Sie einen Container mit privatem Schlüssel auf Computerebene. (Der Standardwert ist Benutzerebene.)
================================================
FILE: src/Sign.Cli/xlf/CertificateStoreResources.es.xlf
================================================
PFX, P7B, or CER file containing a certificate and potentially a private key.Archivo PFX, P7B o CER que contiene un certificado y una clave privada potencialmente.{Locked="PFX", "P7B", "CER"} are file extensions.SHA fingerprint used to identify a certificate.Huella digital SHA usada para identificar un certificado.{Locked="SHA"} is a cryptographic algorithm.Password for certificate file.Contraseña del archivo de certificado.Sign container contents.Firmar el contenido del contenedor.Cryptographic Service Provider containing the private key container. Requires /k and optionally /km.Proveedor de servicios criptográficos que contiene el contenedor de claves privadas. Requiere /k y, opcionalmente, /km.{Locked="/k", "/km"} are command line options.Allow user interactions (such as a dialog box) when a private key is accessed.Permitir interacciones de usuario (como un cuadro de diálogo) cuando se tiene acceso a una clave privada.Private key container name.Nombre de contenedor de clave privada.Cryptographic Service Provider missing. Use /csp to specify a CSP.Falta el proveedor de servicios criptográficos. Use /csp para especificar un CSP.{Locked="/csp"} is a command line option.Private key container name missing. Use /k to specify a key container name.Falta el nombre de contenedor de claves privadas. Use /k para especificar un nombre de contenedor de claves.{Locked="/k"} is a command line option.Use a machine-level private key container. (The default is user-level.)Use un contenedor de claves privadas de nivel de máquina. (El valor predeterminado es de nivel de usuario).
================================================
FILE: src/Sign.Cli/xlf/CertificateStoreResources.fr.xlf
================================================
PFX, P7B, or CER file containing a certificate and potentially a private key.Fichier PFX, P7B ou CER contenant un certificat et éventuellement une clé privée.{Locked="PFX", "P7B", "CER"} are file extensions.SHA fingerprint used to identify a certificate.Empreinte digitale SHA utilisée pour identifier un certificat.{Locked="SHA"} is a cryptographic algorithm.Password for certificate file.Mot de passe du fichier du certificat.Sign container contents.Signer le contenu du conteneur.Cryptographic Service Provider containing the private key container. Requires /k and optionally /km.Fournisseur de services de chiffrement contenant le conteneur de clé privée. Nécessite /k et éventuellement /km.{Locked="/k", "/km"} are command line options.Allow user interactions (such as a dialog box) when a private key is accessed.Autorisez les interactions utilisateur (par exemple, une boîte de dialogue) lorsqu’une clé privée est accessible.Private key container name.Nom de conteneur de clé privée.Cryptographic Service Provider missing. Use /csp to specify a CSP.Le fournisseur de services de chiffrement est manquant. Utilisez /csp pour spécifier un fournisseur de solutions Cloud.{Locked="/csp"} is a command line option.Private key container name missing. Use /k to specify a key container name.Le nom de conteneur de clé privée est manquant. Utilisez /k pour spécifier un nom de conteneur de clé.{Locked="/k"} is a command line option.Use a machine-level private key container. (The default is user-level.)Utilisez un conteneur de clé privée au niveau de l’ordinateur. (La valeur par défaut est au niveau de l’utilisateur.)
================================================
FILE: src/Sign.Cli/xlf/CertificateStoreResources.it.xlf
================================================
PFX, P7B, or CER file containing a certificate and potentially a private key.PFX, P7B o CER file contenente un certificato e potenzialmente una chiave privata.{Locked="PFX", "P7B", "CER"} are file extensions.SHA fingerprint used to identify a certificate.Impronta digitale SHA usata per identificare un certificato.{Locked="SHA"} is a cryptographic algorithm.Password for certificate file.Password per file certificato.Sign container contents.Contenuti contenitore firme.Cryptographic Service Provider containing the private key container. Requires /k and optionally /km.Provider del servizio di crittografia contenente il contenitore di chiavi private. Richiede /k e facoltativamente /km.{Locked="/k", "/km"} are command line options.Allow user interactions (such as a dialog box) when a private key is accessed.Consenti interazioni utente (ad esempio una finestra di dialogo) quando si accede a una chiave privata.Private key container name.Nome del contenitore di chiavi private.Cryptographic Service Provider missing. Use /csp to specify a CSP.Provider del servizio di crittografia mancante. Utilizzare /csp per specificare un CSP.{Locked="/csp"} is a command line option.Private key container name missing. Use /k to specify a key container name.Nome del contenitore di chiavi private mancante. Usare /k per specificare il nome di un contenitore di chiavi.{Locked="/k"} is a command line option.Use a machine-level private key container. (The default is user-level.)Usare un contenitore di chiavi private a livello di computer. (Il valore predefinito è a livello di utente).
================================================
FILE: src/Sign.Cli/xlf/CertificateStoreResources.ja.xlf
================================================
PFX, P7B, or CER file containing a certificate and potentially a private key.証明書と潜在的に秘密キーを含む PFX、P7B、または CER ファイル。{Locked="PFX", "P7B", "CER"} are file extensions.SHA fingerprint used to identify a certificate.証明書の識別に使用される SHA フィンガープリント。{Locked="SHA"} is a cryptographic algorithm.Password for certificate file.証明書ファイルのパスワード。Sign container contents.コンテナーの内容に署名します。Cryptographic Service Provider containing the private key container. Requires /k and optionally /km.秘密キー コンテナーを含む暗号化サービス プロバイダー。/k および必要に応じて /km が必要です。{Locked="/k", "/km"} are command line options.Allow user interactions (such as a dialog box) when a private key is accessed.秘密キーにアクセスするときにユーザーの操作 (ダイアログ ボックスなど) を許可します。Private key container name.秘密キー コンテナー名。Cryptographic Service Provider missing. Use /csp to specify a CSP.暗号化サービス プロバイダーがありません。/csp を使用して CSP を指定します。{Locked="/csp"} is a command line option.Private key container name missing. Use /k to specify a key container name.秘密キー コンテナー名がありません。キー コンテナー名を指定するには、/k を使用します。{Locked="/k"} is a command line option.Use a machine-level private key container. (The default is user-level.)コンピューター レベルの秘密キー コンテナーを使用します。(既定値はユーザー レベルです)。
================================================
FILE: src/Sign.Cli/xlf/CertificateStoreResources.ko.xlf
================================================
PFX, P7B, or CER file containing a certificate and potentially a private key.인증서와 잠재적으로 프라이빗 키가 포함된 PFX, P7B 또는 CER 파일입니다.{Locked="PFX", "P7B", "CER"} are file extensions.SHA fingerprint used to identify a certificate.인증서를 식별하는 데 사용되는 SHA 지문입니다.{Locked="SHA"} is a cryptographic algorithm.Password for certificate file.인증서 파일의 암호입니다.Sign container contents.컨테이너 내용 서명.Cryptographic Service Provider containing the private key container. Requires /k and optionally /km.프라이빗 키 컨테이너를 포함하는 암호화 서비스 공급자입니다. /k 및 선택적으로 /km이 필요합니다.{Locked="/k", "/km"} are command line options.Allow user interactions (such as a dialog box) when a private key is accessed.프라이빗 키에 액세스할 때 사용자 상호 작용(예: 대화 상자)을 허용합니다.Private key container name.프라이빗 키 컨테이너 이름입니다.Cryptographic Service Provider missing. Use /csp to specify a CSP.암호화 서비스 공급자가 없습니다. /csp를 사용하여 CSP를 지정합니다.{Locked="/csp"} is a command line option.Private key container name missing. Use /k to specify a key container name.프라이빗 키 컨테이너 이름이 없습니다. /k를 사용하여 키 컨테이너 이름을 지정합니다.{Locked="/k"} is a command line option.Use a machine-level private key container. (The default is user-level.)컴퓨터 수준 프라이빗 키 컨테이너를 사용합니다. (기본값은 사용자 수준입니다.)
================================================
FILE: src/Sign.Cli/xlf/CertificateStoreResources.pl.xlf
================================================
PFX, P7B, or CER file containing a certificate and potentially a private key.Plik PFX, P7B lub CER zawierający certyfikat i potencjalnie klucz prywatny.{Locked="PFX", "P7B", "CER"} are file extensions.SHA fingerprint used to identify a certificate.Odcisk palca SHA używany do identyfikowania certyfikatu.{Locked="SHA"} is a cryptographic algorithm.Password for certificate file.Hasło dla pliku certyfikatu.Sign container contents.Podpisz zawartość kontenera.Cryptographic Service Provider containing the private key container. Requires /k and optionally /km.Dostawca usług kryptograficznych zawierający kontener kluczy prywatnych. Wymaga opcji /k i opcjonalnie /km.{Locked="/k", "/km"} are command line options.Allow user interactions (such as a dialog box) when a private key is accessed.Zezwalaj na interakcje użytkownika (takie jak okno dialogowe) po uzyskaniu dostępu do klucza prywatnego.Private key container name.Nazwa kontenera kluczy prywatnych.Cryptographic Service Provider missing. Use /csp to specify a CSP.Brak dostawcy usług kryptograficznych. Użyj /csp, aby określić dostawcę CSP.{Locked="/csp"} is a command line option.Private key container name missing. Use /k to specify a key container name.Brak nazwy kontenera kluczy prywatnych. Użyj opcji /k, aby określić nazwę kontenera kluczy.{Locked="/k"} is a command line option.Use a machine-level private key container. (The default is user-level.)Użyj kontenera kluczy prywatnych na poziomie komputera. (Wartość domyślna to poziom użytkownika).
================================================
FILE: src/Sign.Cli/xlf/CertificateStoreResources.pt-BR.xlf
================================================
PFX, P7B, or CER file containing a certificate and potentially a private key.Arquivos PFX, P7B ou CER que contêm um certificado e, possivelmente, uma chave privada.{Locked="PFX", "P7B", "CER"} are file extensions.SHA fingerprint used to identify a certificate.Impressão digital SHA usada para identificar um certificado.{Locked="SHA"} is a cryptographic algorithm.Password for certificate file.Senha do arquivo do certificado.Sign container contents.Assinar o conteúdo do contêiner.Cryptographic Service Provider containing the private key container. Requires /k and optionally /km.Provedor de Serviços Criptográficos que contém o contêiner de chave privada. Requer /k e, opcionalmente, /km.{Locked="/k", "/km"} are command line options.Allow user interactions (such as a dialog box) when a private key is accessed.Permitir interações do usuário (como uma caixa de diálogo) quando uma chave privada for acessada.Private key container name.Nome do contêiner de chave privada.Cryptographic Service Provider missing. Use /csp to specify a CSP.Provedor de Serviços Criptográficos ausente. Use /csp para especificar um CSP.{Locked="/csp"} is a command line option.Private key container name missing. Use /k to specify a key container name.Nome do contêiner de chave privada ausente. Use /k para especificar um nome de contêiner de chave.{Locked="/k"} is a command line option.Use a machine-level private key container. (The default is user-level.)Use um contêiner de chave privada no nível do computador. (O padrão é o nível do usuário.)
================================================
FILE: src/Sign.Cli/xlf/CertificateStoreResources.ru.xlf
================================================
PFX, P7B, or CER file containing a certificate and potentially a private key.Файл PFX, P7B или CER, содержащий сертификат и, возможно, закрытый ключ.{Locked="PFX", "P7B", "CER"} are file extensions.SHA fingerprint used to identify a certificate.Отпечаток SHA, используемый для идентификации сертификата.{Locked="SHA"} is a cryptographic algorithm.Password for certificate file.Пароль для файла сертификата.Sign container contents.Подписать содержимое контейнера.Cryptographic Service Provider containing the private key container. Requires /k and optionally /km.Поставщик служб шифрования, содержащий контейнер закрытого ключа. Требуется /k и /km (необязательно).{Locked="/k", "/km"} are command line options.Allow user interactions (such as a dialog box) when a private key is accessed.Разрешить взаимодействие с пользователем (например, диалоговое окно) при доступе к закрытому ключу.Private key container name.Имя контейнера закрытого ключа.Cryptographic Service Provider missing. Use /csp to specify a CSP.Отсутствует поставщик служб шифрования. Используйте /csp, чтобы указать CSP.{Locked="/csp"} is a command line option.Private key container name missing. Use /k to specify a key container name.Отсутствует имя контейнера закрытого ключа. Используйте /k, чтобы указать имя контейнера ключей.{Locked="/k"} is a command line option.Use a machine-level private key container. (The default is user-level.)Используйте контейнер закрытого ключа на уровне компьютера. (По умолчанию используется уровень пользователя.)
================================================
FILE: src/Sign.Cli/xlf/CertificateStoreResources.tr.xlf
================================================
PFX, P7B, or CER file containing a certificate and potentially a private key.Bir sertifika ve potansiyel olarak özel anahtar içeren PFX, P7B veya CER dosyası.{Locked="PFX", "P7B", "CER"} are file extensions.SHA fingerprint used to identify a certificate.Sertifikayı tanımlamak için kullanılan SHA parmak izi.{Locked="SHA"} is a cryptographic algorithm.Password for certificate file.Sertifika dosyasının parolası.Sign container contents.Kapsayıcı içeriklerini imzalayın.Cryptographic Service Provider containing the private key container. Requires /k and optionally /km.Özel anahtar kapsayıcısını içeren Şifreleme Hizmeti Sağlayıcısı. /k ve alternatif olarak /km gerektirir.{Locked="/k", "/km"} are command line options.Allow user interactions (such as a dialog box) when a private key is accessed.Özel anahtara erişildiğinde kullanıcı etkileşimlerine (iletişim kutusu gibi) izin ver.Private key container name.Özel anahtar kapsayıcısı adı.Cryptographic Service Provider missing. Use /csp to specify a CSP.Şifreleme Hizmeti Sağlayıcısı eksik. CSP belirtmek için /csp kullanın.{Locked="/csp"} is a command line option.Private key container name missing. Use /k to specify a key container name.Özel anahtar kapsayıcısı adı eksik. Anahtar kapsayıcısı adı belirtmek için /k kullanın.{Locked="/k"} is a command line option.Use a machine-level private key container. (The default is user-level.)Makine düzeyinde özel anahtar kapsayıcısı kullanın. (Varsayılan, kullanıcı düzeyidir.)
================================================
FILE: src/Sign.Cli/xlf/CertificateStoreResources.zh-Hans.xlf
================================================
PFX, P7B, or CER file containing a certificate and potentially a private key.包含证书和可能包含私钥的 PFX、P7B 或 CER 文件。{Locked="PFX", "P7B", "CER"} are file extensions.SHA fingerprint used to identify a certificate.用于标识证书的 SHA 指纹。{Locked="SHA"} is a cryptographic algorithm.Password for certificate file.证书文件的密码。Sign container contents.对容器内容进行签名。Cryptographic Service Provider containing the private key container. Requires /k and optionally /km.包含私钥容器的加密服务提供程序。需要 /k 和 /km (可选)。{Locked="/k", "/km"} are command line options.Allow user interactions (such as a dialog box) when a private key is accessed.访问私钥时允许用户交互(如对话框)。Private key container name.私钥容器名称。Cryptographic Service Provider missing. Use /csp to specify a CSP.缺少加密服务提供程序。使用 /csp 指定 CSP。{Locked="/csp"} is a command line option.Private key container name missing. Use /k to specify a key container name.缺少私钥容器名称。使用 /k 指定密钥容器名称。{Locked="/k"} is a command line option.Use a machine-level private key container. (The default is user-level.)使用计算机级私钥容器。(默认值为用户级别。)
================================================
FILE: src/Sign.Cli/xlf/CertificateStoreResources.zh-Hant.xlf
================================================
PFX, P7B, or CER file containing a certificate and potentially a private key.包含憑證和潛在的私用金鑰的 PFX、P7B 或 CER 檔案。{Locked="PFX", "P7B", "CER"} are file extensions.SHA fingerprint used to identify a certificate.用以識別憑證的 SHA 指紋。{Locked="SHA"} is a cryptographic algorithm.Password for certificate file.憑證檔案的密碼。Sign container contents.簽署容器內容。Cryptographic Service Provider containing the private key container. Requires /k and optionally /km.包含私密金鑰容器的加密服務提供者。需要 /k 並選擇性地 /km。{Locked="/k", "/km"} are command line options.Allow user interactions (such as a dialog box) when a private key is accessed.當存取私密金鑰時,允許使用者互動 (例如對話方塊)。Private key container name.私密金鑰容器名稱。Cryptographic Service Provider missing. Use /csp to specify a CSP.遺漏密碼編譯服務提供者。使用 /csp 來指定雲端解決方案提供者。{Locked="/csp"} is a command line option.Private key container name missing. Use /k to specify a key container name.遺漏私密金鑰容器名稱。使用 /k 來指定金鑰容器名稱。{Locked="/k"} is a command line option.Use a machine-level private key container. (The default is user-level.)使用機器層級私密金鑰容器。(預設為使用者等級。)
================================================
FILE: src/Sign.Cli/xlf/Resources.cs.xlf
================================================
Application name (ClickOnce).Název aplikace (ClickOnce).Base directory for files. Overrides the current working directory.Základní adresář pro soubory Přepíše aktuální pracovní adresář.Use Windows Certificate Store or a local certificate file.Použijte úložiště certifikátů systému Windows nebo místní soubor certifikátu.Client ID to authenticate to Azure.ID klienta pro ověření v Azure.Client secret to authenticate to Azure.Tajný kód klienta pro ověření v Azure.The client secret options are obsolete and should no longer be specified.Možnosti tajného kódu klienta jsou zastaralé a již by neměly být zadány.Sign binaries and containers.Podepisovat binární soubory a kontejnery.Azure credential type that will be used. This defaults to DefaultAzureCredential.Typ přihlašovacího údaje Azure, který se použije. Výchozí hodnota je DefaultAzureCredential.Description of the signing certificate.Popis podpisového certifikátu.Description URL of the signing certificate.Popis adresy URL podpisového certifikátu.Digest algorithm to hash files with. Allowed values are 'sha256', 'sha384', and 'sha512'.Algoritmus hodnoty hash pro hash souborů. Povolené hodnoty jsou sha256, sha384 a sha512.{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized.Path to file containing paths of files to sign or to exclude from signing.Cesta k souboru obsahujícímu cesty k souborům, které se mají podepsat nebo vyloučit z podepisování.File(s) to sign.Soubor nebo soubory, které se mají podepsat.Invalid value for {0}. The value must be a fully rooted directory path.Neplatná hodnota pro {0}. Hodnota musí být plně kořenová cesta k adresáři.{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Invalid value for {0}. The value must be a SHA-256, SHA-384, or SHA-512 certificate fingerprint (in hexadecimal).Neplatná hodnota pro {0}. 
Hodnotou musí být otisk certifikátu SHA-256, SHA-384 nebo SHA-512 (v šestnáctkovém tvaru).{NumberedPlaceholder="{0}"} is the option name (e.g.: --certificate-fingerprint). {Locked="SHA-256", "SHA-384", "SHA-512"} are cryptographic hash algorithms.Invalid value for {0}. The value must be 'sha256', 'sha384', or 'sha512'.Neplatná hodnota pro {0}. Hodnota musí být sha256, sha384 nebo sha512.{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized. {NumberedPlaceholder="{0}"} is an option name (e.g.: --file-digest) and should not be localized.The file path cannot be rooted when using a glob. Use a path relative to the working directory (or base directory, if used).Cestu k souboru nelze při použití glob zakořenit. Použijte relativní cestu k pracovnímu adresáři (nebo základnímu adresáři, pokud se používá).Invalid value for {0}. The value must be an absolute HTTPS URL.Invalid value for {0}. The value must be an absolute HTTPS URL.{NumberedPlaceholder="{0}"} is an option name (e.g.: --artifact-signing-endpoint) and should not be localized.Invalid value for {0}. The value must be a number value greater than or equal to 1.Neplatná hodnota pro {0}. Hodnota musí být číselná hodnota větší nebo rovna 1.{NumberedPlaceholder="{0}"} is an option name (e.g.: --max-concurrency) and should not be localized.Invalid value for {0}. The value must be an absolute HTTP or HTTPS URL.Neplatná hodnota pro {0}. Hodnota musí být absolutní adresa URL protokolu HTTP nebo HTTPS.{NumberedPlaceholder="{0}"} is an option name (e.g.: --timestamp-url) and should not be localized.The client id of a user assigned ManagedIdentity.ID klienta Spravované identity přiřazené uživatelem.Managed identity to authenticate to Azure Key. (obsolete)Spravovaná identita pro ověření ve službě Azure Key. 
(zastaralé)The -kvm and --azure-key-vault-managed-identity options are obsolete and should no longer be specified.Možnosti -kvm a --azure-key-vault-managed-identity jsou zastaralé a už by se neměly zadávat.The resource id of a user assigned ManagedIdentity.ID prostředku Spravované identity přiřazené uživatelem.Maximum concurrency.Maximální souběžnost.A file or glob is required.Vyžaduje se soubor nebo glob.Warning: The Microsoft Visual C++ 14 runtime is required but was not detected on your system. Download and install from https://aka.ms/vs/17/release/vc_redist.x64.exeUpozornění: Modul runtime Microsoft Visual C++ 14 je povinný, ale ve vašem systému nebyl zjištěn. Stáhnout a nainstalovat z https://aka.ms/vs/17/release/vc_redist.x64.exe{Locked="https://aka.ms/vs/17/release/vc_redist.x64.exe"} is a URL.No inputs found to sign.Nenašly se žádné vstupy, které by bylo možné podepsat.Output file or directory. If omitted, input files will be overwritten.Výstupní soubor nebo adresář. Pokud je tento parametr vynechán, vstupní soubory budou přepsány.Publisher name (ClickOnce).Název vydavatele (ClickOnce)ClickOnce is a Microsoft deployment technology.Sign CLIPodepsat rozhraní příkazového řádkuSome files do not exist. Try using a different {0} value or a fully qualified file path.Některé soubory neexistují. Zkuste použít jinou hodnotu {0} nebo plně kvalifikovanou cestu k souboru.{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Tenant ID to authenticate to Azure.ID tenanta pro ověření v Azure.Digest algorithm for the RFC 3161 timestamp server. Allowed values are sha256, sha384, and sha512.Algoritmus hodnoty hash pro server časového razítka RFC 3161. 
Povolené hodnoty jsou sha256, sha384 a sha512.{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161), and {Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names should not be localized.RFC 3161 timestamp server URL.Adresa URL serveru časového razítka RFC 3161.{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161) and should not be localized.The trusted-signing command is obsolete. Use the artifact-signing command instead.Příkaz trusted-signing je zastaralý. Místo toho použijte příkaz artifact-signing.Sets the verbosity level. Allowed values are 'none', 'critical', 'error', 'warning', 'information', 'debug', and 'trace'.Nastaví úroveň podrobností. Povolené hodnoty jsou none, critical, error, warning, information, debug a trace.{Locked="none", "critical", "error", "warning", "information", "debug", "trace"} are option values and should not be localized.Only Windows x64 is supported at this time. See https://github.com/dotnet/sign/issues/474 regarding Windows x86 support.V současné době je podporován pouze systém Windows x64. Podívejte se na https://github.com/dotnet/sign/issues/474 týkající se podpory Windows x86.
================================================
FILE: src/Sign.Cli/xlf/Resources.de.xlf
================================================
Application name (ClickOnce).Anwendungsname (ClickOnce).Base directory for files. Overrides the current working directory.Basisverzeichnis für Dateien. Überschreibt das aktuelle Arbeitsverzeichnis.Use Windows Certificate Store or a local certificate file.Verwenden Sie den Windows-Zertifikatspeicher oder eine lokale Zertifikatdatei.Client ID to authenticate to Azure.Client-ID für die Authentifizierung bei Azure.Client secret to authenticate to Azure.Geheimer Clientschlüssel für die Authentifizierung bei Azure.The client secret options are obsolete and should no longer be specified.Die Optionen für geheime Clientschlüssel sind veraltet und sollten nicht mehr angegeben werden.Sign binaries and containers.Signieren Sie Binärdateien und Container.Azure credential type that will be used. This defaults to DefaultAzureCredential.Azure-Anmeldeinformationstyp, der verwendet wird. Standardmäßig ist dies DefaultAzureCredential.Description of the signing certificate.Beschreibung des Signaturzertifikats.Description URL of the signing certificate.Beschreibungs-URL des Signaturzertifikats.Digest algorithm to hash files with. Allowed values are 'sha256', 'sha384', and 'sha512'.Digestalgorithmus zum Hashen von Dateien. Zulässige Werte sind "sha256", "sha384" und "sha512".{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized.Path to file containing paths of files to sign or to exclude from signing.Pfad zur Datei mit Pfaden von Dateien, die signiert oder von der Signierung ausgeschlossen werden sollen.File(s) to sign.Zu signierende Datei(en).Invalid value for {0}. The value must be a fully rooted directory path.Ungültiger Wert für {0}. Der Wert muss ein vollständiger Stammverzeichnispfad sein.{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Invalid value for {0}. The value must be a SHA-256, SHA-384, or SHA-512 certificate fingerprint (in hexadecimal).Ungültiger Wert für {0}. 
Der Wert muss ein SHA-256-, SHA-384- oder SHA-512-Zertifikatfingerabdruck (hexadezimal) sein.{NumberedPlaceholder="{0}"} is the option name (e.g.: --certificate-fingerprint). {Locked="SHA-256", "SHA-384", "SHA-512"} are cryptographic hash algorithms.Invalid value for {0}. The value must be 'sha256', 'sha384', or 'sha512'.Ungültiger Wert für {0}. Der Wert muss "sha256", "sha384" oder "sha512" sein.{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized. {NumberedPlaceholder="{0}"} is an option name (e.g.: --file-digest) and should not be localized.The file path cannot be rooted when using a glob. Use a path relative to the working directory (or base directory, if used).Der Dateipfad kann keinem Stamm zugewiesen werden, wenn ein Glob verwendet wird. Verwenden Sie einen Pfad relativ zum Arbeitsverzeichnis (oder Basisverzeichnis, falls verwendet).Invalid value for {0}. The value must be an absolute HTTPS URL.Invalid value for {0}. The value must be an absolute HTTPS URL.{NumberedPlaceholder="{0}"} is an option name (e.g.: --artifact-signing-endpoint) and should not be localized.Invalid value for {0}. The value must be a number value greater than or equal to 1.Ungültiger Wert für {0}. Der Wert muss ein Zahlenwert größer oder gleich 1 sein.{NumberedPlaceholder="{0}"} is an option name (e.g.: --max-concurrency) and should not be localized.Invalid value for {0}. The value must be an absolute HTTP or HTTPS URL.Ungültiger Wert für {0}. Der Wert muss eine absolute HTTP- oder HTTPS-URL sein.{NumberedPlaceholder="{0}"} is an option name (e.g.: --timestamp-url) and should not be localized.The client id of a user assigned ManagedIdentity.Die Client-ID einer benutzerzugewiesenen ManagedIdentity.Managed identity to authenticate to Azure Key. (obsolete)Verwaltete Identität für die Authentifizierung bei Azure Key. 
(veraltet)The -kvm and --azure-key-vault-managed-identity options are obsolete and should no longer be specified.Die Optionen „-kvm“ und „--azure-key-vault-managed-identity“ sind veraltet und sollten nicht mehr angegeben werden.The resource id of a user assigned ManagedIdentity.Die Ressourcen-ID einer vom Benutzer zugewiesenen ManagedIdentity.Maximum concurrency.Maximale Parallelität.A file or glob is required.Eine Datei oder ein Glob ist erforderlich.Warning: The Microsoft Visual C++ 14 runtime is required but was not detected on your system. Download and install from https://aka.ms/vs/17/release/vc_redist.x64.exeWarnung: Die Microsoft Visual C++ 14-Runtime ist erforderlich, wurde jedoch auf ihrem System nicht erkannt. Von „https://aka.ms/vs/17/release/vc_redist.x64.exe“ herunterladen und installieren{Locked="https://aka.ms/vs/17/release/vc_redist.x64.exe"} is a URL.No inputs found to sign.Es wurden keine Eingaben zum Signieren gefunden.Output file or directory. If omitted, input files will be overwritten.Ausgabedatei oder -verzeichnis. Wenn dies weggelassen wird, werden Eingabedateien überschrieben.Publisher name (ClickOnce).Herausgebername (ClickOnce).ClickOnce is a Microsoft deployment technology.Sign CLICLI signierenSome files do not exist. Try using a different {0} value or a fully qualified file path.Einige Dateien sind nicht vorhanden. Versuchen Sie, einen anderen {0}-Wert oder einen vollqualifizierten Dateipfad zu verwenden.{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Tenant ID to authenticate to Azure.Mandanten-ID für die Authentifizierung bei Azure.Digest algorithm for the RFC 3161 timestamp server. Allowed values are sha256, sha384, and sha512.Digest-Algorithmus für den RFC 3161-Zeitstempelserver. 
Zulässige Werte sind sha256, sha384 und sha512.{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161), and {Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names should not be localized.RFC 3161 timestamp server URL.URL für RFC 3161-Zeitstempelserver.{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161) and should not be localized.The trusted-signing command is obsolete. Use the artifact-signing command instead.Der Befehl „trusted-signing“ ist veraltet. Verwenden Sie stattdessen den Befehl „artifact-signing“.Sets the verbosity level. Allowed values are 'none', 'critical', 'error', 'warning', 'information', 'debug', and 'trace'.Legt den Ausführlichkeitsgrad fest. Zulässige Werte sind "none", "critical", "error", "warning", "information", "debug" und "trace".{Locked="none", "critical", "error", "warning", "information", "debug", "trace"} are option values and should not be localized.Only Windows x64 is supported at this time. See https://github.com/dotnet/sign/issues/474 regarding Windows x86 support.Derzeit wird nur Windows x64 unterstützt. Weitere Informationen hinsichtlich Windows x86-Support finden Sie unter https://github.com/dotnet/sign/issues/474.
================================================
FILE: src/Sign.Cli/xlf/Resources.es.xlf
================================================
Application name (ClickOnce).Nombre de la aplicación (ClickOnce).Base directory for files. Overrides the current working directory.Directorio base para los archivos. Invalida el directorio de trabajo actual.Use Windows Certificate Store or a local certificate file.Use el Almacén de certificados de Windows o un archivo de certificados local.Client ID to authenticate to Azure.Id. de cliente para autenticarse en Azure.Client secret to authenticate to Azure.Secreto de cliente para autenticarse en Azure.The client secret options are obsolete and should no longer be specified.Las opciones de secreto de cliente están obsoletas y ya no deben especificarse.Sign binaries and containers.Firmar archivos binarios y contenedores.Azure credential type that will be used. This defaults to DefaultAzureCredential.Tipo de credencial de Azure que se usará. El valor predeterminado es DefaultAzureCredential.Description of the signing certificate.Descripción del certificado de firma.Description URL of the signing certificate.Descripción de URL del certificado de firma.Digest algorithm to hash files with. Allowed values are 'sha256', 'sha384', and 'sha512'.Algoritmo de resumen con el que se van a aplicar algoritmos hash a los archivos. Los valores permitidos son 'sha256', 'sha384', y 'sha512'.{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized.Path to file containing paths of files to sign or to exclude from signing.Ruta de acceso al archivo que contiene las rutas de acceso de los archivos que se van a firmar o que se van a excluir de la firma.File(s) to sign.Archivos para firmar.Invalid value for {0}. The value must be a fully rooted directory path.Valor no válido para {0}. El valor debe ser una ruta de acceso de directorio totalmente raíz.{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Invalid value for {0}. 
The value must be a SHA-256, SHA-384, or SHA-512 certificate fingerprint (in hexadecimal).Valor no válido para {0}. El valor debe ser una huella de certificado SHA-256, SHA-384 o SHA-512 (en hexadecimal).{NumberedPlaceholder="{0}"} is the option name (e.g.: --certificate-fingerprint). {Locked="SHA-256", "SHA-384", "SHA-512"} are cryptographic hash algorithms.Invalid value for {0}. The value must be 'sha256', 'sha384', or 'sha512'.Valor no válido para {0}. El valor debe ser 'sha256', 'sha384' o 'sha512'.{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized. {NumberedPlaceholder="{0}"} is an option name (e.g.: --file-digest) and should not be localized.The file path cannot be rooted when using a glob. Use a path relative to the working directory (or base directory, if used).La ruta de acceso del archivo no se puede rootear cuando se usa un glob. Use una ruta de acceso relativa al directorio de trabajo (o directorio base, si se usa).Invalid value for {0}. The value must be an absolute HTTPS URL.Invalid value for {0}. The value must be an absolute HTTPS URL.{NumberedPlaceholder="{0}"} is an option name (e.g.: --artifact-signing-endpoint) and should not be localized.Invalid value for {0}. The value must be a number value greater than or equal to 1.Valor no válido para {0}. El valor debe ser un valor numérico mayor o igual que 1.{NumberedPlaceholder="{0}"} is an option name (e.g.: --max-concurrency) and should not be localized.Invalid value for {0}. The value must be an absolute HTTP or HTTPS URL.Valor no válido para {0}. El valor debe ser una dirección URL HTTP o HTTPS absoluta.{NumberedPlaceholder="{0}"} is an option name (e.g.: --timestamp-url) and should not be localized.The client id of a user assigned ManagedIdentity.El identificador de cliente de un ManagedIdentity asignado por el usuario.Managed identity to authenticate to Azure Key. (obsolete)Identidad administrada para autenticarse en Azure Key. 
(obsoleto)The -kvm and --azure-key-vault-managed-identity options are obsolete and should no longer be specified.Las opciones -kvm y --azure-key-vault-managed-identity están obsoletas y ya no deben especificarse.The resource id of a user assigned ManagedIdentity.El identificador de recurso de un ManagedIdentity asignado por el usuario.Maximum concurrency.Simultaneidad máxima.A file or glob is required.Se requiere un archivo o glob.Warning: The Microsoft Visual C++ 14 runtime is required but was not detected on your system. Download and install from https://aka.ms/vs/17/release/vc_redist.x64.exeAdvertencia: se requiere el runtime Microsoft Visual C++ 14, pero no se detectó en el sistema. Descargar e instalar desde https://aka.ms/vs/17/release/vc_redist.x64.exe{Locked="https://aka.ms/vs/17/release/vc_redist.x64.exe"} is a URL.No inputs found to sign.No se encontraron entradas para firmar.Output file or directory. If omitted, input files will be overwritten.Archivo o directorio de salida. Si se omite, se sobrescribirán los archivos de entrada.Publisher name (ClickOnce).Nombre del publicador (ClickOnce).ClickOnce is a Microsoft deployment technology.Sign CLIFirma de la CLISome files do not exist. Try using a different {0} value or a fully qualified file path.Algunos archivos no existen. Pruebe a usar otro valor {0} o una ruta de acceso de archivo completa.{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Tenant ID to authenticate to Azure.Id. de inquilino para autenticarse en Azure.Digest algorithm for the RFC 3161 timestamp server. Allowed values are sha256, sha384, and sha512.Algoritmo de resumen para el servidor de marca de tiempo RFC 3161. 
Los valores permitidos son sha256, sha384 y sha512.{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161), and {Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names should not be localized.RFC 3161 timestamp server URL.Dirección URL del servidor de marca de tiempo RFC 3161.{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161) and should not be localized.The trusted-signing command is obsolete. Use the artifact-signing command instead.El comando trusted-signing está obsoleto. En su lugar, use el comando artifact-signing.Sets the verbosity level. Allowed values are 'none', 'critical', 'error', 'warning', 'information', 'debug', and 'trace'.Establece el nivel de detalle. Los valores permitidos son "none", "critical", "error", "warning", "information", "debug" y "trace".{Locked="none", "critical", "error", "warning", "information", "debug", "trace"} are option values and should not be localized.Only Windows x64 is supported at this time. See https://github.com/dotnet/sign/issues/474 regarding Windows x86 support.En este momento solo se admite Windows x64. Consulta https://github.com/dotnet/sign/issues/474 sobre la compatibilidad con Windows x86.
================================================
FILE: src/Sign.Cli/xlf/Resources.fr.xlf
================================================
Application name (ClickOnce).Nom de l’application (ClickOnce).Base directory for files. Overrides the current working directory.Répertoire de base pour les fichiers. Remplace le répertoire de travail actuel.Use Windows Certificate Store or a local certificate file.Utilisez le magasin de certificats Windows ou un fichier local du certificat.Client ID to authenticate to Azure.ID client pour l’authentification auprès d’Azure.Client secret to authenticate to Azure.Clé secrète client pour s’authentifier auprès d’Azure.The client secret options are obsolete and should no longer be specified.Les options de secret du client sont obsolètes et ne doivent plus être spécifiées.Sign binaries and containers.Signer les fichiers binaires et les conteneurs.Azure credential type that will be used. This defaults to DefaultAzureCredential.Type d’informations d’identification Azure à utiliser. Cela par défaut à DefaultAzureCredential.Description of the signing certificate.Description du certificat de signature.Description URL of the signing certificate.URL de description du certificat de signature.Digest algorithm to hash files with. Allowed values are 'sha256', 'sha384', and 'sha512'.Algorithme Digest pour hacher les fichiers avec. Les valeurs autorisées sont 'sha256', 'sha384' et 'sha512'.{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized.Path to file containing paths of files to sign or to exclude from signing.Chemin du fichier contenant les chemins des fichiers à signer ou à exclure de la signature.File(s) to sign.Fichier(s) à signer.Invalid value for {0}. The value must be a fully rooted directory path.Valeur non valide pour {0}. La valeur doit être un chemin d’accès de répertoire entièrement rooté.{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Invalid value for {0}. 
The value must be a SHA-256, SHA-384, or SHA-512 certificate fingerprint (in hexadecimal).Valeur non valide pour {0}. La valeur doit être une empreinte digitale de certificat SHA-256, SHA-384 ou SHA-512 (en hexadécimal).{NumberedPlaceholder="{0}"} is the option name (e.g.: --certificate-fingerprint). {Locked="SHA-256", "SHA-384", "SHA-512"} are cryptographic hash algorithms.Invalid value for {0}. The value must be 'sha256', 'sha384', or 'sha512'.Valeur invalide pour {0}. La valeur doit être 'sha256', 'sha384' ou 'sha512'.{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized. {NumberedPlaceholder="{0}"} is an option name (e.g.: --file-digest) and should not be localized.The file path cannot be rooted when using a glob. Use a path relative to the working directory (or base directory, if used).Le chemin d’accès du fichier ne peut pas être rooté lors de l’utilisation d’un glob. Utilisez un chemin d’accès relatif au répertoire de travail (ou au répertoire de base, s’il est utilisé).Invalid value for {0}. The value must be an absolute HTTPS URL.Invalid value for {0}. The value must be an absolute HTTPS URL.{NumberedPlaceholder="{0}"} is an option name (e.g.: --artifact-signing-endpoint) and should not be localized.Invalid value for {0}. The value must be a number value greater than or equal to 1.Valeur non valide pour {0}. La valeur doit être une valeur numérique supérieure ou égale à 1.{NumberedPlaceholder="{0}"} is an option name (e.g.: --max-concurrency) and should not be localized.Invalid value for {0}. The value must be an absolute HTTP or HTTPS URL.Valeur non valide pour {0}. La valeur doit être une URL HTTP ou HTTPS absolue.{NumberedPlaceholder="{0}"} is an option name (e.g.: --timestamp-url) and should not be localized.The client id of a user assigned ManagedIdentity.L’ID client d’un utilisateur à qui a été attribuée ManagedIdentity.Managed identity to authenticate to Azure Key. 
(obsolete)Identité managée pour s’authentifier auprès d’Azure Key. (obsolète)The -kvm and --azure-key-vault-managed-identity options are obsolete and should no longer be specified.Les options -kvm et --azure-key-vault-managed-identity sont obsolètes et ne doivent plus être spécifiées.The resource id of a user assigned ManagedIdentity.L’ID de ressource d’une ManagedIdentity attribuée à un utilisateur.Maximum concurrency.Concurrence maximale.A file or glob is required.Un fichier ou un glob est requis.Warning: The Microsoft Visual C++ 14 runtime is required but was not detected on your system. Download and install from https://aka.ms/vs/17/release/vc_redist.x64.exeAvertissement : le runtime Microsoft Visual C++ 14 est requis mais n’a pas été détecté sur votre système. Télécharger et installer à partir de https://aka.ms/vs/17/release/vc_redist.x64.exe{Locked="https://aka.ms/vs/17/release/vc_redist.x64.exe"} is a URL.No inputs found to sign.Aucune entrée à signer.Output file or directory. If omitted, input files will be overwritten.Fichier ou répertoire de sortie. En cas d’omission, les fichiers d’entrée sont remplacés.Publisher name (ClickOnce).Nom du serveur de publication (ClickOnce).ClickOnce is a Microsoft deployment technology.Sign CLISigner l’interface CLISome files do not exist. Try using a different {0} value or a fully qualified file path.Certains fichiers n’existent pas. Essayez d’utiliser une autre valeur {0} ou un chemin de fichier complet.{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Tenant ID to authenticate to Azure.ID du locataire pour s’authentifier auprès d’Azure.Digest algorithm for the RFC 3161 timestamp server. Allowed values are sha256, sha384, and sha512.Algorithme de hachage pour le serveur d’horodatage RFC 3161. 
Les valeurs autorisées sont sha256, sha384 et sha512.{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161), and {Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names should not be localized.RFC 3161 timestamp server URL.URL du serveur d'horodatage conforme au document RFC 3161.{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161) and should not be localized.The trusted-signing command is obsolete. Use the artifact-signing command instead.La commande trusted-signing est obsolète. Utilisez plutôt la commande artifact-signing.Sets the verbosity level. Allowed values are 'none', 'critical', 'error', 'warning', 'information', 'debug', and 'trace'.Définit le niveau de verbosité. Les valeurs autorisées sont 'none', 'critical', 'error', 'warning', 'information', 'debug', et 'trace'.{Locked="none", "critical", "error", "warning", "information", "debug", "trace"} are option values and should not be localized.Only Windows x64 is supported at this time. See https://github.com/dotnet/sign/issues/474 regarding Windows x86 support.Seul Windows x64 est pris en charge pour l’instant. Consultez https://github.com/dotnet/sign/issues/474 concernant la prise en charge de Windows x86.
================================================
FILE: src/Sign.Cli/xlf/Resources.it.xlf
================================================
Application name (ClickOnce).Nome applicazione (ClickOnce).Base directory for files. Overrides the current working directory.Directory di base per i file. Esegue l'override della directory di lavoro corrente.Use Windows Certificate Store or a local certificate file.Utilizzare l'archivio certificati di Windows o un file di certificato locale.Client ID to authenticate to Azure.ID client da autenticare in Azure.Client secret to authenticate to Azure.Segreto client da autenticare in Azure.The client secret options are obsolete and should no longer be specified.Le opzioni del segreto client sono obsolete e non devono essere più specificate.Sign binaries and containers.Consente di firmare file binari e contenitori.Azure credential type that will be used. This defaults to DefaultAzureCredential.Tipo di credenziale di Azure che verrà utilizzato. Verrà utilizzata l'impostazione predefinita DefaultAzureCredential.Description of the signing certificate.Descrizione del certificato di firma.Description URL of the signing certificate.URL descrizione del certificato di firma.Digest algorithm to hash files with. Allowed values are 'sha256', 'sha384', and 'sha512'.Algoritmo di digest con cui eseguire l'hashing dei file. I valori consentiti sono 'sha256', 'sha384' e 'sha512'.{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized.Path to file containing paths of files to sign or to exclude from signing.Percorso del file contenente i percorsi dei file da firmare o escludere dalla firma.File(s) to sign.File da firmare.Invalid value for {0}. The value must be a fully rooted directory path.Valore non valido per {0}. Il valore deve essere un percorso di directory completo.{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Invalid value for {0}. The value must be a SHA-256, SHA-384, or SHA-512 certificate fingerprint (in hexadecimal).Valore non valido per {0}. 
Il valore deve essere un’impronta digitale del certificato SHA-256, SHA-384 o SHA-512 (in formato esadecimale).{NumberedPlaceholder="{0}"} is the option name (e.g.: --certificate-fingerprint). {Locked="SHA-256", "SHA-384", "SHA-512"} are cryptographic hash algorithms.Invalid value for {0}. The value must be 'sha256', 'sha384', or 'sha512'.Valore non valido per {0}. Il valore deve essere 'sha256', 'sha384' o 'sha512'.{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized. {NumberedPlaceholder="{0}"} is an option name (e.g.: --file-digest) and should not be localized.The file path cannot be rooted when using a glob. Use a path relative to the working directory (or base directory, if used).Il percorso del file non può avere accesso root quando si utilizza un GLOB. Usare un percorso relativo alla directory di lavoro (o alla directory di base, se usata).Invalid value for {0}. The value must be an absolute HTTPS URL.Invalid value for {0}. The value must be an absolute HTTPS URL.{NumberedPlaceholder="{0}"} is an option name (e.g.: --artifact-signing-endpoint) and should not be localized.Invalid value for {0}. The value must be a number value greater than or equal to 1.Valore non valido per {0}. Il valore deve essere un numero maggiore o uguale a 1.{NumberedPlaceholder="{0}"} is an option name (e.g.: --max-concurrency) and should not be localized.Invalid value for {0}. The value must be an absolute HTTP or HTTPS URL.Valore non valido per {0}. Il valore deve essere un URL HTTP o HTTPS assoluto.{NumberedPlaceholder="{0}"} is an option name (e.g.: --timestamp-url) and should not be localized.The client id of a user assigned ManagedIdentity.ID client di un elemento ManagedIdentity assegnato dall'utente.Managed identity to authenticate to Azure Key. (obsolete)Identità gestita da autenticare in Azure Key. 
(obsoleto)The -kvm and --azure-key-vault-managed-identity options are obsolete and should no longer be specified.Le opzioni -kvm e --azure-key-vault-managed-identity sono obsolete e non devono più essere specificate.The resource id of a user assigned ManagedIdentity.ID risorsa di un elemento ManagedIdentity assegnato dall'utente.Maximum concurrency.Concorrenza massima.A file or glob is required.È necessario specificare un file o un GLOB.Warning: The Microsoft Visual C++ 14 runtime is required but was not detected on your system. Download and install from https://aka.ms/vs/17/release/vc_redist.x64.exeAvviso: il runtime di Microsoft Visual C++ 14 è obbligatorio ma non è stato rilevato nel sistema. Scarica e installa da https://aka.ms/vs/17/release/vc_redist.x64.exe{Locked="https://aka.ms/vs/17/release/vc_redist.x64.exe"} is a URL.No inputs found to sign.Non sono stati trovati input da firmare.Output file or directory. If omitted, input files will be overwritten.File o directory di output. Se omessi, i file di input verranno sovrascritti.Publisher name (ClickOnce).Nome autore (ClickOnce).ClickOnce is a Microsoft deployment technology.Sign CLIConsente di firmare l'interfaccia della riga di comandoSome files do not exist. Try using a different {0} value or a fully qualified file path.Alcuni file non esistono. Provare a usare un valore di {0} diverso o un percorso di file completo.{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Tenant ID to authenticate to Azure.ID tenant da autenticare in Azure.Digest algorithm for the RFC 3161 timestamp server. Allowed values are sha256, sha384, and sha512.Algoritmo di digest per il server di timestamp RFC 3161. 
I valori consentiti sono sha256, sha384 e sha512.{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161), and {Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names should not be localized.RFC 3161 timestamp server URL.URL del server di timestamp RFC 3161.{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161) and should not be localized.The trusted-signing command is obsolete. Use the artifact-signing command instead.Il comando trusted-signing è obsoleto. Usare invece il comando artifact-signing.Sets the verbosity level. Allowed values are 'none', 'critical', 'error', 'warning', 'information', 'debug', and 'trace'.Imposta il livello di dettaglio. I valori consentiti sono 'none', 'critical', 'error', 'warning', 'information', 'debug' e 'trace'.{Locked="none", "critical", "error", "warning", "information", "debug", "trace"} are option values and should not be localized.Only Windows x64 is supported at this time. See https://github.com/dotnet/sign/issues/474 regarding Windows x86 support.Al momento è supportato solo Windows x64. Vedere https://github.com/dotnet/sign/issues/474 sul supporto per Windows x86.
================================================
FILE: src/Sign.Cli/xlf/Resources.ja.xlf
================================================
Application name (ClickOnce).アプリケーション名 (ClickOnce)。Base directory for files. Overrides the current working directory.ファイルのベース ディレクトリ。 現在の作業ディレクトリをオーバーライドします。Use Windows Certificate Store or a local certificate file.Windows 証明書ストアまたはローカル証明書ファイルを使用します。Client ID to authenticate to Azure.Azure に対して認証するクライアント ID。Client secret to authenticate to Azure.Azure に対して認証するクライアント シークレット。The client secret options are obsolete and should no longer be specified.クライアント シークレットのオプションは廃止されているため、指定しないでください。Sign binaries and containers.バイナリとコンテナーに署名します。Azure credential type that will be used. This defaults to DefaultAzureCredential.使用される Azure 資格情報の種類。この既定値は DefaultAzureCredential です。Description of the signing certificate.署名証明書の説明。Description URL of the signing certificate.署名証明書の説明 URL。Digest algorithm to hash files with. Allowed values are 'sha256', 'sha384', and 'sha512'.ファイルのハッシュに使用するダイジェスト アルゴリズム。使用できる値は、'sha256'、'sha384'、および 'sha512' です。{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized.Path to file containing paths of files to sign or to exclude from signing.署名するファイルまたは署名から除外するファイルのパスを含むファイルへのパス。File(s) to sign.署名するファイル。Invalid value for {0}. The value must be a fully rooted directory path.{0} の値が無効です。値は、完全にルート化されたディレクトリ パスである必要があります。{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Invalid value for {0}. The value must be a SHA-256, SHA-384, or SHA-512 certificate fingerprint (in hexadecimal).{0} の値が無効です。値は SHA-256、SHA-384、または SHA-512 証明書フィンガープリント (16 進数) である必要があります。{NumberedPlaceholder="{0}"} is the option name (e.g.: --certificate-fingerprint). {Locked="SHA-256", "SHA-384", "SHA-512"} are cryptographic hash algorithms.Invalid value for {0}. The value must be 'sha256', 'sha384', or 'sha512'.{0} の値が無効です。値は 'sha256'、'sha384'、または 'sha512' である必要があります。{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized. 
{NumberedPlaceholder="{0}"} is an option name (e.g.: --file-digest) and should not be localized.The file path cannot be rooted when using a glob. Use a path relative to the working directory (or base directory, if used).glob を使用している場合、ファイル パスをルート化できません。作業ディレクトリへの相対パス (または使用されている場合はベース ディレクトリ) を使用してください。Invalid value for {0}. The value must be an absolute HTTPS URL.{0} の値が無効です。値は HTTPS の絶対 URL である必要があります。{NumberedPlaceholder="{0}"} is an option name (e.g.: --artifact-signing-endpoint) and should not be localized.Invalid value for {0}. The value must be a number value greater than or equal to 1.{0} の値が無効です。値は、1 以上の数値である必要があります。{NumberedPlaceholder="{0}"} is an option name (e.g.: --max-concurrency) and should not be localized.Invalid value for {0}. The value must be an absolute HTTP or HTTPS URL.{0}の値が無効です。値は HTTP または HTTPS の絶対 URL である必要があります。{NumberedPlaceholder="{0}"} is an option name (e.g.: --timestamp-url) and should not be localized.The client id of a user assigned ManagedIdentity.ユーザー割り当て済み ManagedIdentity のクライアント ID。Managed identity to authenticate to Azure Key. (obsolete)Azure Key に対して認証するマネージド ID。(旧形式)The -kvm and --azure-key-vault-managed-identity options are obsolete and should no longer be specified.-kvm オプションと --azure-key-vault-managed-identity オプションは旧形式であるため、指定しないでください。The resource id of a user assigned ManagedIdentity.ユーザー割り当て済み ManagedIdentity のリソース ID。Maximum concurrency.最大コンカレンシー。A file or glob is required.ファイルまたは glob が必要です。Warning: The Microsoft Visual C++ 14 runtime is required but was not detected on your system. Download and install from https://aka.ms/vs/17/release/vc_redist.x64.exe警告: Microsoft Visual C++ 14 ランタイムが必要ですが、システムで検出されませんでした。https://aka.ms/vs/17/release/vc_redist.x64.exe からダウンロードしてインストールしてください{Locked="https://aka.ms/vs/17/release/vc_redist.x64.exe"} is a URL.No inputs found to sign.署名する入力が見つかりません。Output file or directory. 
If omitted, input files will be overwritten.出力ファイルまたはディレクトリ。省略すると、入力ファイルが上書きされます。Publisher name (ClickOnce).パブリッシャー名 (ClickOnce)。ClickOnce is a Microsoft deployment technology.Sign CLICLI に署名Some files do not exist. Try using a different {0} value or a fully qualified file path.一部のファイルが存在しません。別の {0} 値または完全修飾ファイル パスを使用してみてください。{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Tenant ID to authenticate to Azure.Azure に対して認証するテナント ID。Digest algorithm for the RFC 3161 timestamp server. Allowed values are sha256, sha384, and sha512.RFC 3161 タイムスタンプ サーバーのダイジェスト アルゴリズム。使用できる値は、sha256、sha384、および sha512 です。{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161), and {Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names should not be localized.RFC 3161 timestamp server URL.RFC 3161 タイムスタンプ サーバーの URL。{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161) and should not be localized.The trusted-signing command is obsolete. Use the artifact-signing command instead.trusted-signing コマンドは廃止されています。代わりに artifact-signing コマンドを使用してください。Sets the verbosity level. Allowed values are 'none', 'critical', 'error', 'warning', 'information', 'debug', and 'trace'.詳細レベルを設定します。指定できる値は、'none'、'critical'、'error'、'warning'、'information'、'debug'、'trace' です。{Locked="none", "critical", "error", "warning", "information", "debug", "trace"} are option values and should not be localized.Only Windows x64 is supported at this time. See https://github.com/dotnet/sign/issues/474 regarding Windows x86 support.現時点でサポートされているのは Windows x64 のみです。Windows x86 のサポートに関する https://github.com/dotnet/sign/issues/474 を参照してください。
================================================
FILE: src/Sign.Cli/xlf/Resources.ko.xlf
================================================
Application name (ClickOnce).애플리케이션 이름(ClickOnce).Base directory for files. Overrides the current working directory.파일의 기본 디렉터리입니다. 현재 작업 디렉터리를 재정의합니다.Use Windows Certificate Store or a local certificate file.Windows 인증서 저장소 또는 로컬 인증서 파일을 사용합니다.Client ID to authenticate to Azure.Azure에 인증할 클라이언트 ID입니다.Client secret to authenticate to Azure.Azure에 인증하는 클라이언트 암호입니다.The client secret options are obsolete and should no longer be specified.클라이언트 비밀 옵션은 사용되지 않으므로 더 이상 지정하지 않아야 합니다.Sign binaries and containers.이진 파일 및 컨테이너에 서명합니다.Azure credential type that will be used. This defaults to DefaultAzureCredential.사용할 Azure 자격 증명 형식입니다. 기본값은 DefaultAzureCredential입니다.Description of the signing certificate.서명 인증서에 대한 설명입니다.Description URL of the signing certificate.서명 인증서의 설명 URL입니다.Digest algorithm to hash files with. Allowed values are 'sha256', 'sha384', and 'sha512'.파일을 해시하는 다이제스트 알고리즘입니다. 허용되는 값은 'sha256', 'sha384' 및 'sha512'입니다.{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized.Path to file containing paths of files to sign or to exclude from signing.서명하거나 서명에서 제외할 파일의 경로가 포함된 파일 경로입니다.File(s) to sign.서명할 파일입니다.Invalid value for {0}. The value must be a fully rooted directory path.{0}에 대한 값이 잘못되었습니다. 값은 완전한 루트 디렉터리 경로여야 합니다.{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Invalid value for {0}. The value must be a SHA-256, SHA-384, or SHA-512 certificate fingerprint (in hexadecimal).{0}의 값이 잘못되었습니다. 값은 SHA-256, SHA-384 또는 SHA-512 인증서 지문(16진수)이어야 합니다.{NumberedPlaceholder="{0}"} is the option name (e.g.: --certificate-fingerprint). {Locked="SHA-256", "SHA-384", "SHA-512"} are cryptographic hash algorithms.Invalid value for {0}. The value must be 'sha256', 'sha384', or 'sha512'.{0}에 대한 값이 잘못되었습니다. 값은 'sha256', 'sha384' 또는 'sha512'.여야 합니다.{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized. 
{NumberedPlaceholder="{0}"} is an option name (e.g.: --file-digest) and should not be localized.The file path cannot be rooted when using a glob. Use a path relative to the working directory (or base directory, if used).glob을 사용할 때 파일 경로를 루팅할 수 없습니다. 작업 디렉터리(또는 사용되는 경우 기본 디렉터리)에 상대적인 경로를 사용하세요.Invalid value for {0}. The value must be an absolute HTTPS URL.{0}에 대한 값이 잘못되었습니다. 값은 절대 HTTPS URL이어야 합니다.{NumberedPlaceholder="{0}"} is an option name (e.g.: --artifact-signing-endpoint) and should not be localized.Invalid value for {0}. The value must be a number value greater than or equal to 1.{0}에 대한 값이 잘못되었습니다. 값은 1보다 크거나 같은 숫자 값이어야 합니다.{NumberedPlaceholder="{0}"} is an option name (e.g.: --max-concurrency) and should not be localized.Invalid value for {0}. The value must be an absolute HTTP or HTTPS URL.{0}에 대한 값이 잘못되었습니다. 값은 절대 HTTP 또는 HTTPS URL이어야 합니다.{NumberedPlaceholder="{0}"} is an option name (e.g.: --timestamp-url) and should not be localized.The client id of a user assigned ManagedIdentity.ManagedIdentity가 할당된 사용자의 클라이언트 ID입니다.Managed identity to authenticate to Azure Key. (obsolete)Azure Key에 인증할 관리 ID입니다. (사용되지 않음)The -kvm and --azure-key-vault-managed-identity options are obsolete and should no longer be specified.-kvm 및 --azure-key-vault-managed-identity 옵션은 사용되지 않으므로 더 이상 지정하지 않아야 합니다.The resource id of a user assigned ManagedIdentity.사용자가 할당한 ManagedIdentity의 리소스 ID입니다.Maximum concurrency.최대 동시성입니다.A file or glob is required.파일 또는 글로브가 필요합니다.Warning: The Microsoft Visual C++ 14 runtime is required but was not detected on your system. Download and install from https://aka.ms/vs/17/release/vc_redist.x64.exe경고: Microsoft Visual C++ 14 런타임이 필요하지만 시스템에서 검색되지 않았습니다. https://aka.ms/vs/17/release/vc_redist.x64.exe 다운로드 및 설치{Locked="https://aka.ms/vs/17/release/vc_redist.x64.exe"} is a URL.No inputs found to sign.서명할 입력이 없습니다.Output file or directory. If omitted, input files will be overwritten.출력 파일 또는 디렉터리입니다. 
생략하면 입력 파일을 덮어씁니다.Publisher name (ClickOnce).게시자 이름(ClickOnce).ClickOnce is a Microsoft deployment technology.Sign CLICLI 서명Some files do not exist. Try using a different {0} value or a fully qualified file path.일부 파일이 존재하지 않습니다. 다른 {0} 값 또는 정규화된 파일 경로를 사용해 보세요.{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Tenant ID to authenticate to Azure.Azure에 인증할 테넌트 ID입니다.Digest algorithm for the RFC 3161 timestamp server. Allowed values are sha256, sha384, and sha512.RFC 3161 타임스탬프 서버용 다이제스트 알고리즘입니다. 허용되는 값은 sha256, sha384 및 sha512입니다.{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161), and {Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names should not be localized.RFC 3161 timestamp server URL.RFC 3161 타임스탬프 서버 URL.{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161) and should not be localized.The trusted-signing command is obsolete. Use the artifact-signing command instead.trusted-signing 명령은 더 이상 사용되지 않습니다. 대신 artifact-signing 명령을 사용하세요.Sets the verbosity level. Allowed values are 'none', 'critical', 'error', 'warning', 'information', 'debug', and 'trace'.세부 정보 표시 수준을 설정합니다. 허용되는 값은 'none', 'critical', 'error', 'warning', 'information', 'debug' 및 'trace'입니다.{Locked="none", "critical", "error", "warning", "information", "debug", "trace"} are option values and should not be localized.Only Windows x64 is supported at this time. See https://github.com/dotnet/sign/issues/474 regarding Windows x86 support.현재 Windows x64만 지원됩니다. Windows x86 지원에 대해서는 https://github.com/dotnet/sign/issues/474를 참조하세요.
================================================
FILE: src/Sign.Cli/xlf/Resources.pl.xlf
================================================
Application name (ClickOnce).Nazwa aplikacji (ClickOnce).Base directory for files. Overrides the current working directory.Katalog podstawowy dla plików. Zastępuje bieżący katalog roboczy.Use Windows Certificate Store or a local certificate file.Użyj magazynu certyfikatów systemu Windows lub lokalnego pliku certyfikatów.Client ID to authenticate to Azure.Identyfikator klienta do uwierzytelniania na platformie Azure.Client secret to authenticate to Azure.Klucz tajny klienta do uwierzytelniania na platformie Azure.The client secret options are obsolete and should no longer be specified.Opcje klucza tajnego klienta są przestarzałe i nie należy ich już określać.Sign binaries and containers.Podpisz pliki binarne i kontenery.Azure credential type that will be used. This defaults to DefaultAzureCredential.Typ poświadczeń platformy Azure, który będzie używany. Wartość domyślna to DefaultAzureCredential.Description of the signing certificate.Opis certyfikatu podpisywania.Description URL of the signing certificate.Adres URL opisu certyfikatu podpisywania.Digest algorithm to hash files with. Allowed values are 'sha256', 'sha384', and 'sha512'.Algorytm skrótu, za pomocą którego jest tworzony skrót plików. Dozwolonymi wartościami są: „sha256”, „sha384”, lub „sha512”.{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized.Path to file containing paths of files to sign or to exclude from signing.Ścieżka do pliku zawierającego ścieżki plików do podpisania lub wykluczenia z podpisywania.File(s) to sign.Pliki do podpisania.Invalid value for {0}. The value must be a fully rooted directory path.Nieprawidłowa wartość dla {0}. Wartość musi być w pełni główną ścieżką katalogu.{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Invalid value for {0}. The value must be a SHA-256, SHA-384, or SHA-512 certificate fingerprint (in hexadecimal).Nieprawidłowa wartość dla algorytmu {0}. 
Wartość musi być odciskiem palca certyfikatu z algorytmem SHA-256, SHA-384 lub SHA-512 (w formacie szesnastkowym).{NumberedPlaceholder="{0}"} is the option name (e.g.: --certificate-fingerprint). {Locked="SHA-256", "SHA-384", "SHA-512"} are cryptographic hash algorithms.Invalid value for {0}. The value must be 'sha256', 'sha384', or 'sha512'.Nieprawidłowa wartość dla {0}. Wartością musi być „sha256”, „sha384”, lub „sha512”.{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized. {NumberedPlaceholder="{0}"} is an option name (e.g.: --file-digest) and should not be localized.The file path cannot be rooted when using a glob. Use a path relative to the working directory (or base directory, if used).Ścieżka pliku nie może być z dostępem do konta root, gdy jest używany element globalny. Użyj ścieżki odnoszącej się do katalogu roboczego (lub katalogu podstawowego, jeśli jest używany).Invalid value for {0}. The value must be an absolute HTTPS URL.Nieprawidłowa wartość dla {0}. Wartość musi być bezwzględnym adresem URL protokołu HTTPS.{NumberedPlaceholder="{0}"} is an option name (e.g.: --artifact-signing-endpoint) and should not be localized.Invalid value for {0}. The value must be a number value greater than or equal to 1.Nieprawidłowa wartość dla {0}. Wartość musi być większa lub równa 1.{NumberedPlaceholder="{0}"} is an option name (e.g.: --max-concurrency) and should not be localized.Invalid value for {0}. The value must be an absolute HTTP or HTTPS URL.Nieprawidłowa wartość dla {0}. Wartość musi być bezwzględnym adresem URL protokołu HTTP lub HTTPS.{NumberedPlaceholder="{0}"} is an option name (e.g.: --timestamp-url) and should not be localized.The client id of a user assigned ManagedIdentity.Identyfikator klienta elementu ManagedIdentity przypisanego przez użytkownika.Managed identity to authenticate to Azure Key. (obsolete)Tożsamość zarządzana do uwierzytelnienia w usłudze Azure Key. 
(przestarzały)The -kvm and --azure-key-vault-managed-identity options are obsolete and should no longer be specified.Opcje -kvm i --azure-key-vault-managed-identity są przestarzałe i nie należy ich już określać.The resource id of a user assigned ManagedIdentity.Identyfikator zasobu elementu ManagedIdentity przypisanego przez użytkownika.Maximum concurrency.Maksymalna współbieżność.A file or glob is required.Wymagany jest plik lub element globalny.Warning: The Microsoft Visual C++ 14 runtime is required but was not detected on your system. Download and install from https://aka.ms/vs/17/release/vc_redist.x64.exeOstrzeżenie: środowisko uruchomieniowe Microsoft Visual C++ 14 jest wymagane, ale nie zostało wykryte w systemie. Pobierz i zainstaluj z witryny https://aka.ms/vs/17/release/vc_redist.x64.exe{Locked="https://aka.ms/vs/17/release/vc_redist.x64.exe"} is a URL.No inputs found to sign.Nie znaleziono danych wejściowych do podpisania.Output file or directory. If omitted, input files will be overwritten.Plik wyjściowy lub katalog. W przypadku pominięcia pliki wejściowe zostaną zastąpione.Publisher name (ClickOnce).Nazwa wydawcy (ClickOnce).ClickOnce is a Microsoft deployment technology.Sign CLIInterfejs wiersza polecenia podpisywaniaSome files do not exist. Try using a different {0} value or a fully qualified file path.Niektóre pliki nie istnieją. Spróbuj użyć innej wartości {0} lub w pełni kwalifikowanej ścieżki pliku.{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Tenant ID to authenticate to Azure.Identyfikator dzierżawy na potrzeby uwierzytelniania na platformie Azure.Digest algorithm for the RFC 3161 timestamp server. Allowed values are sha256, sha384, and sha512.Algorytm skrótu dla serwera znacznika czasu RFC 3161. 
Dozwolonymi wartościami są: sha256, sha384 i sha512.{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161), and {Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names should not be localized.RFC 3161 timestamp server URL.Adres URL serwera znacznika czasu RFC 3161.{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161) and should not be localized.The trusted-signing command is obsolete. Use the artifact-signing command instead.Polecenie zaufanego podpisywania jest przestarzałe. Zamiast tego użyj polecenia podpisywania artefaktu.Sets the verbosity level. Allowed values are 'none', 'critical', 'error', 'warning', 'information', 'debug', and 'trace'.Ustawia poziom szczegółowości. Dozwolone wartości to „none”, „critical”, „error”, „warning”, „information”, „debug” i „trace”.{Locked="none", "critical", "error", "warning", "information", "debug", "trace"} are option values and should not be localized.Only Windows x64 is supported at this time. See https://github.com/dotnet/sign/issues/474 regarding Windows x86 support.Obecnie obsługiwany jest tylko system Windows x64. Zobacz stronę https://github.com/dotnet/sign/issues/474 odnośnie pomocy technicznej systemu Windows x86.
================================================
FILE: src/Sign.Cli/xlf/Resources.pt-BR.xlf
================================================
Application name (ClickOnce).Nome do aplicativo (ClickOnce).Base directory for files. Overrides the current working directory.Diretório base para arquivos. Substitui o diretório de trabalho atual.Use Windows Certificate Store or a local certificate file.Use o Repositório de Certificados do Windows ou um arquivo de certificado local.Client ID to authenticate to Azure.ID do cliente a ser autenticada no Azure.Client secret to authenticate to Azure.Segredo do cliente a ser autenticado no Azure.The client secret options are obsolete and should no longer be specified.As opções de segredo do cliente são obsoletas e não devem mais ser especificadas.Sign binaries and containers.Autenticar contêineres e binários.Azure credential type that will be used. This defaults to DefaultAzureCredential.Tipo de credencial do Azure que será usada. O padrão é DefaultAzureCredential.Description of the signing certificate.Descrição do certificado de autenticação.Description URL of the signing certificate.URL de descrição do certificado de autenticação.Digest algorithm to hash files with. Allowed values are 'sha256', 'sha384', and 'sha512'.Algoritmo de código hash para arquivos de hash. Os valores permitidos são 'sha256', 'sha384' e 'sha512'.{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized.Path to file containing paths of files to sign or to exclude from signing.Caminho para o arquivo que contém os caminhos dos arquivos a serem assinados ou excluídos da assinatura.File(s) to sign.Arquivo(s) para autenticar.Invalid value for {0}. The value must be a fully rooted directory path.Valor inválido para {0}. O valor deve ser um caminho de diretório totalmente desbloqueado por rooting.{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Invalid value for {0}. The value must be a SHA-256, SHA-384, or SHA-512 certificate fingerprint (in hexadecimal).Valor inválido para {0}. 
O valor deve ser uma impressão digital de certificado SHA-256, SHA-384 ou SHA-512 (em hexadecimal).{NumberedPlaceholder="{0}"} is the option name (e.g.: --certificate-fingerprint). {Locked="SHA-256", "SHA-384", "SHA-512"} are cryptographic hash algorithms.Invalid value for {0}. The value must be 'sha256', 'sha384', or 'sha512'.Valor inválido para {0}. O valor deve ser 'sha256', 'sha384' ou 'sha512'.{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized. {NumberedPlaceholder="{0}"} is an option name (e.g.: --file-digest) and should not be localized.The file path cannot be rooted when using a glob. Use a path relative to the working directory (or base directory, if used).O caminho do arquivo não pode estar desbloqueado por rooting ao usar um glob. Use um caminho relativo ao diretório de trabalho (ou diretório base, se usado).Invalid value for {0}. The value must be an absolute HTTPS URL.Valor inválido para {0}. O valor deve ser uma URL HTTPS absoluta.{NumberedPlaceholder="{0}"} is an option name (e.g.: --artifact-signing-endpoint) and should not be localized.Invalid value for {0}. The value must be a number value greater than or equal to 1.Valor inválido para {0}. O valor deve ser um valor numérico maior ou igual a 1.{NumberedPlaceholder="{0}"} is an option name (e.g.: --max-concurrency) and should not be localized.Invalid value for {0}. The value must be an absolute HTTP or HTTPS URL.Valor inválido para {0}. O valor deve ser uma URL HTTP ou HTTPS absoluta.{NumberedPlaceholder="{0}"} is an option name (e.g.: --timestamp-url) and should not be localized.The client id of a user assigned ManagedIdentity.A ID do cliente de um ManagedIdentity atribuído pelo usuário.Managed identity to authenticate to Azure Key. (obsolete)Identidade gerenciada a ser autenticada na Chave do Azure Key. 
(obsoleta)The -kvm and --azure-key-vault-managed-identity options are obsolete and should no longer be specified.As opções -kvm e --azure-key-vault-managed-identity são obsoletas e não devem mais ser especificadas.The resource id of a user assigned ManagedIdentity.A ID de recurso de um ManagedIdentity atribuído pelo usuário.Maximum concurrency.Simultaneidade máxima.A file or glob is required.É necessário um arquivo ou um glob.Warning: The Microsoft Visual C++ 14 runtime is required but was not detected on your system. Download and install from https://aka.ms/vs/17/release/vc_redist.x64.exeAviso: o runtime do Microsoft Visual C++ 14 é necessário, mas não foi detectado no sistema. Baixar e instalar de https://aka.ms/vs/17/release/vc_redist.x64.exe{Locked="https://aka.ms/vs/17/release/vc_redist.x64.exe"} is a URL.No inputs found to sign.Nenhuma entrada encontrada para autenticar.Output file or directory. If omitted, input files will be overwritten.Arquivo ou diretório de saída. Se omitido, os arquivos de entrada serão substituídos.Publisher name (ClickOnce).Nome do editor (ClickOnce).ClickOnce is a Microsoft deployment technology.Sign CLIAutenticar CLISome files do not exist. Try using a different {0} value or a fully qualified file path.Alguns arquivos não existem. Tente usar um valor diferente de {0} ou um caminho de arquivo totalmente qualificado.{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Tenant ID to authenticate to Azure.ID do locatário a ser autenticada no Azure.Digest algorithm for the RFC 3161 timestamp server. Allowed values are sha256, sha384, and sha512.Resumo do algoritmo para o servidor de carimbo de data/hora RFC 3161. 
Os valores permitidos são sha256, sha384 e sha512.{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161), and {Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names should not be localized.RFC 3161 timestamp server URL.URL do servidor de carimbo de data/hora do RFC 3161.{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161) and should not be localized.The trusted-signing command is obsolete. Use the artifact-signing command instead.O comando de assinatura confiável está obsoleto. Em vez disso, use o comando de assinatura de artefato.Sets the verbosity level. Allowed values are 'none', 'critical', 'error', 'warning', 'information', 'debug', and 'trace'.Define o nível de detalhamento. Os valores permitidos são 'none', 'critical', 'error', 'warning', 'information', 'debug' e 'trace'.{Locked="none", "critical", "error", "warning", "information", "debug", "trace"} are option values and should not be localized.Only Windows x64 is supported at this time. See https://github.com/dotnet/sign/issues/474 regarding Windows x86 support.No momento, somente o Windows x64 é compatível. Consulte https://github.com/dotnet/sign/issues/474 para obter suporte com o Windows x86.
================================================
FILE: src/Sign.Cli/xlf/Resources.ru.xlf
================================================
Application name (ClickOnce).Имя приложения (ClickOnce).Base directory for files. Overrides the current working directory.Базовый каталог для файлов. Переопределяет текущий рабочий каталог.Use Windows Certificate Store or a local certificate file.Используйте хранилище сертификатов Windows или локальный файл сертификата.Client ID to authenticate to Azure.ИД клиента для проверки подлинности в Azure.Client secret to authenticate to Azure.Секрет клиента для проверки подлинности в Azure.The client secret options are obsolete and should no longer be specified.Параметры секрета клиента устарели и больше не должны указываться.Sign binaries and containers.Подписывание двоичных файлов и контейнеров.Azure credential type that will be used. This defaults to DefaultAzureCredential.Тип учетных данных Azure, который будет использоваться. По умолчанию: DefaultAzureCredential.Description of the signing certificate.Описание сертификата для подписиDescription URL of the signing certificate.Описание URL-адреса сертификата для подписиDigest algorithm to hash files with. Allowed values are 'sha256', 'sha384', and 'sha512'.Алгоритм дайджеста для хэширования файлов. Допустимые значения: "sha256", "sha384" и "sha512".{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized.Path to file containing paths of files to sign or to exclude from signing.Путь к файлу, содержащему пути к файлам, которые нужно подписать или исключить из подписания.File(s) to sign.Файлы для подписи.Invalid value for {0}. The value must be a fully rooted directory path.Недопустимое значение для {0}. Значение должно быть полным корневым путем к каталогу.{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Invalid value for {0}. The value must be a SHA-256, SHA-384, or SHA-512 certificate fingerprint (in hexadecimal).Недопустимое значение для {0}. 
Значение должно быть отпечатком сертификата SHA-256, SHA-384 или SHA-512 (в шестнадцатеричном формате).{NumberedPlaceholder="{0}"} is the option name (e.g.: --certificate-fingerprint). {Locked="SHA-256", "SHA-384", "SHA-512"} are cryptographic hash algorithms.Invalid value for {0}. The value must be 'sha256', 'sha384', or 'sha512'.Недопустимое значение для {0}. Необходимо использовать "sha256", "sha384" или "sha512".{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized. {NumberedPlaceholder="{0}"} is an option name (e.g.: --file-digest) and should not be localized.The file path cannot be rooted when using a glob. Use a path relative to the working directory (or base directory, if used).Путь к файлу не может быть корневым при использовании стандартной маски. Используйте путь относительно рабочего каталога (или базового каталога, если он используется).Invalid value for {0}. The value must be an absolute HTTPS URL.Недопустимое значение для {0}. Значение должно быть абсолютным URL-адресом HTTPS.{NumberedPlaceholder="{0}"} is an option name (e.g.: --artifact-signing-endpoint) and should not be localized.Invalid value for {0}. The value must be a number value greater than or equal to 1.Недопустимое значение для "{0}". Значение должно быть числом, большим или равным 1.{NumberedPlaceholder="{0}"} is an option name (e.g.: --max-concurrency) and should not be localized.Invalid value for {0}. The value must be an absolute HTTP or HTTPS URL.Недопустимое значение для {0}. Значение должно быть абсолютным URL-адресом HTTP или HTTPS.{NumberedPlaceholder="{0}"} is an option name (e.g.: --timestamp-url) and should not be localized.The client id of a user assigned ManagedIdentity.Идентификатор клиента для управляемого удостоверения, назначаемого пользователем.Managed identity to authenticate to Azure Key. (obsolete)Управляемое удостоверение для проверки подлинности в Azure Key. 
(устарело)The -kvm and --azure-key-vault-managed-identity options are obsolete and should no longer be specified.Параметры -kvm и --azure-key-vault-managed-identity устарели и больше не должны указываться.The resource id of a user assigned ManagedIdentity.ИД ресурса для управляемого удостоверения, назначаемого пользователем.Maximum concurrency.Максимальный параллелизм.A file or glob is required.Требуется файл или стандартная маска.Warning: The Microsoft Visual C++ 14 runtime is required but was not detected on your system. Download and install from https://aka.ms/vs/17/release/vc_redist.x64.exeВнимание! Среда выполнения Microsoft Visual C++ 14 является обязательной, но не обнаружена в вашей системе. Скачайте и установите ее: https://aka.ms/vs/17/release/vc_redist.x64.exe{Locked="https://aka.ms/vs/17/release/vc_redist.x64.exe"} is a URL.No inputs found to sign.Не найдены входящие данные для подписи.Output file or directory. If omitted, input files will be overwritten.Выходной файл или каталог. Если этот параметр опущен, входные файлы будут перезаписаны.Publisher name (ClickOnce).Имя издателя (ClickOnce).ClickOnce is a Microsoft deployment technology.Sign CLIПодписывание CLISome files do not exist. Try using a different {0} value or a fully qualified file path.Некоторые файлы не существуют. Попробуйте использовать другое значение {0} или полный путь к файлу.{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Tenant ID to authenticate to Azure.ИД клиента для проверки подлинности в Azure.Digest algorithm for the RFC 3161 timestamp server. Allowed values are sha256, sha384, and sha512.Алгоритм дайджеста для сервера меток времени RFC 3161. 
Допустимые значения: sha256, sha384 и sha512.{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161), and {Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names should not be localized.RFC 3161 timestamp server URL.URL-адрес сервера меток времени RFC 3161.{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161) and should not be localized.The trusted-signing command is obsolete. Use the artifact-signing command instead.Команда доверенного подписания устарела. Вместо нее применяйте команду подписания артефактов.Sets the verbosity level. Allowed values are 'none', 'critical', 'error', 'warning', 'information', 'debug', and 'trace'.Задает уровень детализации. Допустимые значения: "none" (нет), "critical" (критическое), "error" (ошибки), "warning" (предупреждения), "information" (информация), "debug" (отладка) и "trace" (трассировка).{Locked="none", "critical", "error", "warning", "information", "debug", "trace"} are option values and should not be localized.Only Windows x64 is supported at this time. See https://github.com/dotnet/sign/issues/474 regarding Windows x86 support.Сейчас поддерживается только Windows x64. Сведения о поддержке Windows x86 см. на странице https://github.com/dotnet/sign/issues/474.
================================================
FILE: src/Sign.Cli/xlf/Resources.tr.xlf
================================================
Application name (ClickOnce).Uygulama adı (ClickOnce).Base directory for files. Overrides the current working directory.Dosyalar için temel dizin. Geçerli çalışma dizinini geçersiz kılar.Use Windows Certificate Store or a local certificate file.Windows Sertifika Deposu veya yerel bir sertifika dosyası kullanın.Client ID to authenticate to Azure.Azure için kimlik doğrulamak amacıyla kullanılan istemci kimliği.Client secret to authenticate to Azure.Azure için kimlik doğrulamak amacıyla kullanılan gizli anahtar.The client secret options are obsolete and should no longer be specified.Gizli anahtar seçenekleri kullanımdan kaldırıldı ve artık belirtilmemelidir.Sign binaries and containers.İkili dosyaları ve kapsayıcıları imzalayın.Azure credential type that will be used. This defaults to DefaultAzureCredential.Kullanılacak Azure kimlik bilgisi türü. Bu varsayılan DefaultAzureCredential değerine ayarlanır.Description of the signing certificate.İmzalama sertifikasının açıklaması.Description URL of the signing certificate.İmzalama sertifikasının açıklama URL’si.Digest algorithm to hash files with. Allowed values are 'sha256', 'sha384', and 'sha512'.Dosyaların karmasını oluşturmak için kullanılan karma algoritması. İzin verilen değerler şunlardır: 'sha256', 'sha384' ve 'sha512'.{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized.Path to file containing paths of files to sign or to exclude from signing.İmzalanacak veya imzalamadan dışlanacak dosyaların yollarını içeren dosya yolu.File(s) to sign.İmzalanacak dosyalar.Invalid value for {0}. The value must be a fully rooted directory path.{0} için geçersiz değer. Değer, tam olarak kök erişim izni verilmiş bir dizin yolu olmalıdır.{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Invalid value for {0}. The value must be a SHA-256, SHA-384, or SHA-512 certificate fingerprint (in hexadecimal).{0} için geçersiz değer. 
Değer bir SHA-256, SHA-384 veya SHA-512 sertifikası parmak izi (onaltılı olarak) olmalıdır.{NumberedPlaceholder="{0}"} is the option name (e.g.: --certificate-fingerprint). {Locked="SHA-256", "SHA-384", "SHA-512"} are cryptographic hash algorithms.Invalid value for {0}. The value must be 'sha256', 'sha384', or 'sha512'.{0} için geçersiz değer. Değer 'sha256', 'sha384' veya 'sha512' olmalıdır.{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized. {NumberedPlaceholder="{0}"} is an option name (e.g.: --file-digest) and should not be localized.The file path cannot be rooted when using a glob. Use a path relative to the working directory (or base directory, if used).Glob kullanılırken dosya yoluna kök erişim izni verilemez. Çalışma diziniyle (veya kullanılıyorsa, temel dizinle) ilişkili bir yol kullanın.Invalid value for {0}. The value must be an absolute HTTPS URL.Invalid value for {0}. The value must be an absolute HTTPS URL.{NumberedPlaceholder="{0}"} is an option name (e.g.: --artifact-signing-endpoint) and should not be localized.Invalid value for {0}. The value must be a number value greater than or equal to 1.{0} için geçersiz değer. Değer, 1’den büyük veya buna eşit bir sayı değeri olmalıdır.{NumberedPlaceholder="{0}"} is an option name (e.g.: --max-concurrency) and should not be localized.Invalid value for {0}. The value must be an absolute HTTP or HTTPS URL.{0} için geçersiz değer. Değer mutlak bir HTTP veya HTTPS URL’si olmalıdır.{NumberedPlaceholder="{0}"} is an option name (e.g.: --timestamp-url) and should not be localized.The client id of a user assigned ManagedIdentity.Kullanıcı tarafından atanan yönetilen kimliğin istemci kimliği.Managed identity to authenticate to Azure Key. (obsolete)Azure Key Vault için kimlik doğrulamak amacıyla kullanılan yönetilen kimlik. 
(kullanımdan kaldırıldı)The -kvm and --azure-key-vault-managed-identity options are obsolete and should no longer be specified.-kvm ve --azure-key-vault-managed-identity seçenekleri kullanımdan kaldırıldı ve artık belirtilmemelidir.The resource id of a user assigned ManagedIdentity.Kullanıcı tarafından atanan yönetilen kimliğin kaynak kimliği.Maximum concurrency.Maksimum eşzamanlılık.A file or glob is required.Dosya veya glob gerekiyor.Warning: The Microsoft Visual C++ 14 runtime is required but was not detected on your system. Download and install from https://aka.ms/vs/17/release/vc_redist.x64.exeUyarı: Microsoft Visual C++ 14 çalışma zamanı gerekiyor ancak sisteminiz üzerinde algılanmadı. İndirme ve yükleme için https://aka.ms/vs/17/release/vc_redist.x64.exe{Locked="https://aka.ms/vs/17/release/vc_redist.x64.exe"} is a URL.No inputs found to sign.İmzalanacak giriş dosyası yok.Output file or directory. If omitted, input files will be overwritten.Çıkış dosyası veya dizini. Atlanırsa, giriş dosyalarının üzerine yazılır.Publisher name (ClickOnce).Yayımcı adı (ClickOnce).ClickOnce is a Microsoft deployment technology.Sign CLICLI’yı imzalaSome files do not exist. Try using a different {0} value or a fully qualified file path.Bazı dosyalar mevcut değil. Farklı bir {0} değeri veya tam bir dosya yolu kullanmayı deneyin.{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Tenant ID to authenticate to Azure.Azure için kimlik doğrulamak amacıyla kullanılan kiracı kimliği.Digest algorithm for the RFC 3161 timestamp server. Allowed values are sha256, sha384, and sha512.RFC 3161 zaman damgası sunucusu için karma algoritması. İzin verilen değerler şunlardır. 
sha256, sha384 ve sha512.{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161), and {Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names should not be localized.RFC 3161 timestamp server URL.RFC 3161 zaman damgası sunucusu URL’si.{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161) and should not be localized.The trusted-signing command is obsolete. Use the artifact-signing command instead.Güvenilen imzalama komutu artık kullanılmıyor. Bunun yerine yapıt imzalama komutunu kullanın.Sets the verbosity level. Allowed values are 'none', 'critical', 'error', 'warning', 'information', 'debug', and 'trace'.Ayrıntı düzeyini ayarlar. İzin verilen değerler: 'none', 'critical', 'error', 'warning', 'information', 'debug' ve 'trace'.{Locked="none", "critical", "error", "warning", "information", "debug", "trace"} are option values and should not be localized.Only Windows x64 is supported at this time. See https://github.com/dotnet/sign/issues/474 regarding Windows x86 support.Şu anda yalnızca Windows x64 destekleniyor. Windows x86 desteği için https://github.com/dotnet/sign/issues/474 sayfasına bakın.
================================================
FILE: src/Sign.Cli/xlf/Resources.zh-Hans.xlf
================================================
Application name (ClickOnce).应用程序名称(ClickOnce)。Base directory for files. Overrides the current working directory.文件的基目录。替代当前工作目录。Use Windows Certificate Store or a local certificate file.使用 Windows 证书存储或本地证书文件。Client ID to authenticate to Azure.要向 Azure 进行身份验证的客户端 ID。Client secret to authenticate to Azure.要向 Azure 进行身份验证的客户端密码。The client secret options are obsolete and should no longer be specified.客户端密码选项已过时,不应再指定。Sign binaries and containers.对二进制文件和容器进行签名。Azure credential type that will be used. This defaults to DefaultAzureCredential.将使用的 Azure 凭据类型。此项默认为 DefaultAzureCredential。Description of the signing certificate.签名证书的说明。Description URL of the signing certificate.签名证书的说明 URL。Digest algorithm to hash files with. Allowed values are 'sha256', 'sha384', and 'sha512'.用于对文件进行哈希处理的摘要算法。允许的值为 "sha256"、"sha384" 和 "sha512"。{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized.Path to file containing paths of files to sign or to exclude from signing.包含要签名或从签名中排除的文件路径的文件的路径。File(s) to sign.要签名的文件。Invalid value for {0}. The value must be a fully rooted directory path.{0} 的值无效。该值必须是完整的根目录路径。{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Invalid value for {0}. The value must be a SHA-256, SHA-384, or SHA-512 certificate fingerprint (in hexadecimal).{0} 的值无效。该值必须是 SHA-256、SHA-384 或 SHA-512 证书指纹(十六进制)。{NumberedPlaceholder="{0}"} is the option name (e.g.: --certificate-fingerprint). {Locked="SHA-256", "SHA-384", "SHA-512"} are cryptographic hash algorithms.Invalid value for {0}. The value must be 'sha256', 'sha384', or 'sha512'.{0} 的值无效。值必须为 "sha256"、"sha384" 或 "sha512"。{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized. {NumberedPlaceholder="{0}"} is an option name (e.g.: --file-digest) and should not be localized.The file path cannot be rooted when using a glob. 
Use a path relative to the working directory (or base directory, if used).使用 glob 时,文件路径不能为根路径。请使用相对于工作目录(或基目录,如果使用)的路径。Invalid value for {0}. The value must be an absolute HTTPS URL.Invalid value for {0}. The value must be an absolute HTTPS URL.{NumberedPlaceholder="{0}"} is an option name (e.g.: --artifact-signing-endpoint) and should not be localized.Invalid value for {0}. The value must be a number value greater than or equal to 1.{0} 的值无效。该值必须是大于或等于 1 的数值。{NumberedPlaceholder="{0}"} is an option name (e.g.: --max-concurrency) and should not be localized.Invalid value for {0}. The value must be an absolute HTTP or HTTPS URL.{0} 的值无效。值必须是绝对 HTTP 或 HTTPS URL。{NumberedPlaceholder="{0}"} is an option name (e.g.: --timestamp-url) and should not be localized.The client id of a user assigned ManagedIdentity.用户分配的 ManagedIdentity 的客户端 ID。Managed identity to authenticate to Azure Key. (obsolete)要向 Azure Key 进行身份验证的托管标识。(已过时)The -kvm and --azure-key-vault-managed-identity options are obsolete and should no longer be specified.-kvm 和 --azure-key-vault-managed-identity 选项已过时,不应再指定。The resource id of a user assigned ManagedIdentity.用户分配的 ManagedIdentity 的资源 ID。Maximum concurrency.最大并发。A file or glob is required.文件或 glob 是必需的。Warning: The Microsoft Visual C++ 14 runtime is required but was not detected on your system. Download and install from https://aka.ms/vs/17/release/vc_redist.x64.exe警告: Microsoft Visual C++ 14 运行时是必需项,但在系统上未检测到。从 https://aka.ms/vs/17/release/vc_redist.x64.exe 下载并安装{Locked="https://aka.ms/vs/17/release/vc_redist.x64.exe"} is a URL.No inputs found to sign.找不到要签名的输入。Output file or directory. If omitted, input files will be overwritten.输出文件或目录。如果省略,则输入文件将被覆盖。Publisher name (ClickOnce).发布服务器名称(ClickOnce)。ClickOnce is a Microsoft deployment technology.Sign CLI对 CLI 进行签名Some files do not exist. 
Try using a different {0} value or a fully qualified file path.某些文件不存在。请尝试使用其他 {0} 值或完全限定的文件路径。{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Tenant ID to authenticate to Azure.要向 Azure 进行身份验证的租户 ID。Digest algorithm for the RFC 3161 timestamp server. Allowed values are sha256, sha384, and sha512.RFC 3161 时间戳服务器的摘要算法。允许的值为 sha256、sha384 和 sha512。{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161), and {Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names should not be localized.RFC 3161 timestamp server URL.RFC 3161 时间戳服务器 URL。{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161) and should not be localized.The trusted-signing command is obsolete. Use the artifact-signing command instead.受信任签名命令已过时。请改用工件签名命令。Sets the verbosity level. Allowed values are 'none', 'critical', 'error', 'warning', 'information', 'debug', and 'trace'.设置详细级别。允许的值为 "none"、"critical"、"error"、"warning"、"information"、"debug" 和 "trace"。{Locked="none", "critical", "error", "warning", "information", "debug", "trace"} are option values and should not be localized.Only Windows x64 is supported at this time. See https://github.com/dotnet/sign/issues/474 regarding Windows x86 support.目前仅支持 Windows x64。有关 Windows x86 支持,请参阅 https://github.com/dotnet/sign/issues/474。
================================================
FILE: src/Sign.Cli/xlf/Resources.zh-Hant.xlf
================================================
Application name (ClickOnce).應用程式名稱 (ClickOnce)。Base directory for files. Overrides the current working directory.檔案的基底目錄。覆寫目前的工作目錄。Use Windows Certificate Store or a local certificate file.使用 Windows 憑證存放區或本機憑證檔案。Client ID to authenticate to Azure.要向 Azure 進行驗證的用戶端識別碼。Client secret to authenticate to Azure.要向 Azure 進行驗證的用戶端密碼。The client secret options are obsolete and should no longer be specified.用戶端密碼選項已過時,應不再指定。Sign binaries and containers.簽署二進位檔和容器。Azure credential type that will be used. This defaults to DefaultAzureCredential.將使用的 Azure 認證類型。這會預設為 DefaultAzureCredential。Description of the signing certificate.簽署憑證的描述。Description URL of the signing certificate.簽署憑證的描述 URL。Digest algorithm to hash files with. Allowed values are 'sha256', 'sha384', and 'sha512'.用於雜湊檔案的摘要演算法。允許的值為 'sha256'、'sha384' 和 'sha512'。{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized.Path to file containing paths of files to sign or to exclude from signing.包含要簽署或從簽署排除之檔案路徑的檔案路徑。File(s) to sign.要簽署的檔案。Invalid value for {0}. The value must be a fully rooted directory path.{0} 的值無效。值必須是完整的根目錄路徑。{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Invalid value for {0}. The value must be a SHA-256, SHA-384, or SHA-512 certificate fingerprint (in hexadecimal).{0} 的值無效。該值必須是 SHA-256、SHA-384 或 SHA-512 憑證指紋 (十六進位格式)。{NumberedPlaceholder="{0}"} is the option name (e.g.: --certificate-fingerprint). {Locked="SHA-256", "SHA-384", "SHA-512"} are cryptographic hash algorithms.Invalid value for {0}. The value must be 'sha256', 'sha384', or 'sha512'.{0} 的值無效。值必須是 'sha256'、'sha384' 或 'sha512'。{Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names and should not be localized. {NumberedPlaceholder="{0}"} is an option name (e.g.: --file-digest) and should not be localized.The file path cannot be rooted when using a glob. 
Use a path relative to the working directory (or base directory, if used).使用 Glob 時,檔案路徑不可為根目錄。請使用工作目錄或基底目錄 (若使用該目錄) 的相對路徑。Invalid value for {0}. The value must be an absolute HTTPS URL.Invalid value for {0}. The value must be an absolute HTTPS URL.{NumberedPlaceholder="{0}"} is an option name (e.g.: --artifact-signing-endpoint) and should not be localized.Invalid value for {0}. The value must be a number value greater than or equal to 1.{0} 的值無效。值必須是大或等於 1 的數值。{NumberedPlaceholder="{0}"} is an option name (e.g.: --max-concurrency) and should not be localized.Invalid value for {0}. The value must be an absolute HTTP or HTTPS URL.{0} 的值無效。值必須是絕對 HTTP 或 HTTPS URL。{NumberedPlaceholder="{0}"} is an option name (e.g.: --timestamp-url) and should not be localized.The client id of a user assigned ManagedIdentity.使用者指派 ManagedIdentity 的用戶端識別碼。Managed identity to authenticate to Azure Key. (obsolete)要向 Azure Key 進行驗證的受控識別。(已過時)The -kvm and --azure-key-vault-managed-identity options are obsolete and should no longer be specified.-kvm 和 --azure-key-vault-managed-identity 選項已過時,不應再指定。The resource id of a user assigned ManagedIdentity.使用者指派 ManagedIdentity 的資源識別碼。Maximum concurrency.並行最大值。A file or glob is required.需要檔案或 Glob。Warning: The Microsoft Visual C++ 14 runtime is required but was not detected on your system. Download and install from https://aka.ms/vs/17/release/vc_redist.x64.exe警告: 需要 Microsoft Visual C++ 14 執行階段,但在您的系統上偵測不到。從 https://aka.ms/vs/17/release/vc_redist.x64.exe 下載並安裝{Locked="https://aka.ms/vs/17/release/vc_redist.x64.exe"} is a URL.No inputs found to sign.找不到要簽署的輸入。Output file or directory. If omitted, input files will be overwritten.輸出檔案或目錄。如果省略,則會覆寫輸入檔案。Publisher name (ClickOnce).發行者名稱 (ClickOnce)。ClickOnce is a Microsoft deployment technology.Sign CLI簽署 CLISome files do not exist. 
Try using a different {0} value or a fully qualified file path.某些檔案不存在。請嘗試使用不同的 {0} 值或完整檔案路徑。{NumberedPlaceholder="{0}"} is an option name (e.g.: --base-directory) and should not be localized.Tenant ID to authenticate to Azure.要向 Azure 進行驗證的租用戶識別碼。Digest algorithm for the RFC 3161 timestamp server. Allowed values are sha256, sha384, and sha512.RFC 3161 時間戳記伺服器的摘要演算法。允許的值為 sha256、sha384 和 sha512。{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161), and {Locked="sha256", "sha384", "sha512"} are cryptographic hash algorithm names should not be localized.RFC 3161 timestamp server URL.RFC 3161 時間戳記伺服器 URL。{Locked="RFC 3161"} is an Internet standard (https://www.rfc-editor.org/info/rfc3161) and should not be localized.The trusted-signing command is obsolete. Use the artifact-signing command instead.信任簽署命令已過時。請改為使用成品簽署命令。Sets the verbosity level. Allowed values are 'none', 'critical', 'error', 'warning', 'information', 'debug', and 'trace'.設定詳細資訊層級。允許的值為 'none'、'critical'、'error'、'warning'、'information'、'debug' 和 'trace'。{Locked="none", "critical", "error", "warning", "information", "debug", "trace"} are option values and should not be localized.Only Windows x64 is supported at this time. See https://github.com/dotnet/sign/issues/474 regarding Windows x86 support.目前只支援 Windows x64。如需 Windows x86 支援,請參閱 https://github.com/dotnet/sign/issues/474 (英文)。
================================================
FILE: src/Sign.Cli/xlf/TrustedSigningResources.cs.xlf
================================================
The Trusted Signing Account name.Název účtu pro důvěryhodné podepisováníThe Certificate Profile name.Název profilu certifikátuUse Trusted Signing. (obsolete, use artifact-signing instead)Použijte důvěryhodné podepisování. (zastaralé, místo toho použít artifact-signing)The Trusted Signing Account endpoint. The value must be a URI that aligns to the region that your Trusted Signing Account and Certificate Profile were created in.Koncový bod důvěryhodného podpisového účtu. Hodnota musí být identifikátor URI, který odpovídá oblasti, ve které jste vytvořili důvěryhodný podpisový účet a profil certifikátu.
================================================
FILE: src/Sign.Cli/xlf/TrustedSigningResources.de.xlf
================================================
The Trusted Signing Account name.Der Kontoname der vertrauensvollen Signatur.The Certificate Profile name.Der Zertifikatprofilname.Use Trusted Signing. (obsolete, use artifact-signing instead)Verwenden Sie die vertrauenswürdige Signatur. (veraltet, stattdessen Artefaktsignierung verwenden)The Trusted Signing Account endpoint. The value must be a URI that aligns to the region that your Trusted Signing Account and Certificate Profile were created in.Der Endpunkt des Kontos der vertrauensvollen Signatur. Der Wert muss ein URI sein, der der Region entspricht, in der Ihr Konto der vertrauensvollen Signatur und das Zertifikatprofil erstellt wurden.
================================================
FILE: src/Sign.Cli/xlf/TrustedSigningResources.es.xlf
================================================
The Trusted Signing Account name.Nombre de la cuenta de firma de confianza.The Certificate Profile name.Nombre del perfil de certificado.Use Trusted Signing. (obsolete, use artifact-signing instead)Use la firma de confianza. (obsoleto, use la firma de artefactos en su lugar)The Trusted Signing Account endpoint. The value must be a URI that aligns to the region that your Trusted Signing Account and Certificate Profile were created in.Punto de conexión de la cuenta de firma de confianza. El valor debe ser un identificador URI que se alinee con la región en la que se han creado la cuenta de firma de confianza y el perfil de certificado.
================================================
FILE: src/Sign.Cli/xlf/TrustedSigningResources.fr.xlf
================================================
The Trusted Signing Account name.Nom de compte Signatures de confiance.The Certificate Profile name.Nom du profil de certificat.Use Trusted Signing. (obsolete, use artifact-signing instead)Utilisez le service Signatures de confiance. (obsolète, utilisez plutôt la signature d’artefact)The Trusted Signing Account endpoint. The value must be a URI that aligns to the region that your Trusted Signing Account and Certificate Profile were created in.Point de terminaison du compte Signatures de confiance. La valeur doit être un URI qui s’aligne sur la région dans laquelle votre compte Signatures de confiance et votre profil de certificat ont été créés.
================================================
FILE: src/Sign.Cli/xlf/TrustedSigningResources.it.xlf
================================================
The Trusted Signing Account name.Nome dell'account di firma attendibile.The Certificate Profile name.Nome profilo certificato.Use Trusted Signing. (obsolete, use artifact-signing instead)Usare Firma attendibile. (obsoleto, usare la firma artefatti)The Trusted Signing Account endpoint. The value must be a URI that aligns to the region that your Trusted Signing Account and Certificate Profile were created in.Endpoint dell'account di firma attendibile. Il valore deve essere un URI allineato all'area in cui sono stati creati l'account di firma attendibile e il profilo certificato.
================================================
FILE: src/Sign.Cli/xlf/TrustedSigningResources.ja.xlf
================================================
The Trusted Signing Account name.信頼された署名のアカウント名。The Certificate Profile name.証明書プロファイル名。Use Trusted Signing. (obsolete, use artifact-signing instead)信頼された署名を使用します。(廃止されています。代わりに artifact-signing を使用してください)The Trusted Signing Account endpoint. The value must be a URI that aligns to the region that your Trusted Signing Account and Certificate Profile were created in.信頼された署名アカウントのエンドポイント。値は、信頼された署名アカウントと証明書プロファイルが作成されたリージョンに合った URI である必要があります。
================================================
FILE: src/Sign.Cli/xlf/TrustedSigningResources.ko.xlf
================================================
The Trusted Signing Account name.신뢰할 수 있는 서명 계정 이름입니다.The Certificate Profile name.인증서 프로필 이름입니다.Use Trusted Signing. (obsolete, use artifact-signing instead)신뢰할 수 있는 서명을 사용합니다. (사용되지 않음, 대신 아티팩트 서명 사용)The Trusted Signing Account endpoint. The value must be a URI that aligns to the region that your Trusted Signing Account and Certificate Profile were created in.신뢰할 수 있는 서명 계정 엔드포인트입니다. 값은 신뢰할 수 있는 서명 계정 및 인증서 프로필이 생성된 지역에 맞는 URI여야 합니다.
================================================
FILE: src/Sign.Cli/xlf/TrustedSigningResources.pl.xlf
================================================
The Trusted Signing Account name.Nazwa konta usługi Zaufane podpisywanie.The Certificate Profile name.Nazwa profilu certyfikatu.Use Trusted Signing. (obsolete, use artifact-signing instead)Użyj usługi Zaufane podpisywanie. (przestarzałe, zamiast tego użyj rozwiązania Artifact Signing)The Trusted Signing Account endpoint. The value must be a URI that aligns to the region that your Trusted Signing Account and Certificate Profile were created in.Punkt końcowy konta usługi Zaufane podpisywanie. Wartość musi być identyfikatorem URI, który jest zgodny z regionem, w ramach którego utworzono konto usługi Zaufane podpisywanie i profil certyfikatu.
================================================
FILE: src/Sign.Cli/xlf/TrustedSigningResources.pt-BR.xlf
================================================
The Trusted Signing Account name.O nome da Conta de Assinatura Confiável.The Certificate Profile name.O nome do Perfil de Certificado.Use Trusted Signing. (obsolete, use artifact-signing instead)Use a Assinatura Confiável. (obsoleto, use a assinatura de artefatos)The Trusted Signing Account endpoint. The value must be a URI that aligns to the region that your Trusted Signing Account and Certificate Profile were created in.O ponto de extremidade da Conta de Assinatura Confiável. O valor deve ser uma URI que se alinhe à região em que sua Conta de Assinatura Confiável e o Perfil de Certificado foram criados.
================================================
FILE: src/Sign.Cli/xlf/TrustedSigningResources.ru.xlf
================================================
The Trusted Signing Account name.Имя учетной записи для доверенного подписания.The Certificate Profile name.Имя профиля сертификата.Use Trusted Signing. (obsolete, use artifact-signing instead)Используйте доверенное подписание. (устарело, вместо этого применяйте подписание артефактов)The Trusted Signing Account endpoint. The value must be a URI that aligns to the region that your Trusted Signing Account and Certificate Profile were created in.Конечная точка учетной записи для доверенного подписания. Значением должен быть URI, соответствующий региону, в котором созданы учетная запись для доверенного подписания и профиль сертификата.
================================================
FILE: src/Sign.Cli/xlf/TrustedSigningResources.tr.xlf
================================================
The Trusted Signing Account name.Güvenilir İmzalama Hesabı adı.The Certificate Profile name.Sertifika Profili adı.Use Trusted Signing. (obsolete, use artifact-signing instead)Güvenilen İmzalama hizmetini kullanın. (artık kullanılmıyor, bunun yerine yapıt imzalama kullanın)The Trusted Signing Account endpoint. The value must be a URI that aligns to the region that your Trusted Signing Account and Certificate Profile were created in.Güvenilir İmzalama Hesabı uç noktası. Değer, Güvenilir İmzalama Hesabınızın ve Sertifika Profilinin oluşturulduğu bölgeyle uyumlu bir URI olmalıdır.
================================================
FILE: src/Sign.Cli/xlf/TrustedSigningResources.zh-Hans.xlf
================================================
The Trusted Signing Account name.受信任签名帐户名称。The Certificate Profile name.证书配置文件名称。Use Trusted Signing. (obsolete, use artifact-signing instead)使用受信任签名。(已过时,请改用工件签名)The Trusted Signing Account endpoint. The value must be a URI that aligns to the region that your Trusted Signing Account and Certificate Profile were created in.受信任签名帐户终结点。该值必须是与创建受信任签名帐户和证书配置文件的区域相对应的 URI。
================================================
FILE: src/Sign.Cli/xlf/TrustedSigningResources.zh-Hant.xlf
================================================
The Trusted Signing Account name.信任簽署帳戶名稱。The Certificate Profile name.憑證設定檔名稱。Use Trusted Signing. (obsolete, use artifact-signing instead)使用信任簽署。(已過時,請改用成品簽署)The Trusted Signing Account endpoint. The value must be a URI that aligns to the region that your Trusted Signing Account and Certificate Profile were created in.信任簽署帳戶端點。值必須是 URI,且與您的信任簽署帳戶和憑證設定檔建立區域一致。
================================================
FILE: src/Sign.Core/AppInitializer.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
namespace Sign.Core
{
// Performs one-time, process-wide native setup that must run before any signing
// work: forces the bundled x64 SDK copies of wintrust.dll and mssign32.dll to
// load (instead of the system copies), and makes the .NET Framework directory
// resolvable for a later P/Invoke into clr.dll.
internal static class AppInitializer
{
internal static void Initialize()
{
AppRootDirectoryLocator locator = new();
DirectoryInfo appRootDirectory = locator.Directory;
// The bundled signing SDK binaries live under <app root>\tools\SDK\x64.
string baseDirectory = Path.Combine(appRootDirectory.FullName, "tools", "SDK", "x64");
//
// Ensure we invoke wintrust!DllMain before we get too far.
// This will call wintrust!RegisterSipsFromIniFile and read in wintrust.dll.ini
// to swap out some local SIPs. Internally, wintrust will call LoadLibraryW
// on each DLL= entry, so we need to also adjust our DLL search path or we'll
// load unwanted system-provided copies.
//
// NOTE: order matters — SetDllDirectoryW must precede the LoadLibraryW calls
// so that the DLL= entries resolve to the bundled copies.
Kernel32.SetDllDirectoryW(baseDirectory);
Kernel32.LoadLibraryW(Path.Combine(baseDirectory, "wintrust.dll"));
Kernel32.LoadLibraryW(Path.Combine(baseDirectory, "mssign32.dll"));
// This is here because we need to P/Invoke into clr.dll for _AxlPublicKeyBlobToPublicKeyToken.
// NOTE(review): the null-forgiving '!' assumes %windir% is always set; true on
// Windows, and this tool is Windows-only (see the x64-only resource strings).
string windir = Environment.GetEnvironmentVariable("windir")!;
string netfxDir = Path.Combine(windir, "Microsoft.NET", "Framework64", "v4.0.30319");
AddEnvironmentPath(netfxDir);
}
// Appends 'path' to this process's PATH environment variable so native DLLs in
// that directory can be resolved by subsequent library loads.
private static void AddEnvironmentPath(string path)
{
const string name = "PATH";
string paths = Environment.GetEnvironmentVariable(name) ?? string.Empty;
string newPaths = string.Join(Path.PathSeparator, paths, path);
Environment.SetEnvironmentVariable(name, newPaths);
}
}
}
================================================
FILE: src/Sign.Core/Certificates/CertificateVerifier.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.Security.Cryptography.X509Certificates;
using Microsoft.Extensions.Logging;
namespace Sign.Core
{
/// <summary>
/// Validates that a signing certificate is currently within its validity period.
/// </summary>
internal sealed class CertificateVerifier : ICertificateVerifier
{
    private readonly ILogger _logger;

    // Dependency injection requires a public constructor.
    public CertificateVerifier(ILogger logger)
    {
        ArgumentNullException.ThrowIfNull(logger);

        _logger = logger;
    }

    /// <summary>
    /// Logs an error and throws a <see cref="SigningException" /> if
    /// <paramref name="certificate" /> is not yet time-valid or has expired.
    /// </summary>
    /// <param name="certificate">The certificate to check; must not be null.</param>
    public void Verify(X509Certificate2 certificate)
    {
        ArgumentNullException.ThrowIfNull(certificate);

        // NotBefore/NotAfter are reported in local time, so compare with
        // DateTime.Now rather than UtcNow.
        DateTime currentTime = DateTime.Now;
        string? error = null;

        if (currentTime < certificate.NotBefore)
        {
            error = Resources.CertificateIsNotYetTimeValid;
        }
        else if (certificate.NotAfter < currentTime)
        {
            error = Resources.CertificateIsExpired;
        }

        if (error is not null)
        {
            // See https://github.com/dotnet/roslyn-analyzers/issues/5626
#pragma warning disable CA2254 // Template should be a static expression
            _logger.LogError(error);
#pragma warning restore CA2254 // Template should be a static expression

            throw new SigningException(error);
        }
    }
}
}
================================================
FILE: src/Sign.Core/Certificates/ICertificateVerifier.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.Security.Cryptography.X509Certificates;
namespace Sign.Core
{
/// <summary>
/// Checks a signing certificate before it is used for signing.
/// </summary>
internal interface ICertificateVerifier
{
/// <summary>
/// Verifies <paramref name="certificate" />. The <c>CertificateVerifier</c>
/// implementation logs an error and throws a <c>SigningException</c> when the
/// certificate is outside its validity period.
/// </summary>
/// <param name="certificate">The certificate to verify; must not be null.</param>
void Verify(X509Certificate2 certificate);
}
}
================================================
FILE: src/Sign.Core/Containers/AppxBundleContainer.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.Xml.Linq;
using Microsoft.Extensions.Logging;
namespace Sign.Core
{
/// <summary>
/// Container for .appxbundle packages. Unbundles the package with makeappx.exe
/// into a temporary directory so inner files can be signed, then rebundles it
/// (reapplying the original bundle version) on save.
/// </summary>
internal sealed class AppxBundleContainer : Container
{
    private readonly FileInfo _appxBundle;
    private string? _bundleVersion;
    private readonly IDirectoryService _directoryService;
    private readonly ILogger _logger;
    private readonly IMakeAppxCli _makeAppxCli;

    public AppxBundleContainer(
        FileInfo appxBundle,
        IDirectoryService directoryService,
        IFileMatcher fileMatcher,
        IMakeAppxCli makeAppxCli,
        ILogger logger)
        : base(fileMatcher)
    {
        ArgumentNullException.ThrowIfNull(appxBundle, nameof(appxBundle));
        ArgumentNullException.ThrowIfNull(directoryService, nameof(directoryService));
        ArgumentNullException.ThrowIfNull(makeAppxCli, nameof(makeAppxCli));
        ArgumentNullException.ThrowIfNull(logger, nameof(logger));

        _appxBundle = appxBundle;
        _directoryService = directoryService;
        _makeAppxCli = makeAppxCli;
        _logger = logger;
    }

    /// <summary>
    /// Unbundles the package into a temporary directory and captures its bundle
    /// version so the version can be reapplied when rebundling.
    /// </summary>
    /// <exception cref="InvalidOperationException">If the container is already open.</exception>
    public override async ValueTask OpenAsync()
    {
        if (TemporaryDirectory is not null)
        {
            throw new InvalidOperationException();
        }

        TemporaryDirectory = new TemporaryDirectory(_directoryService);

        // Consistent with AppxContainer: report where the container is opened.
        // (Previously _logger was injected but never used.)
        _logger.LogInformation(
            Resources.OpeningContainer,
            _appxBundle.FullName,
            TemporaryDirectory.Directory.FullName);

        string args = $@"unbundle /p ""{_appxBundle.FullName}"" /d ""{TemporaryDirectory.Directory.FullName}"" /o";

        await _makeAppxCli.RunAsync(args);

        _bundleVersion = GetBundleVersion();
    }

    /// <summary>
    /// Rebundles the (now signed) contents into a fresh bundle and replaces the
    /// original file with it.
    /// </summary>
    /// <exception cref="InvalidOperationException">If the container was never opened.</exception>
    public override async ValueTask SaveAsync()
    {
        if (TemporaryDirectory is null)
        {
            throw new InvalidOperationException();
        }

        using (TemporaryDirectory temporaryDirectory = new(_directoryService))
        {
            FileInfo newAppxBundle = new(Path.Combine(temporaryDirectory.Directory.FullName, _appxBundle.Name));
            string args = $@"bundle /d ""{TemporaryDirectory.Directory.FullName}"" /p ""{newAppxBundle.FullName}"" /bv {_bundleVersion} /o";

            await _makeAppxCli.RunAsync(args);

            // File.Move(..., overwrite: true) already replaces the destination.
            // Deleting the original first (as this code used to do) was redundant
            // and would lose the original if the move then failed.
            File.Move(newAppxBundle.FullName, _appxBundle.FullName, overwrite: true);

            _appxBundle.Refresh();
        }
    }

    // Reads the bundle version from AppxMetadata\AppxBundleManifest.xml in the
    // unbundled temporary directory; returns null if the element/attribute is absent.
    private string? GetBundleVersion()
    {
        string fileName = Path.Combine(TemporaryDirectory!.Directory.FullName, "AppxMetadata", "AppxBundleManifest.xml");

        using (FileStream stream = File.OpenRead(fileName))
        {
            XDocument manifest = XDocument.Load(stream, LoadOptions.PreserveWhitespace);
            XNamespace ns = "http://schemas.microsoft.com/appx/2013/bundle";

            return manifest.Root?.Element(ns + "Identity")?.Attribute("Version")?.Value;
        }
    }
}
}
================================================
FILE: src/Sign.Core/Containers/AppxContainer.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.Security.Cryptography.X509Certificates;
using System.Xml.Linq;
using Microsoft.Extensions.Logging;
namespace Sign.Core
{
// Unpacking and repacking an appx will strip it of its signature
// We can also update the publisher of the appxmanifest
internal sealed class AppxContainer : Container
{
    private readonly FileInfo _appx;
    private readonly IDirectoryService _directoryService;
    private readonly ICertificateProvider _certificateProvider;
    private readonly ILogger _logger;
    private readonly IMakeAppxCli _makeAppxCli;

    // fileMatcher is null-checked by the base Container constructor.
    public AppxContainer(
        FileInfo appx,
        ICertificateProvider certificateProvider,
        IDirectoryService directoryService,
        IFileMatcher fileMatcher,
        IMakeAppxCli makeAppxCli,
        ILogger logger)
        : base(fileMatcher)
    {
        ArgumentNullException.ThrowIfNull(appx, nameof(appx));
        ArgumentNullException.ThrowIfNull(certificateProvider, nameof(certificateProvider));
        ArgumentNullException.ThrowIfNull(directoryService, nameof(directoryService));
        ArgumentNullException.ThrowIfNull(makeAppxCli, nameof(makeAppxCli));
        ArgumentNullException.ThrowIfNull(logger, nameof(logger));
        _appx = appx;
        _directoryService = directoryService;
        _certificateProvider = certificateProvider;
        _makeAppxCli = makeAppxCli;
        _logger = logger;
    }

    // Unpacks the package into a fresh temporary directory via `makeappx unpack`
    // and immediately rewrites the manifest's Publisher to match the signing
    // certificate. Throws InvalidOperationException if already open.
    public override async ValueTask OpenAsync()
    {
        if (TemporaryDirectory is not null)
        {
            throw new InvalidOperationException();
        }
        TemporaryDirectory = new TemporaryDirectory(_directoryService);
        _logger.LogInformation(
            Resources.OpeningContainer,
            _appx.FullName,
            TemporaryDirectory.Directory.FullName);
        var args = $@"unpack /p ""{_appx.FullName}"" /d ""{TemporaryDirectory!.Directory.FullName}"" /l /o";
        await _makeAppxCli.RunAsync(args);
        await UpdateManifestPublisherAsync();
    }

    // Repacks the (possibly modified) contents over the original file via
    // `makeappx pack`. Packs to a scratch directory first, then replaces the
    // original. Throws InvalidOperationException if OpenAsync() was never called.
    public override async ValueTask SaveAsync()
    {
        if (TemporaryDirectory is null)
        {
            throw new InvalidOperationException();
        }
        using (TemporaryDirectory temporaryDirectory = new(_directoryService))
        {
            FileInfo newAppx = new(Path.Combine(temporaryDirectory.Directory.FullName, _appx.Name));
            var args = $@"pack /d ""{TemporaryDirectory!.Directory.FullName}"" /p ""{newAppx.FullName}"" /o /l";
            await _makeAppxCli.RunAsync(args);
            _appx.Delete();
            File.Move(newAppx.FullName, _appx.FullName, overwrite: true);
            _appx.Refresh();
        }
    }

    // Sets Identity/@Publisher in the extracted AppxManifest.xml to the signing
    // certificate's subject name so the repacked appx can be validly signed.
    // If the Identity element is missing, the manifest is rewritten unchanged.
    private async Task UpdateManifestPublisherAsync()
    {
        FileInfo appxManifest = new(Path.Combine(TemporaryDirectory!.Directory.FullName, "AppxManifest.xml"));
        XDocument manifest;
        using (FileStream stream = appxManifest.OpenRead())
        {
            // PreserveWhitespace keeps the manifest's original formatting intact on save.
            manifest = XDocument.Load(stream, LoadOptions.PreserveWhitespace);
            XNamespace ns = "http://schemas.microsoft.com/appx/manifest/foundation/windows10";
            XElement? idElement = manifest.Root?.Element(ns + "Identity");
            if (idElement is not null)
            {
                using (X509Certificate2 certificate = await _certificateProvider.GetCertificateAsync())
                {
                    string publisher = certificate.SubjectName.Name;
                    idElement.SetAttributeValue("Publisher", publisher);
                }
            }
        }
        // FileMode.Create truncates any previous content before the save.
        using (FileStream stream = appxManifest.Open(FileMode.Create, FileAccess.Write, FileShare.None))
        {
            manifest.Save(stream);
        }
    }
}
}
================================================
FILE: src/Sign.Core/Containers/Container.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using Microsoft.Extensions.FileSystemGlobbing;
using Microsoft.Extensions.FileSystemGlobbing.Abstractions;
namespace Sign.Core
{
// Base class for archive-like signable files: contents are extracted to a
// temporary directory by OpenAsync(), enumerated/signed, and written back by SaveAsync().
internal abstract class Container : IContainer
{
    private readonly IFileMatcher _fileMatcher;

    // Extraction directory for the container's contents; non-null only after OpenAsync().
    protected TemporaryDirectory? TemporaryDirectory { get; set; }

    protected Container(IFileMatcher fileMatcher)
    {
        ArgumentNullException.ThrowIfNull(fileMatcher, nameof(fileMatcher));

        _fileMatcher = fileMatcher;
    }

    public virtual void Dispose() => TemporaryDirectory?.Dispose();

    // Enumerates every extracted file, recursively. Throws if the container is not open.
    public IEnumerable GetFiles()
    {
        TemporaryDirectory directory = TemporaryDirectory ?? throw new InvalidOperationException();

        return directory.Directory.EnumerateFiles("*", SearchOption.AllDirectories);
    }

    // Enumerates extracted files matching the glob matcher. Throws if the container is not open.
    public IEnumerable GetFiles(Matcher matcher)
    {
        ArgumentNullException.ThrowIfNull(matcher, nameof(matcher));

        TemporaryDirectory directory = TemporaryDirectory ?? throw new InvalidOperationException();

        return _fileMatcher.EnumerateMatches(new DirectoryInfoWrapper(directory.Directory), matcher);
    }

    public abstract ValueTask OpenAsync();
    public abstract ValueTask SaveAsync();
}
}
================================================
FILE: src/Sign.Core/Containers/ContainerProvider.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using Microsoft.Extensions.Logging;
namespace Sign.Core
{
// Maps a file's extension to the container implementation that can open it
// (appx bundle, appx, zip, or NuGet package).
internal sealed class ContainerProvider : IContainerProvider
{
    // All extension sets compare case-insensitively, so ".MSIX" is still an appx container.
    private readonly HashSet<string> _appxBundleExtensions = new(StringComparer.OrdinalIgnoreCase)
    {
        ".appxbundle",
        ".eappxbundle",
        ".emsixbundle",
        ".msixbundle"
    };
    private readonly HashSet<string> _appxExtensions = new(StringComparer.OrdinalIgnoreCase)
    {
        ".appx",
        ".eappx",
        ".emsix",
        ".msix"
    };
    private readonly HashSet<string> _nuGetExtensions = new(StringComparer.OrdinalIgnoreCase)
    {
        ".nupkg",
        ".snupkg"
    };
    private readonly HashSet<string> _zipExtensions = new(StringComparer.OrdinalIgnoreCase)
    {
        ".appxupload",
        ".clickonce",
        ".msixupload",
        ".vsix",
        ".zip"
    };

    private readonly ICertificateProvider _certificateProvider;
    private readonly IDirectoryService _directoryService;
    private readonly IFileMatcher _fileMatcher;
    private readonly ILogger _logger;
    private readonly IMakeAppxCli _makeAppxCli;

    // Dependency injection requires a public constructor.
    public ContainerProvider(
        ICertificateProvider certificateProvider,
        IDirectoryService directoryService,
        IFileMatcher fileMatcher,
        IMakeAppxCli makeAppxCli,
        ILogger logger)
    {
        ArgumentNullException.ThrowIfNull(certificateProvider, nameof(certificateProvider));
        ArgumentNullException.ThrowIfNull(directoryService, nameof(directoryService));
        ArgumentNullException.ThrowIfNull(fileMatcher, nameof(fileMatcher));
        ArgumentNullException.ThrowIfNull(makeAppxCli, nameof(makeAppxCli));
        ArgumentNullException.ThrowIfNull(logger, nameof(logger));

        _certificateProvider = certificateProvider;
        _directoryService = directoryService;
        _fileMatcher = fileMatcher;
        _makeAppxCli = makeAppxCli;
        _logger = logger;
    }

    public bool IsAppxBundleContainer(FileInfo file) => HasExtensionIn(_appxBundleExtensions, file);

    public bool IsAppxContainer(FileInfo file) => HasExtensionIn(_appxExtensions, file);

    public bool IsNuGetContainer(FileInfo file) => HasExtensionIn(_nuGetExtensions, file);

    public bool IsZipContainer(FileInfo file) => HasExtensionIn(_zipExtensions, file);

    // Returns a container for the file, or null when the extension is not a
    // recognized container type. Bundle/appx checks run before the generic
    // zip/NuGet checks (the extension sets are disjoint, so order is cosmetic).
    public IContainer? GetContainer(FileInfo file)
    {
        ArgumentNullException.ThrowIfNull(file, nameof(file));

        if (IsAppxBundleContainer(file))
        {
            return new AppxBundleContainer(file, _directoryService, _fileMatcher, _makeAppxCli, _logger);
        }

        if (IsAppxContainer(file))
        {
            return new AppxContainer(file, _certificateProvider, _directoryService, _fileMatcher, _makeAppxCli, _logger);
        }

        if (IsZipContainer(file))
        {
            return new ZipContainer(file, _directoryService, _fileMatcher, _logger);
        }

        if (IsNuGetContainer(file))
        {
            return new NuGetContainer(file, _directoryService, _fileMatcher, _logger);
        }

        return null;
    }

    // Shared null-check + case-insensitive extension membership test.
    private static bool HasExtensionIn(HashSet<string> extensions, FileInfo file)
    {
        ArgumentNullException.ThrowIfNull(file, nameof(file));

        return extensions.Contains(file.Extension);
    }
}
}
================================================
FILE: src/Sign.Core/Containers/IContainer.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using Microsoft.Extensions.FileSystemGlobbing;
namespace Sign.Core
{
// An archive-like signable file (zip, NuGet package, appx, appx bundle) whose
// contents can be extracted, signed, and repacked. Dispose() cleans up the
// extraction directory.
internal interface IContainer : IDisposable
{
    // Enumerates every extracted file; valid only after OpenAsync().
    IEnumerable GetFiles();
    // Enumerates extracted files matching the glob matcher; valid only after OpenAsync().
    IEnumerable GetFiles(Matcher matcher);
    // Extracts the container's contents to a temporary directory.
    ValueTask OpenAsync();
    // Repacks the (possibly modified) contents back over the original file.
    ValueTask SaveAsync();
}
================================================
FILE: src/Sign.Core/Containers/IContainerProvider.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
namespace Sign.Core
{
// Classifies files by extension into container categories and creates the
// matching IContainer implementation.
internal interface IContainerProvider
{
    bool IsAppxBundleContainer(FileInfo file);
    bool IsAppxContainer(FileInfo file);
    bool IsNuGetContainer(FileInfo file);
    bool IsZipContainer(FileInfo file);
    // Returns null when the file's extension is not a recognized container type.
    IContainer? GetContainer(FileInfo file);
}
}
================================================
FILE: src/Sign.Core/Containers/NuGetContainer.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using Microsoft.Extensions.Logging;
using NuGet.Packaging.Signing;
namespace Sign.Core
{
// A zip container for .nupkg/.snupkg files that removes any existing NuGet
// package signature before repacking, since repacking would invalidate it anyway.
internal sealed class NuGetContainer : ZipContainer
{
    internal NuGetContainer(
        FileInfo zipFile,
        IDirectoryService directoryService,
        IFileMatcher fileMatcher,
        ILogger logger)
        : base(zipFile, directoryService, fileMatcher, logger)
    {
    }

    public override ValueTask SaveAsync()
    {
        if (TemporaryDirectory is null)
        {
            throw new InvalidOperationException();
        }

        // Delete the extracted package signature file (if present) so it is not
        // carried into the rezipped package; the package is re-signed afterwards.
        string signatureFilePath = Path.Combine(
            TemporaryDirectory.Directory.FullName,
            SigningSpecifications.V1.SignaturePath);
        FileInfo signatureFile = new(signatureFilePath);

        if (signatureFile.Exists)
        {
            signatureFile.Delete();
        }

        return base.SaveAsync();
    }
}
}
================================================
FILE: src/Sign.Core/Containers/ZipContainer.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.IO.Compression;
using Microsoft.Extensions.Logging;
namespace Sign.Core
{
// Container implementation for plain zip-based formats (.zip, .vsix,
// .appxupload, .msixupload, ...): extract, let callers sign the contents,
// then rezip over the original file.
internal class ZipContainer : Container
{
    private readonly IDirectoryService _directoryService;
    private readonly ILogger _logger;
    private readonly FileInfo _zipFile;

    internal ZipContainer(
        FileInfo zipFile,
        IDirectoryService directoryService,
        IFileMatcher fileMatcher,
        ILogger logger)
        : base(fileMatcher)
    {
        ArgumentNullException.ThrowIfNull(zipFile, nameof(zipFile));
        ArgumentNullException.ThrowIfNull(directoryService, nameof(directoryService));
        ArgumentNullException.ThrowIfNull(logger, nameof(logger));

        _zipFile = zipFile;
        _directoryService = directoryService;
        _logger = logger;
    }

    // Extracts the archive into a fresh temporary directory.
    // Throws InvalidOperationException when called twice without disposing.
    public override ValueTask OpenAsync()
    {
        if (TemporaryDirectory is not null)
        {
            throw new InvalidOperationException();
        }

        TemporaryDirectory = new TemporaryDirectory(_directoryService);

        _logger.LogInformation(
            Resources.OpeningContainer,
            _zipFile.FullName,
            TemporaryDirectory.Directory.FullName);

        ZipFile.ExtractToDirectory(_zipFile.FullName, TemporaryDirectory.Directory.FullName);

        return ValueTask.CompletedTask;
    }

    // Rezips the extracted directory and replaces the original archive.
    // Throws InvalidOperationException when OpenAsync() was never called.
    public override ValueTask SaveAsync()
    {
        if (TemporaryDirectory is null)
        {
            throw new InvalidOperationException();
        }

        _logger.LogInformation(
            Resources.SavingContainer,
            _zipFile.FullName,
            TemporaryDirectory.Directory.FullName);

        // Zip into a scratch location first, then swap the archive in over the original path.
        using (TemporaryDirectory outputDirectory = new(_directoryService))
        {
            string rezippedFilePath = Path.Combine(outputDirectory.Directory.FullName, _zipFile.Name);

            ZipFile.CreateFromDirectory(
                TemporaryDirectory.Directory.FullName,
                rezippedFilePath,
                CompressionLevel.Optimal,
                includeBaseDirectory: false);

            _zipFile.Delete();
            File.Move(rezippedFilePath, _zipFile.FullName, overwrite: true);
            _zipFile.Refresh();
        }

        return ValueTask.CompletedTask;
    }
}
}
================================================
FILE: src/Sign.Core/DataFormatSigners/AggregatingSigner.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using Microsoft.Extensions.FileSystemGlobbing;
namespace Sign.Core
{
// Routes a mixed set of files to the registered IDataFormatSigner
// implementations, optionally recursing into container formats
// (zip/NuGet, then appx, then appx bundles) so inner files are signed
// before each container is repacked.
internal sealed class AggregatingSigner : IAggregatingDataFormatSigner
{
    private readonly IContainerProvider _containerProvider;
    private readonly IDefaultDataFormatSigner _defaultSigner;
    private readonly IFileMetadataService _fileMetadataService;
    private readonly IMatcherFactory _matcherFactory;
    private readonly IEnumerable _signers;

    // Dependency injection requires a public constructor.
    public AggregatingSigner(
        IEnumerable signers,
        IDefaultDataFormatSigner defaultSigner,
        IContainerProvider containerProvider,
        IFileMetadataService fileMetadataService,
        IMatcherFactory matcherFactory)
    {
        ArgumentNullException.ThrowIfNull(signers, nameof(signers));
        ArgumentNullException.ThrowIfNull(defaultSigner, nameof(defaultSigner));
        ArgumentNullException.ThrowIfNull(containerProvider, nameof(containerProvider));
        ArgumentNullException.ThrowIfNull(fileMetadataService, nameof(fileMetadataService));
        ArgumentNullException.ThrowIfNull(matcherFactory, nameof(matcherFactory));
        _signers = signers;
        _defaultSigner = defaultSigner;
        _containerProvider = containerProvider;
        _fileMetadataService = fileMetadataService;
        _matcherFactory = matcherFactory;
    }

    // True when any registered signer handles the file, or when it is an archive
    // extension this aggregator knows how to recurse into.
    public bool CanSign(FileInfo file)
    {
        ArgumentNullException.ThrowIfNull(file, nameof(file));
        foreach (IDataFormatSigner signer in _signers)
        {
            if (signer.CanSign(file))
            {
                return true;
            }
        }
        string extension = file.Extension.ToLowerInvariant();
        return extension switch
        {
            // archives
            ".zip" or ".appxupload" or ".msixupload" => true,
            _ => false
        };
    }

    // Signs the given files: first (optionally) recurses into containers, then
    // groups the files by the signer that claims them and signs each group in
    // parallel. Files no signer claims fall back to the default signer if they
    // are portable executables (catches PEs with nonstandard extensions).
    public async Task SignAsync(IEnumerable files, SignOptions options)
    {
        ArgumentNullException.ThrowIfNull(files, nameof(files));
        ArgumentNullException.ThrowIfNull(options, nameof(options));
        if (options.RecurseContainers)
        {
            await SignContainerContentsAsync(files, options);
        }
        // split by code sign service and fallback to default
        // NOTE: a file claimed by multiple signers appears in multiple groups.
        var grouped = (from signer in _signers
                       from file in files
                       where signer.CanSign(file)
                       group file by signer into groups
                       select groups).ToList();
        // get all files and exclude existing;
        // This is to catch PE files that don't have the correct extension set
        var defaultFiles = files.Except(grouped.SelectMany(g => g))
            .Where(_fileMetadataService.IsPortableExecutable)
            .Select(f => new { _defaultSigner.Signer, f })
            .GroupBy(a => a.Signer, k => k.f)
            .SingleOrDefault(); // one group here
        if (defaultFiles != null)
        {
            grouped.Add(defaultFiles);
        }
        await Task.WhenAll(grouped.Select(g => g.Key.SignAsync(g.ToList(), options)));
    }

    // Recursively signs the contents of container files in three ordered phases:
    // 1. zip/NuGet archives (saved only when something inside was signed),
    // 2. appx packages (always saved, to persist the updated publisher info),
    // 3. appx bundles (only the inner .appx/.msix files are candidates).
    // Containers are disposed (temp directories removed) after each phase.
    private async Task SignContainerContentsAsync(IEnumerable files, SignOptions options)
    {
        // See if any of them are archives
        List archives = (from file in files
                         where _containerProvider.IsZipContainer(file) || _containerProvider.IsNuGetContainer(file)
                         select file).ToList();
        // expand the archives and sign recursively first
        List containers = new();
        try
        {
            foreach (FileInfo archive in archives)
            {
                // Non-null: IsZipContainer/IsNuGetContainer already matched above.
                IContainer container = _containerProvider.GetContainer(archive)!;
                await container.OpenAsync();
                containers.Add(container);
            }
            // See if there's any files in the expanded zip that we need to sign
            List allFiles = containers
                .SelectMany(container => GetFiles(container, options))
                .ToList();
            if (allFiles.Count > 0)
            {
                // Send the files from the archives through the aggregator to sign
                await SignAsync(allFiles, options);
                // After signing the contents, save the zip
                // For NuPkg, this step removes the signature too, but that's ok as it'll get signed below
                await Parallel.ForEachAsync(containers, (container, cancellationToken) => container.SaveAsync());
            }
        }
        finally
        {
            containers.ForEach(tz => tz.Dispose());
            containers.Clear();
        }
        // See if there's any appx's in here, process them recursively first to sign the inner files
        List appxs = (from file in files
                      where _containerProvider.IsAppxContainer(file)
                      select file).ToList();
        // See if there's any appxbundles here, process them recursively first
        // expand the archives and sign recursively first
        // This will also update the publisher information to get it ready for signing
        try
        {
            foreach (FileInfo appx in appxs)
            {
                IContainer container = _containerProvider.GetContainer(appx)!;
                await container.OpenAsync();
                containers.Add(container);
            }
            // See if there's any files in the expanded zip that we need to sign
            List allFiles = containers
                .SelectMany(container => GetFiles(container, options))
                .ToList();
            if (allFiles.Count > 0)
            {
                // Send the files from the archives through the aggregator to sign
                await SignAsync(allFiles, options);
            }
            // Save the appx with the updated publisher info
            // (unconditionally, unlike the zip phase above: OpenAsync rewrote the manifest).
            await Parallel.ForEachAsync(containers, (container, cancellationToken) => container.SaveAsync());
        }
        finally
        {
            containers.ForEach(tz => tz.Dispose());
            containers.Clear();
        }
        List bundles = (from file in files
                        where _containerProvider.IsAppxBundleContainer(file)
                        select file).ToList();
        try
        {
            foreach (FileInfo bundle in bundles)
            {
                IContainer container = _containerProvider.GetContainer(bundle)!;
                await container.OpenAsync();
                containers.Add(container);
            }
            // Inside a bundle, only the contained packages are signable.
            Matcher appxBundleFileMatcher = _matcherFactory.Create();
            appxBundleFileMatcher.AddInclude("**/*.appx");
            appxBundleFileMatcher.AddInclude("**/*.msix");
            // See if there's any files in the expanded zip that we need to sign
            List allFiles = containers.SelectMany(tz => tz.GetFiles(appxBundleFileMatcher)).ToList();
            if (allFiles.Count > 0)
            {
                // Send the files from the archives through the aggregator to sign
                await SignAsync(allFiles, options);
                // After signing the contents, save the zip
                await Parallel.ForEachAsync(containers, (container, cancellationToken) => container.SaveAsync());
            }
        }
        finally
        {
            containers.ForEach(tz => tz.Dispose());
            containers.Clear();
        }
    }

    // Forwards dependency-copying to every signer that claims the file.
    public void CopySigningDependencies(FileInfo file, DirectoryInfo destination, SignOptions options)
    {
        // pass the handling for this down to the actual implementations
        foreach (IDataFormatSigner signer in _signers)
        {
            if (signer.CanSign(file))
            {
                signer.CopySigningDependencies(file, destination, options);
            }
        }
    }

    // Applies the include matcher (or all files when none) and then subtracts
    // anti-matcher hits from a container's extracted contents.
    private static IEnumerable GetFiles(IContainer container, SignOptions options)
    {
        IEnumerable files;
        if (options.Matcher is null)
        {
            // If not filtered, default to all
            files = container.GetFiles();
        }
        else
        {
            files = container.GetFiles(options.Matcher);
        }
        if (options.AntiMatcher is not null)
        {
            IEnumerable antiFiles = container.GetFiles(options.AntiMatcher);
            files = files.Except(antiFiles, FileInfoComparer.Instance).ToList();
        }
        return files;
    }
}
}
================================================
FILE: src/Sign.Core/DataFormatSigners/AppInstallerServiceSigner.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.Diagnostics.CodeAnalysis;
using System.Security.Cryptography.X509Certificates;
using System.Xml.Linq;
using Microsoft.Extensions.Logging;
namespace Sign.Core
{
// Not really signing anything, but updates the manifest file with the
// correct publisher information
internal sealed class AppInstallerServiceSigner : IDataFormatSigner
{
    // Windows 10, version 1709.
    internal static readonly XNamespace AppInstaller2017 = XNamespace.Get("http://schemas.microsoft.com/appx/appinstaller/2017");
    // Windows 10, version 1803.
    internal static readonly XNamespace AppInstaller2017_2 = XNamespace.Get("http://schemas.microsoft.com/appx/appinstaller/2017/2");
    // Windows 10, version 1809.
    internal static readonly XNamespace AppInstaller2018 = XNamespace.Get("http://schemas.microsoft.com/appx/appinstaller/2018");
    // Windows version 21H2 build 22000
    internal static readonly XNamespace AppInstaller2021 = XNamespace.Get("http://schemas.microsoft.com/appx/appinstaller/2021");

    private readonly ICertificateProvider _certificateProvider;
    private readonly ILogger _logger;

    // Dependency injection requires a public constructor.
    public AppInstallerServiceSigner(
        ICertificateProvider certificateProvider,
        ILogger logger)
    {
        ArgumentNullException.ThrowIfNull(certificateProvider, nameof(certificateProvider));
        ArgumentNullException.ThrowIfNull(logger, nameof(logger));

        _certificateProvider = certificateProvider;
        _logger = logger;
    }

    public bool CanSign(FileInfo file)
    {
        ArgumentNullException.ThrowIfNull(file, nameof(file));

        return string.Equals(file.Extension, ".appinstaller", StringComparison.OrdinalIgnoreCase);
    }

    // Rewrites each manifest's MainBundle/MainPackage Publisher attribute to the
    // signing certificate's subject name; no cryptographic signature is applied.
    public async Task SignAsync(IEnumerable files, SignOptions options)
    {
        ArgumentNullException.ThrowIfNull(files, nameof(files));
        ArgumentNullException.ThrowIfNull(options, nameof(options));

        _logger.LogInformation(Resources.EditingAppInstaller, files.Count());

        using (X509Certificate2 certificate = await _certificateProvider.GetCertificateAsync().ConfigureAwait(false))
        {
            // We need to open the files, and update the publisher value
            foreach (FileInfo file in files)
            {
                XDocument manifest;

                using (FileStream stream = file.OpenRead())
                {
                    // PreserveWhitespace keeps the manifest's formatting intact on save.
                    manifest = XDocument.Load(stream, LoadOptions.PreserveWhitespace);

                    if (TryGetMainElement(manifest, out XElement? mainElement))
                    {
                        string publisher = certificate.SubjectName.Name;

                        mainElement.SetAttributeValue("Publisher", publisher);
                    }
                }

                using (FileStream stream = file.Open(FileMode.Create, FileAccess.Write, FileShare.None))
                {
                    manifest.Save(stream);
                }
            }
        }
    }

    // Finds the MainBundle or MainPackage element under the manifest root, probing
    // each supported appinstaller schema namespace from oldest to newest.
    internal static bool TryGetMainElement(XDocument appInstallerManifest, [NotNullWhen(true)] out XElement? mainElement)
    {
        mainElement = null;

        if (appInstallerManifest.Root is not XElement rootElement)
        {
            return false;
        }

        XNamespace[] xmlNamespaces = [AppInstaller2017, AppInstaller2017_2, AppInstaller2018, AppInstaller2021];

        foreach (XNamespace xmlNamespace in xmlNamespaces)
        {
            mainElement = rootElement.Element(xmlNamespace + "MainBundle")
                ?? rootElement.Element(xmlNamespace + "MainPackage");

            if (mainElement is not null)
            {
                return true;
            }
        }

        return false;
    }
}
}
================================================
FILE: src/Sign.Core/DataFormatSigners/AzureSignToolSigner.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.Globalization;
using System.Runtime.ExceptionServices;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using AzureSign.Core;
using Microsoft.Extensions.Logging;
namespace Sign.Core
{
// Authenticode signer backed by AzureSignTool's AuthenticodeKeyVaultSigner,
// using a certificate and RSA key obtained from the configured providers.
internal class AzureSignToolSigner : IAzureSignToolDataFormatSigner
{
    // COM-based signing of .js and .vbs files requires an STA thread.
    // See https://github.com/dotnet/sign/issues/880
    private static readonly HashSet<string> StaThreadExtensions = new(StringComparer.OrdinalIgnoreCase)
    {
        ".js",
        ".vbs"
    };

    internal const int S_OK = 0;

    private readonly ICertificateProvider _certificateProvider;
    private readonly ISignatureAlgorithmProvider _signatureAlgorithmProvider;
    private readonly ILogger _logger;
    private readonly IReadOnlyList<ISignableFileType> _signableFileTypes;
    private readonly IToolConfigurationProvider _toolConfigurationProvider;

    // Dependency injection requires a public constructor.
    public AzureSignToolSigner(
        IToolConfigurationProvider toolConfigurationProvider,
        ISignatureAlgorithmProvider signatureAlgorithmProvider,
        ICertificateProvider certificateProvider,
        ILogger logger)
    {
        ArgumentNullException.ThrowIfNull(toolConfigurationProvider, nameof(toolConfigurationProvider));
        ArgumentNullException.ThrowIfNull(signatureAlgorithmProvider, nameof(signatureAlgorithmProvider));
        ArgumentNullException.ThrowIfNull(certificateProvider, nameof(certificateProvider));
        ArgumentNullException.ThrowIfNull(logger, nameof(logger));

        // Each field is assigned exactly once; the previous version redundantly
        // assigned _signatureAlgorithmProvider twice.
        _toolConfigurationProvider = toolConfigurationProvider;
        _signatureAlgorithmProvider = signatureAlgorithmProvider;
        _certificateProvider = certificateProvider;
        _logger = logger;

        _signableFileTypes = new List<ISignableFileType>()
        {
            // For PowerShell file extensions, see https://github.com/PowerShell/PowerShell/blob/2f4f585e7fe075f5c1669397ae738c554fa18391/src/System.Management.Automation/security/SecurityManager.cs#L97C1-L106C10
            new SignableFileTypeByExtension(
                ".appx",
                ".appxbundle",
                ".cab",
                ".cat",
                ".cdxml", // PowerShell cmdlet definition XML
                ".dll",
                ".eappx",
                ".eappxbundle",
                ".emsix",
                ".emsixbundle",
                ".exe",
                ".js",
                ".msi",
                ".msix",
                ".msixbundle",
                ".msm",
                ".msp",
                ".mst",
                ".ocx",
                ".ps1", // PowerShell script files
                ".ps1xml", // PowerShell display configuration files
                ".psd1", // PowerShell data files
                ".psm1", // PowerShell module files
                ".stl",
                ".sys",
                ".vbs",
                ".vxd",
                ".winmd"
            ),
            new DynamicsBusinessCentralAppFileType()
        };
    }

    // True when any registered signable file type matches the file.
    public bool CanSign(FileInfo file)
    {
        ArgumentNullException.ThrowIfNull(file, nameof(file));

        foreach (ISignableFileType signableFileType in _signableFileTypes)
        {
            if (signableFileType.IsMatch(file))
            {
                return true;
            }
        }

        return false;
    }

    // Signs all files using one shared signer instance. Throws SigningException
    // for any file that still fails after all retry attempts.
    public async Task SignAsync(IEnumerable<FileInfo> files, SignOptions options)
    {
        ArgumentNullException.ThrowIfNull(files, nameof(files));
        ArgumentNullException.ThrowIfNull(options, nameof(options));

        _logger.LogInformation(Resources.AzureSignToolSignatureProviderSigning, files.Count());

        TimeStampConfiguration timestampConfiguration;

        if (options.TimestampService is null)
        {
            timestampConfiguration = TimeStampConfiguration.None;
        }
        else
        {
            timestampConfiguration = new(options.TimestampService.AbsoluteUri, options.TimestampHashAlgorithm, TimeStampType.RFC3161);
        }

        using (X509Certificate2 certificate = await _certificateProvider.GetCertificateAsync())
        using (RSA rsa = await _signatureAlgorithmProvider.GetRsaAsync())
        using (AuthenticodeKeyVaultSigner signer = new(
            rsa,
            certificate,
            options.FileHashAlgorithm,
            timestampConfiguration))
        {
            // Partition files: STA-required files (.js, .vbs) are signed sequentially
            // to avoid blocking ThreadPool threads (each STA call uses thread.Join()).
            // Non-STA files are signed in parallel as before.
            List<FileInfo> staFiles = new();
            List<FileInfo> nonStaFiles = new();

            foreach (FileInfo file in files)
            {
                if (StaThreadExtensions.Contains(file.Extension))
                {
                    staFiles.Add(file);
                }
                else
                {
                    nonStaFiles.Add(file);
                }
            }

            foreach (FileInfo file in staFiles)
            {
                if (!await SignAsync(signer, file, options))
                {
                    string message = string.Format(CultureInfo.CurrentCulture, Resources.SigningFailed, file.FullName);

                    throw new SigningException(message);
                }
            }

            await Parallel.ForEachAsync(nonStaFiles, async (file, state) =>
            {
                if (!await SignAsync(signer, file, options))
                {
                    string message = string.Format(CultureInfo.CurrentCulture, Resources.SigningFailed, file.FullName);

                    throw new SigningException(message);
                }
            });
        }
    }

    // Inspired from https://github.com/squaredup/bettersigntool/blob/master/bettersigntool/bettersigntool/SignCommand.cs
    // Retries a single file up to 3 attempts with growing backoff
    // (5s, then 5s^1.5, ...); returns false once all attempts are exhausted.
    private async Task<bool> SignAsync(
        AuthenticodeKeyVaultSigner signer,
        FileInfo file,
        SignOptions options)
    {
        TimeSpan retry = TimeSpan.FromSeconds(5);
        const int maxAttempts = 3;
        int attempt = 1;

        do
        {
            if (attempt > 1)
            {
                _logger.LogInformation(Resources.SigningAttempt, attempt, maxAttempts, retry.TotalSeconds);
                await Task.Delay(retry);
                retry = TimeSpan.FromSeconds(Math.Pow(retry.TotalSeconds, 1.5));
            }

            if (RunSignTool(signer, file, options))
            {
                return true;
            }

            ++attempt;
        } while (attempt <= maxAttempts);

        _logger.LogError(Resources.SigningFailedAfterAllAttempts);

        return false;
    }

    // Performs one signing attempt; routes STA-required file types through a
    // dedicated STA thread. Returns true only when the tool reports S_OK.
    private bool RunSignTool(AuthenticodeKeyVaultSigner signer, FileInfo file, SignOptions options)
    {
        FileInfo manifestFile = _toolConfigurationProvider.SignToolManifest;

        _logger.LogInformation(Resources.SigningFile, file.FullName);

        bool success = false;
        int code = 0;

        try
        {
            if (StaThreadExtensions.Contains(file.Extension))
            {
                code = RunOnStaThread(() => SignFileCore(signer, file, options, manifestFile));
            }
            else
            {
                code = SignFileCore(signer, file, options, manifestFile);
            }

            success = code == S_OK;
        }
        catch (Exception e)
        {
            _logger.LogError(e, e.Message);
        }

        if (success)
        {
            _logger.LogInformation(Resources.SigningSucceeded, file.FullName);
            return true;
        }

        _logger.LogError(Resources.SigningFailedWithError, code);
        return false;
    }

    // Invokes the actual SignFile call inside an activation context for the
    // sign tool manifest. Virtual for test overrides.
    internal virtual int SignFileCore(
        AuthenticodeKeyVaultSigner signer,
        FileInfo file,
        SignOptions options,
        FileInfo manifestFile)
    {
        using (Kernel32.ActivationContext ctx = new(manifestFile))
        {
            return signer.SignFile(
                file.FullName,
                options.Description ?? string.Empty,
                options.DescriptionUrl?.AbsoluteUri ?? string.Empty,
                pageHashing: null,
                _logger);
        }
    }

    // Runs the delegate on a new STA thread (Windows only), blocking until it
    // completes; rethrows any exception with its original stack trace.
    private static T RunOnStaThread<T>(Func<T> func)
    {
        if (!OperatingSystem.IsWindows())
        {
            throw new PlatformNotSupportedException();
        }

        T result = default!;
        Exception? exception = null;

        Thread thread = new(() =>
        {
            try
            {
                result = func();
            }
            catch (Exception ex)
            {
                exception = ex;
            }
        });

        thread.SetApartmentState(ApartmentState.STA);
        thread.Start();
        thread.Join();

        if (exception is not null)
        {
            ExceptionDispatchInfo.Capture(exception).Throw();
        }

        return result;
    }
}
}
================================================
FILE: src/Sign.Core/DataFormatSigners/ClickOnceSigner.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.Globalization;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.FileSystemGlobbing.Abstractions;
using Microsoft.Extensions.Logging;
namespace Sign.Core
{
internal sealed class ClickOnceSigner : RetryingSigner, IDataFormatSigner
{
private readonly Lazy _aggregatingSigner;
private readonly ICertificateProvider _certificateProvider;
private readonly ISignatureAlgorithmProvider _signatureAlgorithmProvider;
private readonly IMageCli _mageCli;
private readonly IManifestSigner _manifestSigner;
private readonly ParallelOptions _parallelOptions = new() { MaxDegreeOfParallelism = 4 };
private readonly IFileMatcher _fileMatcher;
// Dependency injection requires a public constructor.
public ClickOnceSigner(
    ISignatureAlgorithmProvider signatureAlgorithmProvider,
    ICertificateProvider certificateProvider,
    IServiceProvider serviceProvider,
    IMageCli mageCli,
    IManifestSigner manifestSigner,
    ILogger logger,
    IFileMatcher fileMatcher)
    : base(logger)
{
    // logger is validated by the base class.
    ArgumentNullException.ThrowIfNull(signatureAlgorithmProvider, nameof(signatureAlgorithmProvider));
    ArgumentNullException.ThrowIfNull(certificateProvider, nameof(certificateProvider));
    ArgumentNullException.ThrowIfNull(serviceProvider, nameof(serviceProvider));
    ArgumentNullException.ThrowIfNull(mageCli, nameof(mageCli));
    ArgumentNullException.ThrowIfNull(manifestSigner, nameof(manifestSigner));
    ArgumentNullException.ThrowIfNull(fileMatcher, nameof(fileMatcher));

    _signatureAlgorithmProvider = signatureAlgorithmProvider;
    _certificateProvider = certificateProvider;
    _mageCli = mageCli;
    _manifestSigner = manifestSigner;
    _fileMatcher = fileMatcher;

    // Resolve the aggregating signer lazily: resolving it eagerly here would
    // create a dependency loop, because the aggregator depends on this signer.
    _aggregatingSigner = new Lazy(() => serviceProvider.GetService()!);
}
// True for ClickOnce/VSTO deployment manifests (.vsto, .application).
public bool CanSign(FileInfo file)
{
    ArgumentNullException.ThrowIfNull(file, nameof(file));

    // Case-insensitive comparison without the intermediate lowercased string
    // allocation; consistent with AppInstallerServiceSigner.CanSign, which
    // compares extensions with StringComparison.OrdinalIgnoreCase.
    return string.Equals(file.Extension, ".vsto", StringComparison.OrdinalIgnoreCase)
        || string.Equals(file.Extension, ".application", StringComparison.OrdinalIgnoreCase);
}
/// <summary>
/// Signs ClickOnce/VSTO deployments. For each deployment manifest (.application/.vsto),
/// the files in its directory are signed in dependency order: data (.deploy) files and
/// setup.exe first, then the application (.manifest) file, then deployment manifests
/// (innermost first). Fix: removed the unused local `timeStampUrl`.
/// </summary>
/// <param name="files">Deployment manifest files to sign.</param>
/// <param name="options">Signing options (application name, publisher, file matching, etc.).</param>
/// <exception cref="SigningException">Thrown when signing any manifest fails.</exception>
public async Task SignAsync(IEnumerable files, SignOptions options)
{
    ArgumentNullException.ThrowIfNull(files, nameof(files));
    ArgumentNullException.ThrowIfNull(options, nameof(options));

    Logger.LogInformation(Resources.ClickOnceSignatureProviderSigning, files.Count());

    // Base mage.exe arguments: sign with the sha256RSA algorithm.
    var args = "-a sha256RSA";

    if (!string.IsNullOrWhiteSpace(options.ApplicationName))
    {
        args += $@" -n ""{options.ApplicationName}""";
    }

    using (X509Certificate2 certificate = await _certificateProvider.GetCertificateAsync())
    using (RSA rsaPrivateKey = await _signatureAlgorithmProvider.GetRsaAsync())
    {
        // This outer loop is for a deployment manifest file (.application/.vsto).
        await Parallel.ForEachAsync(files, _parallelOptions, async (file, state) =>
        {
            // We need to be explicit about the order these files are signed in. The data files must be signed first
            // Then the .manifest file
            // Then the nested clickonce/vsto file
            // finally the top-level clickonce/vsto file

            // It's possible that there might not actually be a .manifest file or any data files if the user just
            // wants to re-sign an existing deployment manifest because e.g. the update URL has changed but nothing
            // else has. In that case we don't need to touch the other files and we can just sign the deployment manifest.

            // Look for the data files first - these are .deploy files
            // we need to rename them, sign, then restore the name
            DirectoryInfo clickOnceDirectory = file.Directory!;

            // get the files, _including_ the SignOptions, so that we only actually try to sign the files specified.
            // this is useful if e.g. you don't want to sign third-party assemblies that your application depends on
            // but you do still want to sign your own assemblies.
            List filteredFiles = GetFiles(clickOnceDirectory, options).ToList();
            List deployFilesToSign = filteredFiles
                .Where(f => ".deploy".Equals(f.Extension, StringComparison.OrdinalIgnoreCase))
                .ToList();
            List contentFiles = new();

            RemoveDeployExtension(deployFilesToSign, contentFiles);

            List filesToSign = contentFiles.ToList(); // copy it since we may add setup.exe
            IEnumerable setupExe = filteredFiles.Where(f => ".exe".Equals(f.Extension, StringComparison.OrdinalIgnoreCase));
            filesToSign.AddRange(setupExe);

            // sign the inner files
            await _aggregatingSigner.Value.SignAsync(filesToSign!, options);

            // rename the rest of the deploy files since signing the manifest will need them.
            // this uses the overload of GetFiles() that ignores file matching options because we
            // require all files to be named correctly in order to generate valid manifests.
            List filesExceptFiltered = GetFiles(clickOnceDirectory).Except(filteredFiles, FileInfoComparer.Instance).ToList();
            List deployFiles = filesExceptFiltered
                .Where(f => ".deploy".Equals(f.Extension, StringComparison.OrdinalIgnoreCase))
                .ToList();

            RemoveDeployExtension(deployFiles, contentFiles);

            // at this point contentFiles has all deploy files renamed

            // Inner files are now signed
            // now look for the manifest file and sign that if we have one
            FileInfo? manifestFile = filteredFiles.SingleOrDefault(f => ".manifest".Equals(f.Extension, StringComparison.OrdinalIgnoreCase));

            // manifestFile may be null here; fileArgs is only used when it is not.
            string fileArgs = $@"-update ""{manifestFile}"" {args}";

            if (manifestFile is not null && !await SignAsync(fileArgs, manifestFile, rsaPrivateKey, certificate, options))
            {
                string message = string.Format(CultureInfo.CurrentCulture, Resources.SigningFailed, manifestFile.FullName);

                throw new SigningException(message);
            }

            string publisherParam = string.Empty;

            if (string.IsNullOrEmpty(options.PublisherName))
            {
                // No explicit publisher: derive one from the certificate subject DN.
                string publisherName = certificate.SubjectName.Name;

                // get the DN. it may be quoted
                publisherParam = $@"-pub ""{publisherName.Replace("\"", "")}""";
            }
            else
            {
                publisherParam = $"-pub \"{options.PublisherName}\"";
            }

            // Now sign deployment manifest files (.application/.vsto).
            // Order by desending length to put the inner one first
            List deploymentManifestFiles = filteredFiles
                .Where(f => ".vsto".Equals(f.Extension, StringComparison.OrdinalIgnoreCase) ||
                            ".application".Equals(f.Extension, StringComparison.OrdinalIgnoreCase))
                .Select(f => new { file = f, f.FullName.Length })
                .OrderByDescending(f => f.Length)
                .Select(f => f.file)
                .ToList();

            foreach (FileInfo deploymentManifestFile in deploymentManifestFiles)
            {
                fileArgs = $@"-update ""{deploymentManifestFile.FullName}"" {args} {publisherParam}";

                if (manifestFile is not null)
                {
                    fileArgs += $@" -appm ""{manifestFile.FullName}""";
                }

                if (options.DescriptionUrl is not null)
                {
                    // NOTE(review): the URL is passed unquoted — confirm URLs never contain spaces here.
                    fileArgs += $@" -SupportURL {options.DescriptionUrl.AbsoluteUri}";
                }

                if (!await SignAsync(fileArgs, deploymentManifestFile, rsaPrivateKey, certificate, options))
                {
                    string message = string.Format(CultureInfo.CurrentCulture, Resources.SigningFailed, deploymentManifestFile.FullName);

                    throw new SigningException(message);
                }
            }

            // restore the .deploy files
            foreach (FileInfo contentFile in contentFiles)
            {
                File.Move(contentFile.FullName, $"{contentFile.FullName}.deploy");
            }
        });
    }
}
/// <summary>
/// Renames each ".deploy" file on disk to drop that extension (e.g.
/// MyApp.dll.deploy => MyApp.dll) and records the renamed files in contentFiles.
/// </summary>
private static void RemoveDeployExtension(List deployFilesToSign, List contentFiles)
{
    foreach (FileInfo deployFile in deployFilesToSign)
    {
        // Removing only the last extension turns "Name.ext.deploy" into "Name.ext".
        string renamedPath = Path.Combine(
            deployFile.DirectoryName!,
            Path.GetFileNameWithoutExtension(deployFile.Name));
        FileInfo renamedFile = new(renamedPath);

        File.Move(deployFile.FullName, renamedFile.FullName);

        contentFiles.Add(renamedFile);
    }
}
/// <summary>
/// Runs mage with the given arguments; on success, attaches the manifest signature.
/// Returns true only when both steps complete.
/// </summary>
protected override async Task SignCoreAsync(string? args, FileInfo file, RSA rsaPrivateKey, X509Certificate2 certificate, SignOptions options)
{
    int exitCode = await _mageCli.RunAsync(args);

    if (exitCode != 0)
    {
        Logger.LogError(Resources.SigningFailedWithError, exitCode);

        return false;
    }

    // Now add the signature
    _manifestSigner.Sign(file, certificate, rsaPrivateKey, options);

    return true;
}
// Enumerates every file beneath the ClickOnce root, recursively, with no
// include/exclude filtering applied.
private IEnumerable GetFiles(DirectoryInfo clickOnceRoot)
    => clickOnceRoot.EnumerateFiles("*", SearchOption.AllDirectories);
// Enumerates files beneath the ClickOnce root, honoring the include matcher
// (everything when absent) and then removing anything the anti-matcher excludes.
private IEnumerable GetFiles(DirectoryInfo clickOnceRoot, SignOptions options)
{
    IEnumerable files = options.Matcher is null
        ? GetFiles(clickOnceRoot) // no include filter -> take everything
        : _fileMatcher.EnumerateMatches(new DirectoryInfoWrapper(clickOnceRoot), options.Matcher);

    if (options.AntiMatcher is not null)
    {
        IEnumerable excluded = _fileMatcher.EnumerateMatches(new DirectoryInfoWrapper(clickOnceRoot), options.AntiMatcher);

        files = files.Except(excluded, FileInfoComparer.Instance).ToList();
    }

    return files;
}
/// <summary>
/// Copies every file from the deployment manifest's directory to the destination,
/// preserving relative paths. All files are copied regardless of matching options,
/// because they must all be present to generate valid manifests.
/// </summary>
public void CopySigningDependencies(FileInfo deploymentManifestFile, DirectoryInfo destination, SignOptions signOptions)
{
    foreach (FileInfo file in GetFiles(deploymentManifestFile.Directory!))
    {
        // Skip the manifest itself: it is already handled, and a duplicate copy under
        // the 'real' name would be copied back and overwrite the signed one.
        if (file.FullName == deploymentManifestFile.FullName)
        {
            continue;
        }

        string relativePath = Path.GetRelativePath(deploymentManifestFile.Directory!.FullName, file.FullName);
        string destinationPath = Path.Combine(destination.FullName, relativePath);

        Directory.CreateDirectory(Path.GetDirectoryName(destinationPath)!);
        file.CopyTo(destinationPath, overwrite: true);
    }
}
}
}
================================================
FILE: src/Sign.Core/DataFormatSigners/DefaultSigner.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using Microsoft.Extensions.DependencyInjection;
namespace Sign.Core
{
internal sealed class DefaultSigner : IDefaultDataFormatSigner
{
    /// <summary>
    /// The signer used when no more specific signer claims a file.
    /// </summary>
    public IDataFormatSigner Signer { get; }

    // Dependency injection requires a public constructor.
    public DefaultSigner(IServiceProvider serviceProvider)
    {
        ArgumentNullException.ThrowIfNull(serviceProvider, nameof(serviceProvider));

        // Prefer the signer marked with IAzureSignToolDataFormatSigner; if none is
        // registered, fall back to a signer that never claims any file.
        IDataFormatSigner? selected = null;

        foreach (IDataFormatSigner candidate in serviceProvider.GetServices())
        {
            if (candidate is IAzureSignToolDataFormatSigner)
            {
                selected = candidate;
                break;
            }
        }

        Signer = selected ?? new DoNothingDefaultDataFormatSigner();
    }

    public bool CanSign(FileInfo file) => Signer.CanSign(file);

    public Task SignAsync(IEnumerable files, SignOptions options) => Signer.SignAsync(files, options);

    // Fallback used when no AzureSignTool-backed signer is registered. CanSign always
    // returns false, so SignAsync should never be reached.
    private sealed class DoNothingDefaultDataFormatSigner : IDataFormatSigner
    {
        public bool CanSign(FileInfo file) => false;

        public Task SignAsync(IEnumerable files, SignOptions options) => throw new NotImplementedException();
    }
}
}
================================================
FILE: src/Sign.Core/DataFormatSigners/DistinguishedNameParser.cs
================================================
#pragma warning disable IDE0073 // The file header does not match the required text
// The MIT License (MIT)
//
// Copyright (c) 2015 Kevin Jones
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
using System.Runtime.InteropServices;
namespace Sign.Core
{
// From https://github.com/vcsjones/FiddlerCert/blob/06642751314a9ff224cb37a1cd7c14b86062a119/VCSJones.FiddlerCert/DistinguishedNameParser.cs
/// <summary>
/// Splits an X.500 distinguished name string (e.g. "CN=Foo, O=Bar") into a map of
/// RDN key to values using the native ntdsapi DsGetRdnW function. Keys are compared
/// case-insensitively; repeated keys accumulate multiple values.
/// </summary>
internal static class DistinguishedNameParser
{
    internal static Dictionary> Parse(string distingishedName)
    {
        var result = new Dictionary>(StringComparer.CurrentCultureIgnoreCase);
        var distinguishedNamePtr = IntPtr.Zero;
        try
        {
            // Marshal the DN into unmanaged memory for the native call.
            distinguishedNamePtr = Marshal.StringToCoTaskMemUni(distingishedName);
            //We need to copy the IntPtr.
            //The copy is necessary because DsGetRdnW modifies the pointer to advance it. We need to keep
            //The original so we can free it later, otherwise we'll leak memory.
            var distinguishedNamePtrCopy = distinguishedNamePtr;
            // pcDN is the number of characters remaining; DsGetRdnW decrements it as it
            // consumes each RDN. A zero return from DsGetRdnW indicates success.
            var pcDN = (uint)distingishedName.Length;
            while (pcDN != 0 && Ntdsapi.DsGetRdnW(ref distinguishedNamePtrCopy, ref pcDN, out var ppKey, out var pcKey, out var ppVal, out var pcVal) == 0)
            {
                // Skip RDNs with an empty key or value.
                if (pcKey == 0 || pcVal == 0)
                {
                    continue;
                }
                var key = Marshal.PtrToStringUni(ppKey, (int)pcKey);
                var value = Marshal.PtrToStringUni(ppVal, (int)pcVal);
                if (result.ContainsKey(key))
                {
                    result[key].Add(value);
                }
                else
                {
                    result.Add(key, new List { value });
                }
                // NOTE(review): this break is redundant — the loop condition re-checks
                // pcDN != 0 before the next native call; presumably defensive.
                if (pcDN == 0)
                {
                    break;
                }
            }
            return result;
        }
        finally
        {
            // Free the original (unadvanced) pointer; the copy has been moved forward
            // by DsGetRdnW and must not be freed.
            Marshal.FreeCoTaskMem(distinguishedNamePtr);
        }
    }
}
}
================================================
FILE: src/Sign.Core/DataFormatSigners/DynamicsBusinessCentralAppFileType.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
namespace Sign.Core
{
/// <summary>
/// Detects Dynamics 365 Business Central app packages: files with the ".app"
/// extension whose content begins with the "NAVX" magic bytes.
/// </summary>
internal sealed class DynamicsBusinessCentralAppFileType : ISignableFileType
{
    private const string FileExtension = ".app";
    private readonly byte[] _expectedHeader;

    internal DynamicsBusinessCentralAppFileType()
    {
        _expectedHeader = new byte[] { 0x4e, 0x41, 0x56, 0x58 }; // NAVX
    }

    /// <summary>
    /// Returns true when the file has the ".app" extension and starts with the
    /// NAVX magic header.
    /// </summary>
    public bool IsMatch(FileInfo file)
    {
        ArgumentNullException.ThrowIfNull(file, nameof(file));

        if (!FileExtension.Equals(file.Extension, StringComparison.OrdinalIgnoreCase))
        {
            return false;
        }

        using (FileStream stream = file.OpenRead())
        {
            var header = new byte[_expectedHeader.Length];
            int totalRead = 0;

            // Fix: Stream.Read may legally return fewer bytes than requested even when
            // more data is available, so loop until the header is fully read or EOF.
            while (totalRead < header.Length)
            {
                int bytesRead = stream.Read(header, totalRead, header.Length - totalRead);

                if (bytesRead == 0)
                {
                    // The file is shorter than the magic header.
                    return false;
                }

                totalRead += bytesRead;
            }

            return header.SequenceEqual(_expectedHeader);
        }
    }
}
}
================================================
FILE: src/Sign.Core/DataFormatSigners/IAggregatingDataFormatSigner.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
namespace Sign.Core
{
// Marker interface for the signer that delegates to the other registered
// IDataFormatSigner implementations (resolved lazily by ClickOnceSigner, via
// IServiceProvider, to sign a deployment's inner files without a constructor
// dependency loop).
internal interface IAggregatingDataFormatSigner : IDataFormatSigner
{
}
}
================================================
FILE: src/Sign.Core/DataFormatSigners/IAzureSignToolDataFormatSigner.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
namespace Sign.Core
{
// Marker interface identifying the AzureSignTool-backed signer; DefaultSigner
// picks an implementation of this interface as the default/fallback signer.
internal interface IAzureSignToolDataFormatSigner : IDataFormatSigner
{
}
}
================================================
FILE: src/Sign.Core/DataFormatSigners/IDataFormatSigner.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
namespace Sign.Core
{
/// <summary>
/// A signer for one data/container format (e.g. NuGet packages, VSIX extensions,
/// ClickOnce deployments).
/// </summary>
internal interface IDataFormatSigner
{
    // Returns true when this signer recognizes the file (implementations in this
    // project decide by file extension and/or content).
    bool CanSign(FileInfo file);

    // Signs all of the given files using the supplied options.
    Task SignAsync(IEnumerable files, SignOptions options);

    // Some signature mechanisms (e.g. ClickOnce) require extra files alongside the main file to be signed.
    // We can't rely on the user specifying everything (and even if we did, we sign all inputs in parallel
    // so we'd have to add extra synchronisation) so this method instructs an implementation to grab all
    // dependencies of a file and copy them to the specified directory.
    void CopySigningDependencies(FileInfo file, DirectoryInfo destination, SignOptions options) { }
}
}
================================================
FILE: src/Sign.Core/DataFormatSigners/IDefaultDataFormatSigner.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
namespace Sign.Core
{
/// <summary>
/// Exposes the signer to use when no format-specific signer claims a file
/// (see DefaultSigner for the selection logic).
/// </summary>
internal interface IDefaultDataFormatSigner
{
    // The chosen default signer; never null.
    IDataFormatSigner Signer { get; }
}
}
================================================
FILE: src/Sign.Core/DataFormatSigners/IManifestSigner.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
namespace Sign.Core
{
/// <summary>
/// Signs a ClickOnce/VSTO XML manifest file in place with the given certificate
/// and RSA private key.
/// </summary>
internal interface IManifestSigner
{
    void Sign(FileInfo file, X509Certificate2 certificate, RSA rsaPrivateKey, SignOptions options);
}
}
================================================
FILE: src/Sign.Core/DataFormatSigners/ISignableFileType.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
namespace Sign.Core
{
/// <summary>
/// A predicate describing one kind of signable file (by extension and/or content).
/// </summary>
internal interface ISignableFileType
{
    // Returns true when the file is of this signable type.
    bool IsMatch(FileInfo file);
}
}
================================================
FILE: src/Sign.Core/DataFormatSigners/ManifestSigner.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.Deployment.Internal.CodeSigning;
using System.Runtime.InteropServices;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using System.Xml;
namespace Sign.Core
{
internal sealed class ManifestSigner : IManifestSigner
{
    /// <summary>
    /// Signs a ClickOnce/VSTO XML manifest in place using the CMI manifest signing
    /// classes, optionally timestamping via the configured service URL.
    /// </summary>
    /// <exception cref="ApplicationException">Wraps any signing failure; connectivity
    /// failures to the timestamp service get the "TimestampUrlNotFound" message.</exception>
    public void Sign(FileInfo file, X509Certificate2 certificate, RSA rsaPrivateKey, SignOptions options)
    {
        try
        {
            // PreserveWhitespace stops the XML loader from normalizing whitespace,
            // so the signed document is saved back with its layout intact.
            XmlDocument manifestDom = new()
            {
                PreserveWhitespace = true
            };
            manifestDom.Load(file.FullName);
            SignedCmiManifest2 signedCmiManifest2 = new(manifestDom);
            CmiManifestSigner2 signer;
            if (rsaPrivateKey is RSACryptoServiceProvider rsaProvider)
            {
                // NOTE(review): GetFixedRSACryptoServiceProvider presumably wraps legacy
                // CSP keys so they can produce the required signature — confirm in
                // SignedCmiManifest2.
                signer = new CmiManifestSigner2(SignedCmiManifest2.GetFixedRSACryptoServiceProvider(rsaProvider), certificate);
            }
            else
            {
                signer = new CmiManifestSigner2(rsaPrivateKey, certificate);
            }
            if (options.TimestampService is null)
            {
                signedCmiManifest2.Sign(signer);
            }
            else
            {
                signedCmiManifest2.Sign(signer, options.TimestampService.AbsoluteUri);
            }
            // The signature is embedded in the DOM; write it back over the original file.
            manifestDom.Save(file.FullName);
        }
        catch (Exception ex)
        {
            // Map WinINet connectivity HRESULTs to a clearer error:
            // -2147012889 = 0x80072EE7 (12007, name not resolved)
            // -2147012867 = 0x80072EFD (12029, cannot connect)
            throw Marshal.GetHRForException(ex) switch
            {
                -2147012889 or -2147012867 => new ApplicationException("TimestampUrlNotFound", ex),
                _ => new ApplicationException(ex.Message, ex)
            };
        }
    }
}
}
================================================
FILE: src/Sign.Core/DataFormatSigners/NuGetSigner.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.Globalization;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using Microsoft.Extensions.Logging;
namespace Sign.Core
{
/// <summary>
/// Signs NuGet packages (.nupkg/.snupkg) by delegating to the NuGet sign tool,
/// with the retry behavior inherited from RetryingSigner.
/// </summary>
internal sealed class NuGetSigner : RetryingSigner, IDataFormatSigner
{
    private readonly ICertificateProvider _certificateProvider;
    private readonly ISignatureAlgorithmProvider _signatureAlgorithmProvider;
    private readonly INuGetSignTool _nuGetSignTool;

    // Dependency injection requires a public constructor.
    public NuGetSigner(
        ISignatureAlgorithmProvider signatureAlgorithmProvider,
        ICertificateProvider certificateProvider,
        INuGetSignTool nuGetSignTool,
        ILogger logger)
        : base(logger)
    {
        ArgumentNullException.ThrowIfNull(signatureAlgorithmProvider, nameof(signatureAlgorithmProvider));
        ArgumentNullException.ThrowIfNull(certificateProvider, nameof(certificateProvider));
        ArgumentNullException.ThrowIfNull(nuGetSignTool, nameof(nuGetSignTool));

        _signatureAlgorithmProvider = signatureAlgorithmProvider;
        _certificateProvider = certificateProvider;
        _nuGetSignTool = nuGetSignTool;
    }

    /// <summary>Returns true for NuGet package files (.nupkg, .snupkg).</summary>
    public bool CanSign(FileInfo file)
    {
        ArgumentNullException.ThrowIfNull(file, nameof(file));

        string extension = file.Extension;

        return extension.Equals(".nupkg", StringComparison.OrdinalIgnoreCase)
            || extension.Equals(".snupkg", StringComparison.OrdinalIgnoreCase);
    }

    /// <summary>
    /// Signs all given packages in parallel; throws SigningException listing every
    /// package that could not be signed.
    /// </summary>
    public async Task SignAsync(IEnumerable files, SignOptions options)
    {
        ArgumentNullException.ThrowIfNull(files, nameof(files));
        ArgumentNullException.ThrowIfNull(options, nameof(options));

        using (X509Certificate2 certificate = await _certificateProvider.GetCertificateAsync())
        using (RSA rsa = await _signatureAlgorithmProvider.GetRsaAsync())
        {
            // Kick off all signing tasks, remembering which file each belongs to.
            var signingTasks = files
                .Select(file => new
                {
                    File = file,
                    Task = SignAsync(args: null, file, rsa, certificate, options)
                })
                .ToList();

            await Task.WhenAll(signingTasks.Select(pair => pair.Task));

            var failures = new List<string>();

            foreach (var pair in signingTasks)
            {
                if (!pair.Task.Result)
                {
                    failures.Add(pair.File.FullName);
                }
            }

            if (failures.Count > 0)
            {
                string message = string.Format(CultureInfo.CurrentCulture, Resources.SigningFailed, string.Join(", ", failures));

                throw new SigningException(message);
            }
        }
    }

    protected override Task SignCoreAsync(string? args, FileInfo file, RSA rsaPrivateKey, X509Certificate2 certificate, SignOptions options)
    {
        // args is unused for NuGet; the sign tool takes the key/cert directly.
        return _nuGetSignTool.SignAsync(file, rsaPrivateKey, certificate, options);
    }
}
}
================================================
FILE: src/Sign.Core/DataFormatSigners/RSAPKCS1SHA256SignatureDescription.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.Security.Cryptography;
namespace Sign.Core
{
// This type and its default constructor are public because:
// "Algorithms added to CryptoConfig must be accessible from outside their assembly."
// See https://learn.microsoft.com/en-us/dotnet/api/system.security.cryptography.cryptoconfig.addalgorithm?view=net-7.0#exceptions
/// <summary>
/// RSA PKCS#1 signature description using SHA-256 as the digest algorithm, for
/// registration with CryptoConfig.
/// </summary>
public sealed class RSAPKCS1SHA256SignatureDescription : RSAPKCS1SignatureDescription
{
    public RSAPKCS1SHA256SignatureDescription()
        : base("SHA256")
    {
    }

    /// <summary>Creates the SHA-256 hash algorithm used to digest the data.</summary>
    public sealed override HashAlgorithm CreateDigest() => SHA256.Create();
}
}
================================================
FILE: src/Sign.Core/DataFormatSigners/RSAPKCS1SignatureDescription.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.Security.Cryptography;
namespace Sign.Core
{
/// <summary>
/// Base signature description for RSA PKCS#1 signatures; derived classes supply the
/// digest algorithm by name and by overriding <see cref="CreateDigest"/>.
/// </summary>
public abstract class RSAPKCS1SignatureDescription : SignatureDescription
{
    public RSAPKCS1SignatureDescription(string hashAlgorithmName)
    {
        KeyAlgorithm = typeof(RSA).AssemblyQualifiedName;
        FormatterAlgorithm = typeof(RSAPKCS1SignatureFormatter).AssemblyQualifiedName;
        DeformatterAlgorithm = typeof(RSAPKCS1SignatureDeformatter).AssemblyQualifiedName;
        DigestAlgorithm = hashAlgorithmName;
    }

    /// <summary>Creates a verification deformatter bound to the key and digest.</summary>
    public sealed override AsymmetricSignatureDeformatter CreateDeformatter(AsymmetricAlgorithm key)
    {
        var deformatter = (AsymmetricSignatureDeformatter)CryptoConfig.CreateFromName(DeformatterAlgorithm!)!;

        deformatter.SetKey(key);
        deformatter.SetHashAlgorithm(DigestAlgorithm!);

        return deformatter;
    }

    /// <summary>Creates a signing formatter bound to the key and digest.</summary>
    public sealed override AsymmetricSignatureFormatter CreateFormatter(AsymmetricAlgorithm key)
    {
        var formatter = (AsymmetricSignatureFormatter)CryptoConfig.CreateFromName(FormatterAlgorithm!)!;

        formatter.SetKey(key);
        formatter.SetHashAlgorithm(DigestAlgorithm!);

        return formatter;
    }

    /// <summary>Creates the hash algorithm used to digest the data.</summary>
    public abstract override HashAlgorithm CreateDigest();
}
}
================================================
FILE: src/Sign.Core/DataFormatSigners/RetryingSigner.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using Microsoft.Extensions.Logging;
namespace Sign.Core
{
/// <summary>
/// Base class that retries a signing operation up to three times with a growing
/// delay between attempts.
/// </summary>
internal abstract class RetryingSigner
{
    protected ILogger Logger { get; }

    // Non-private for testing purposes.
    // Delay before the second attempt; grows as delaySeconds^1.5 after each retry.
    internal TimeSpan Retry { get; set; } = TimeSpan.FromSeconds(5);

    protected RetryingSigner(ILogger logger)
    {
        ArgumentNullException.ThrowIfNull(logger, nameof(logger));

        Logger = logger;
    }

    // Performs a single signing attempt; returns true on success.
    protected abstract Task SignCoreAsync(string? args, FileInfo file, RSA rsaPrivateKey, X509Certificate2 certificate, SignOptions options);

    // Inspired from https://github.com/squaredup/bettersigntool/blob/master/bettersigntool/bettersigntool/SignCommand.cs
    /// <summary>
    /// Attempts to sign a file, retrying up to three attempts total. Waits before each
    /// retry (5s, then ~11.2s) and logs each retry, the final success, or total failure.
    /// </summary>
    protected async Task SignAsync(string? args, FileInfo file, RSA rsaPrivateKey, X509Certificate2 publicCertificate, SignOptions options)
    {
        const int maxAttempts = 3;
        TimeSpan delay = Retry;

        for (var attempt = 1; attempt <= maxAttempts; attempt++)
        {
            if (attempt > 1)
            {
                Logger.LogInformation(Resources.SigningAttempt, attempt, maxAttempts, delay.TotalSeconds);
                await Task.Delay(delay);
                delay = TimeSpan.FromSeconds(Math.Pow(delay.TotalSeconds, 1.5));
            }

            if (await SignCoreAsync(args, file, rsaPrivateKey, publicCertificate, options))
            {
                Logger.LogInformation(Resources.SigningSucceeded, file.FullName);
                return true;
            }
        }

        Logger.LogError(Resources.SigningFailedAfterAllAttempts);
        return false;
    }
}
}
================================================
FILE: src/Sign.Core/DataFormatSigners/SignOptions.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.Security.Cryptography;
using Microsoft.Extensions.FileSystemGlobbing;
namespace Sign.Core
{
/// <summary>
/// Immutable options describing a signing operation. Fix: removed the property
/// initializers on FileHashAlgorithm/TimestampHashAlgorithm — every constructor
/// always assigns them, so the initializers were dead stores.
/// </summary>
internal sealed class SignOptions
{
    /// <summary>Optional application name (e.g. passed to mage via -n for ClickOnce).</summary>
    internal string? ApplicationName { get; }
    /// <summary>Optional publisher name; when absent, signers may derive one from the certificate subject.</summary>
    internal string? PublisherName { get; }
    internal string? Description { get; }
    internal Uri? DescriptionUrl { get; }
    /// <summary>Optional include matcher; null means "sign everything".</summary>
    internal Matcher? Matcher { get; }
    /// <summary>Optional exclude matcher applied after <see cref="Matcher"/>.</summary>
    internal Matcher? AntiMatcher { get; }
    internal HashAlgorithmName FileHashAlgorithm { get; }
    internal HashAlgorithmName TimestampHashAlgorithm { get; }
    /// <summary>RFC 3161-style timestamp service URL.</summary>
    internal Uri TimestampService { get; }
    /// <summary>Whether to recurse into container formats and sign their contents.</summary>
    internal bool RecurseContainers { get; }

    internal SignOptions(
        string? applicationName,
        string? publisherName,
        string? description,
        Uri? descriptionUrl,
        HashAlgorithmName fileHashAlgorithm,
        HashAlgorithmName timestampHashAlgorithm,
        Uri timestampService,
        Matcher? matcher,
        Matcher? antiMatcher,
        bool recurseContainers)
    {
        ApplicationName = applicationName;
        PublisherName = publisherName;
        Description = description;
        DescriptionUrl = descriptionUrl;
        FileHashAlgorithm = fileHashAlgorithm;
        TimestampHashAlgorithm = timestampHashAlgorithm;
        TimestampService = timestampService;
        Matcher = matcher;
        AntiMatcher = antiMatcher;
        RecurseContainers = recurseContainers;
    }

    /// <summary>
    /// Convenience constructor: no names/descriptions/matchers, SHA-256 timestamp
    /// digest, and container recursion enabled.
    /// </summary>
    internal SignOptions(HashAlgorithmName fileHashAlgorithm, Uri timestampService)
        : this(applicationName: null, publisherName: null, description: null, descriptionUrl: null,
              fileHashAlgorithm, HashAlgorithmName.SHA256, timestampService, matcher: null,
              antiMatcher: null, recurseContainers: true)
    {
    }
}
}
================================================
FILE: src/Sign.Core/DataFormatSigners/SignableFileTypeByExtension.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
namespace Sign.Core
{
/// <summary>
/// A signable file type identified purely by file extension, compared
/// case-insensitively.
/// </summary>
internal sealed class SignableFileTypeByExtension : ISignableFileType
{
    private readonly HashSet _fileExtensions;

    /// <param name="fileExtensions">One or more extensions (including the leading dot).</param>
    internal SignableFileTypeByExtension(params string[] fileExtensions)
    {
        ArgumentNullException.ThrowIfNull(fileExtensions, nameof(fileExtensions));

        if (fileExtensions.Length == 0)
        {
            throw new ArgumentException(Resources.ArgumentCannotBeEmpty, nameof(fileExtensions));
        }

        // Case-insensitive set so ".DLL" matches ".dll", etc.
        _fileExtensions = new HashSet(fileExtensions, StringComparer.OrdinalIgnoreCase);
    }

    /// <summary>Returns true when the file's extension is one of the configured set.</summary>
    public bool IsMatch(FileInfo file)
    {
        ArgumentNullException.ThrowIfNull(file, nameof(file));

        return _fileExtensions.Contains(file.Extension);
    }
}
}
================================================
FILE: src/Sign.Core/DataFormatSigners/VsixSigner.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using System.Globalization;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using Microsoft.Extensions.Logging;
namespace Sign.Core
{
/// <summary>
/// Signs Visual Studio extension (.vsix) packages by delegating to the VSIX sign
/// tool, with the retry behavior inherited from RetryingSigner.
/// </summary>
internal sealed class VsixSigner : RetryingSigner, IDataFormatSigner
{
    private readonly ICertificateProvider _certificateProvider;
    private readonly ISignatureAlgorithmProvider _signatureAlgorithmProvider;
    private readonly IVsixSignTool _vsixSignTool;

    // Dependency injection requires a public constructor.
    public VsixSigner(
        ISignatureAlgorithmProvider signatureAlgorithmProvider,
        ICertificateProvider certificateProvider,
        IVsixSignTool vsixSignTool,
        ILogger logger)
        : base(logger)
    {
        ArgumentNullException.ThrowIfNull(signatureAlgorithmProvider, nameof(signatureAlgorithmProvider));
        ArgumentNullException.ThrowIfNull(certificateProvider, nameof(certificateProvider));
        ArgumentNullException.ThrowIfNull(vsixSignTool, nameof(vsixSignTool));

        _signatureAlgorithmProvider = signatureAlgorithmProvider;
        _certificateProvider = certificateProvider;
        _vsixSignTool = vsixSignTool;
    }

    /// <summary>Returns true for .vsix files.</summary>
    public bool CanSign(FileInfo file)
    {
        ArgumentNullException.ThrowIfNull(file, nameof(file));

        return file.Extension.Equals(".vsix", StringComparison.OrdinalIgnoreCase);
    }

    /// <summary>
    /// Signs all given .vsix files in parallel; throws SigningException listing every
    /// file that could not be signed.
    /// </summary>
    public async Task SignAsync(IEnumerable files, SignOptions options)
    {
        ArgumentNullException.ThrowIfNull(files, nameof(files));
        ArgumentNullException.ThrowIfNull(options, nameof(options));

        Logger.LogInformation(Resources.VsixSignatureProviderSigning, files.Count());

        using (X509Certificate2 certificate = await _certificateProvider.GetCertificateAsync())
        using (RSA rsa = await _signatureAlgorithmProvider.GetRsaAsync())
        {
            // Kick off all signing tasks, remembering which file each belongs to.
            var signingTasks = files
                .Select(file => new
                {
                    File = file,
                    Task = SignAsync(args: null, file, rsa, certificate, options)
                })
                .ToList();

            await Task.WhenAll(signingTasks.Select(pair => pair.Task));

            var failures = new List<string>();

            foreach (var pair in signingTasks)
            {
                if (!pair.Task.Result)
                {
                    failures.Add(pair.File.FullName);
                }
            }

            if (failures.Count > 0)
            {
                string message = string.Format(CultureInfo.CurrentCulture, Resources.SigningFailed, string.Join(", ", failures));

                throw new SigningException(message);
            }
        }
    }

    protected override async Task SignCoreAsync(string? args, FileInfo file, RSA rsaPrivateKey, X509Certificate2 certificate, SignOptions options)
    {
        // Dual signing isn't supported; the same hash algorithm is used for both digests.
        SignConfigurationSet configuration = new(
            options.FileHashAlgorithm,
            options.FileHashAlgorithm,
            rsaPrivateKey,
            certificate);

        return await _vsixSignTool.SignAsync(file, configuration, options);
    }
}
}
================================================
FILE: src/Sign.Core/ExitCode.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
namespace Sign.Core
{
/// <summary>
/// Process exit codes returned by the signing tool.
/// </summary>
internal static class ExitCode
{
    // The operation completed successfully.
    internal const int Success = 0;
    // The supplied options were invalid.
    internal const int InvalidOptions = 1;
    // The operation was attempted but failed.
    internal const int Failed = 2;
    // No input files were found to operate on.
    internal const int NoInputsFound = 3;
}
}
================================================
FILE: src/Sign.Core/FileList/FileListReader.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using Microsoft.Extensions.FileSystemGlobbing;
namespace Sign.Core
{
/// <summary>
/// Reads a file-list stream line by line, splitting entries into include globs and
/// exclude globs (lines prefixed with '!'), and builds a matcher for each set.
/// </summary>
internal sealed class FileListReader : IFileListReader
{
    private readonly IMatcherFactory _matcherFactory;

    // Dependency injection requires a public constructor.
    public FileListReader(IMatcherFactory matcherFactory)
    {
        ArgumentNullException.ThrowIfNull(matcherFactory, nameof(matcherFactory));

        _matcherFactory = matcherFactory;
    }

    public void Read(StreamReader reader, out Matcher matcher, out Matcher antiMatcher)
    {
        ArgumentNullException.ThrowIfNull(reader);

        List includeGlobs = new();
        List excludeGlobs = new();

        for (string? line = reader.ReadLine(); line is not null; line = reader.ReadLine())
        {
            // Strip parent-directory segments so entries cannot escape the root.
            // NOTE(review): this removes literal "..\" / "../" sequences only — verify
            // it cannot be bypassed by other path encodings.
            string sanitized = line.Replace(@"..\", "").Replace("../", "");

            if (string.IsNullOrWhiteSpace(sanitized))
            {
                continue;
            }

            if (sanitized.StartsWith("!", StringComparison.Ordinal))
            {
                // '!' marks an exclusion; drop the prefix.
                excludeGlobs.Add(sanitized[1..]);
            }
            else
            {
                includeGlobs.Add(sanitized);
            }
        }

        matcher = Globber.CreateMatcher(_matcherFactory, includeGlobs);
        antiMatcher = Globber.CreateMatcher(_matcherFactory, excludeGlobs);
    }
}
}
================================================
FILE: src/Sign.Core/FileList/FileMatcher.cs
================================================
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE.txt file in the project root for more information.
using Microsoft.Extensions.FileSystemGlobbing;
using Microsoft.Extensions.FileSystemGlobbing.Abstractions;
namespace Sign.Core
{
internal sealed class FileMatcher : IFileMatcher
{
private readonly Func