From ba761f0fc4b181f2c466bddd146f11dd8001406d Mon Sep 17 00:00:00 2001 From: Andrei Kireev Date: Tue, 28 Mar 2023 09:10:54 +0200 Subject: [PATCH 01/23] fix(detectExecuteScan): Fixed issue with duplication of parameters when specifying them in scanProperties (#4304) * Fixed issue with duplication of parameters search.depth/search.continue/excluded.directories --- cmd/detectExecuteScan.go | 23 ++++++++++++++++++++--- cmd/detectExecuteScan_test.go | 28 +++++++++++++++++++--------- 2 files changed, 39 insertions(+), 12 deletions(-) diff --git a/cmd/detectExecuteScan.go b/cmd/detectExecuteScan.go index 5a0101101d..6447e5512d 100644 --- a/cmd/detectExecuteScan.go +++ b/cmd/detectExecuteScan.go @@ -302,11 +302,18 @@ func addDetectArgs(args []string, config detectExecuteScanOptions, utils detectU config.ScanProperties = piperutils.SplitAndTrim(config.ScanProperties, " ") if config.BuildTool == "mta" { - args = append(args, "--detect.detector.search.depth=100") - args = append(args, "--detect.detector.search.continue=true") + + if !checkIfArgumentIsInScanProperties(config, "detect.detector.search.depth") { + args = append(args, "--detect.detector.search.depth=100") + } + + if !checkIfArgumentIsInScanProperties(config, "detect.detector.search.continue") { + args = append(args, "--detect.detector.search.continue=true") + } + } - if len(config.ExcludedDirectories) != 0 { + if len(config.ExcludedDirectories) != 0 && !checkIfArgumentIsInScanProperties(config, "detect.excluded.directories") { args = append(args, fmt.Sprintf("--detect.excluded.directories=%s", strings.Join(config.ExcludedDirectories, ","))) } @@ -424,6 +431,16 @@ func getVersionName(config detectExecuteScanOptions) string { return detectVersionName } +func checkIfArgumentIsInScanProperties(config detectExecuteScanOptions, argumentName string) bool { + for _, argument := range config.ScanProperties { + if strings.Contains(argument, argumentName) { + return true + } + } + + return false +} + func createVulnerabilityReport(config detectExecuteScanOptions, vulns *bd.Vulnerabilities, influx *detectExecuteScanInflux, sys *blackduckSystem) reporting.ScanReport { versionName := getVersionName(config) versionUrl, _ := sys.Client.GetProjectVersionLink(config.ProjectName, versionName) diff --git a/cmd/detectExecuteScan_test.go b/cmd/detectExecuteScan_test.go index 3b08cb2721..b332e8bcb9 100644 --- a/cmd/detectExecuteScan_test.go +++ b/cmd/detectExecuteScan_test.go @@ -709,20 +709,30 @@ func TestAddDetectArgs(t *testing.T) { { args: []string{"--testProp1=1"}, options: detectExecuteScanOptions{ - ServerURL: "https://server.url", - Token: "apiToken", - ProjectName: "Rapid_scan_on_PRs", - Version: "2.0", - VersioningModel: "major-minor", - CodeLocation: "", - ScanPaths: []string{"path1", "path2"}, - MinScanInterval: 4, - CustomScanVersion: "2.0", + ServerURL: "https://server.url", + BuildTool: "mta", + Token: "apiToken", + ProjectName: "Rapid_scan_on_PRs", + Version: "2.0", + VersioningModel: "major-minor", + CodeLocation: "", + ScanPaths: []string{"path1", "path2"}, + ScanProperties: []string{ + "--detect.detector.search.depth=5", + "--detect.detector.search.continue=false", + "--detect.excluded.directories=dir1,dir2", + }, + ExcludedDirectories: []string{"dir3,dir4"}, + MinScanInterval: 4, + CustomScanVersion: "2.0", }, isPullRequest: true, expected: []string{ "--testProp1=1", "--detect.blackduck.signature.scanner.arguments='--min-scan-interval=4'", + "--detect.detector.search.depth=5", + "--detect.detector.search.continue=false", + 
"--detect.excluded.directories=dir1,dir2", "--blackduck.url=https://server.url", "--blackduck.api.token=apiToken", "\"--detect.project.name='Rapid_scan_on_PRs'\"", From ba58d72022e18fc34a6dcaaa085fe110c8b544fa Mon Sep 17 00:00:00 2001 From: Anke Ravalitera Date: Wed, 29 Mar 2023 12:31:27 +0200 Subject: [PATCH 02/23] Update texts of gCTS steps and scenario for SSL and queryP (#4282) * Update texts of gCTS steps and scenario for SSL and queryP * delete a trailing space --- cmd/gctsCloneRepository_generated.go | 2 +- cmd/gctsCreateRepository_generated.go | 2 +- cmd/gctsDeploy_generated.go | 8 ++++---- cmd/gctsExecuteABAPQualityChecks_generated.go | 6 +++--- cmd/gctsExecuteABAPUnitTests_generated.go | 6 +++--- documentation/docs/scenarios/gCTS_Scenario.md | 10 +++++----- documentation/docs/steps/gctsCloneRepository.md | 4 ++-- documentation/docs/steps/gctsCreateRepository.md | 4 ++-- documentation/docs/steps/gctsDeploy.md | 7 +++++-- .../docs/steps/gctsExecuteABAPQualityChecks.md | 11 +++++++---- documentation/docs/steps/gctsExecuteABAPUnitTests.md | 9 +++++---- documentation/docs/steps/gctsRollback.md | 2 +- resources/metadata/gctsCloneRepository.yaml | 4 ++-- resources/metadata/gctsCreateRepository.yaml | 4 ++-- resources/metadata/gctsDeploy.yaml | 12 ++++++------ resources/metadata/gctsExecuteABAPQualityChecks.yaml | 8 ++++---- resources/metadata/gctsExecuteABAPUnitTests.yaml | 8 ++++---- 17 files changed, 57 insertions(+), 50 deletions(-) diff --git a/cmd/gctsCloneRepository_generated.go b/cmd/gctsCloneRepository_generated.go index fc70eb2519..f2d8189361 100644 --- a/cmd/gctsCloneRepository_generated.go +++ b/cmd/gctsCloneRepository_generated.go @@ -126,7 +126,7 @@ func addGctsCloneRepositoryFlags(cmd *cobra.Command, stepConfig *gctsCloneReposi cmd.Flags().StringVar(&stepConfig.Host, "host", os.Getenv("PIPER_host"), "Specifies the protocol and host address, including the port. Please provide in the format `://:`. Supported protocols are `http` and `https`.") cmd.Flags().StringVar(&stepConfig.Client, "client", os.Getenv("PIPER_client"), "Specifies the client of the ABAP system to be addressed") - cmd.Flags().BoolVar(&stepConfig.SkipSSLVerification, "skipSSLVerification", false, "You can skip the SSL (Secure Socket Layer) verification for the http client") + cmd.Flags().BoolVar(&stepConfig.SkipSSLVerification, "skipSSLVerification", false, "Skip the verification of SSL (Secure Socket Layer) certificates when using HTTPS. This parameter is **not recommended** for productive environments.") cmd.MarkFlagRequired("username") cmd.MarkFlagRequired("password") diff --git a/cmd/gctsCreateRepository_generated.go b/cmd/gctsCreateRepository_generated.go index 9924d5f290..fcb2b019fe 100644 --- a/cmd/gctsCreateRepository_generated.go +++ b/cmd/gctsCreateRepository_generated.go @@ -134,7 +134,7 @@ func addGctsCreateRepositoryFlags(cmd *cobra.Command, stepConfig *gctsCreateRepo cmd.Flags().StringVar(&stepConfig.VSID, "vSID", os.Getenv("PIPER_vSID"), "Virtual SID of the local repository. 
The vSID corresponds to the transport route that delivers content to the remote Git repository") cmd.Flags().StringVar(&stepConfig.Type, "type", `GIT`, "Type of the used source code management tool") - cmd.Flags().BoolVar(&stepConfig.SkipSSLVerification, "skipSSLVerification", false, "You can skip the SSL (Secure Socket Layer) verification for the http client") + cmd.Flags().BoolVar(&stepConfig.SkipSSLVerification, "skipSSLVerification", false, "Skip the verification of SSL (Secure Socket Layer) certificates when using HTTPS. This parameter is **not recommended** for productive environments.") cmd.MarkFlagRequired("username") cmd.MarkFlagRequired("password") diff --git a/cmd/gctsDeploy_generated.go b/cmd/gctsDeploy_generated.go index e148a09a5d..54e2c2200c 100644 --- a/cmd/gctsDeploy_generated.go +++ b/cmd/gctsDeploy_generated.go @@ -140,14 +140,14 @@ func addGctsDeployFlags(cmd *cobra.Command, stepConfig *gctsDeployOptions) { cmd.Flags().StringVar(&stepConfig.Client, "client", os.Getenv("PIPER_client"), "Client of the ABAP system to which you want to deploy the repository") cmd.Flags().StringVar(&stepConfig.Commit, "commit", os.Getenv("PIPER_commit"), "ID of a specific commit, if you want to deploy the content of the specified commit.") cmd.Flags().StringVar(&stepConfig.RemoteRepositoryURL, "remoteRepositoryURL", os.Getenv("PIPER_remoteRepositoryURL"), "URL of the remote repository") - cmd.Flags().StringVar(&stepConfig.Role, "role", `SOURCE`, "Role of the local repository. Possible values are 'SOURCE' (for repositories on development systems - Default) and 'TARGET' (for repositories on target systems). Local repositories with a TARGET role cannot be the source of code changes.") - cmd.Flags().StringVar(&stepConfig.VSID, "vSID", os.Getenv("PIPER_vSID"), "Virtual SID of the local repository. The vSID corresponds to the transport route that delivers content to the remote Git repository. For more information, see [Background Information - vSID](https://help.sap.com/viewer/4a368c163b08418890a406d413933ba7/latest/en-US/8edc17edfc374908bd8a1615ea5ab7b7.html) on SAP Help Portal.") + cmd.Flags().StringVar(&stepConfig.Role, "role", `SOURCE`, "Role of the local repository. Possible values are `SOURCE` (for repositories on development systems - Default) and `TARGET` (for repositories on target systems). Local repositories with a `TARGET` role cannot be the source of code changes.") + cmd.Flags().StringVar(&stepConfig.VSID, "vSID", os.Getenv("PIPER_vSID"), "Virtual SID of the local repository. The vSID corresponds to the transport route that delivers content to the remote Git repository. For more information, see [Background Information - vSID](https://help.sap.com/docs/ABAP_PLATFORM_NEW/4a368c163b08418890a406d413933ba7/8edc17edfc374908bd8a1615ea5ab7b7.html) on SAP Help Portal.") cmd.Flags().StringVar(&stepConfig.Type, "type", `GIT`, "Type of the used source code management tool") cmd.Flags().StringVar(&stepConfig.Branch, "branch", os.Getenv("PIPER_branch"), "Name of a branch, if you want to deploy the content of a specific branch to the ABAP system.") - cmd.Flags().StringVar(&stepConfig.Scope, "scope", os.Getenv("PIPER_scope"), "Scope of objects to be deployed. Possible values are CRNTCOMMIT (current commit - Default) and LASTACTION (last repository action). The default option deploys all objects that existed in the repository when the commit was created. 
LASTACTION only deploys the object difference of the last action in the repository.") + cmd.Flags().StringVar(&stepConfig.Scope, "scope", os.Getenv("PIPER_scope"), "Scope of objects to be deployed. Possible values are `CRNTCOMMIT` (current commit - Default) and `LASTACTION` (last repository action). The default option deploys all objects that existed in the repository when the commit was created. `LASTACTION` only deploys the object difference of the last action in the repository.") cmd.Flags().BoolVar(&stepConfig.Rollback, "rollback", false, "Indication whether you want to roll back to the last working state of the repository, if any of the step actions *switch branch* or *pull commit* fail.") - cmd.Flags().BoolVar(&stepConfig.SkipSSLVerification, "skipSSLVerification", false, "You can skip the SSL (Secure Socket Layer) verification for the http client") + cmd.Flags().BoolVar(&stepConfig.SkipSSLVerification, "skipSSLVerification", false, "Skip the verification of SSL (Secure Socket Layer) certificates when using HTTPS. This parameter is **not recommended** for productive environments.") cmd.MarkFlagRequired("username") cmd.MarkFlagRequired("password") diff --git a/cmd/gctsExecuteABAPQualityChecks_generated.go b/cmd/gctsExecuteABAPQualityChecks_generated.go index 42c8bbf0da..0ad6f5f4e3 100644 --- a/cmd/gctsExecuteABAPQualityChecks_generated.go +++ b/cmd/gctsExecuteABAPQualityChecks_generated.go @@ -135,8 +135,8 @@ You can use this step as of SAP S/4HANA 2020 with SAP Note [3159798](https://lau } func addGctsExecuteABAPQualityChecksFlags(cmd *cobra.Command, stepConfig *gctsExecuteABAPQualityChecksOptions) { - cmd.Flags().StringVar(&stepConfig.Username, "username", os.Getenv("PIPER_username"), "User that authenticates to the ABAP system. Note - Don´t provide this parameter directly. Either set it in the environment, or in the Jenkins credentials store, and provide the ID as value of the `abapCredentialsId` parameter.") - cmd.Flags().StringVar(&stepConfig.Password, "password", os.Getenv("PIPER_password"), "Password of the ABAP user that authenticates to the ABAP system. Note - Don´t provide this parameter directly. Either set it in the environment, or in the Jenkins credentials store, and provide the ID as value of the `abapCredentialsId` parameter.") + cmd.Flags().StringVar(&stepConfig.Username, "username", os.Getenv("PIPER_username"), "User that authenticates to the ABAP system. **Note** - Don´t provide this parameter directly. Either set it in the environment, or in the Jenkins credentials store, and provide the ID as value of the `abapCredentialsId` parameter.") + cmd.Flags().StringVar(&stepConfig.Password, "password", os.Getenv("PIPER_password"), "Password of the ABAP user that authenticates to the ABAP system. **Note** - Don´t provide this parameter directly. Either set it in the environment, or in the Jenkins credentials store, and provide the ID as value of the `abapCredentialsId` parameter.") cmd.Flags().StringVar(&stepConfig.Host, "host", os.Getenv("PIPER_host"), "Protocol and host of the ABAP system, including the port. Please provide it in the format `://:`. 
Supported protocols are `http` and `https`.") cmd.Flags().StringVar(&stepConfig.Repository, "repository", os.Getenv("PIPER_repository"), "Name (ID) of the local repository on the ABAP system") cmd.Flags().StringVar(&stepConfig.Client, "client", os.Getenv("PIPER_client"), "Client of the ABAP system in which you want to execute the checks") @@ -149,7 +149,7 @@ func addGctsExecuteABAPQualityChecksFlags(cmd *cobra.Command, stepConfig *gctsEx cmd.Flags().StringVar(&stepConfig.AtcResultsFileName, "atcResultsFileName", `ATCResults.xml`, "Specifies an output file name for the results of the ATC checks.") cmd.Flags().StringVar(&stepConfig.AUnitResultsFileName, "aUnitResultsFileName", `AUnitResults.xml`, "Specifies an output file name for the results of the ABAP Unit tests.") - cmd.Flags().BoolVar(&stepConfig.SkipSSLVerification, "skipSSLVerification", false, "You can skip the SSL (Secure Socket Layer) verification for the http client") + cmd.Flags().BoolVar(&stepConfig.SkipSSLVerification, "skipSSLVerification", false, "Skip the verification of SSL (Secure Socket Layer) certificates when using HTTPS. This parameter is **not recommended** for productive environments.") cmd.MarkFlagRequired("username") cmd.MarkFlagRequired("password") diff --git a/cmd/gctsExecuteABAPUnitTests_generated.go b/cmd/gctsExecuteABAPUnitTests_generated.go index 988b9a824c..59e43f4057 100644 --- a/cmd/gctsExecuteABAPUnitTests_generated.go +++ b/cmd/gctsExecuteABAPUnitTests_generated.go @@ -128,8 +128,8 @@ func GctsExecuteABAPUnitTestsCommand() *cobra.Command { } func addGctsExecuteABAPUnitTestsFlags(cmd *cobra.Command, stepConfig *gctsExecuteABAPUnitTestsOptions) { - cmd.Flags().StringVar(&stepConfig.Username, "username", os.Getenv("PIPER_username"), "User that authenticates to the ABAP system. Note - Don´t provide this parameter directly. Either set it in the environment, or in the Jenkins credentials store, and provide the ID as value of the `abapCredentialsId` parameter.") - cmd.Flags().StringVar(&stepConfig.Password, "password", os.Getenv("PIPER_password"), "Password of the ABAP user that authenticates to the ABAP system. Note - Don´t provide this parameter directly. Either set it in the environment, or in the Jenkins credentials store, and provide the ID as value of the `abapCredentialsId` parameter.") + cmd.Flags().StringVar(&stepConfig.Username, "username", os.Getenv("PIPER_username"), "User that authenticates to the ABAP system. **Note** - Don´t provide this parameter directly. Either set it in the environment, or in the Jenkins credentials store, and provide the ID as value of the `abapCredentialsId` parameter.") + cmd.Flags().StringVar(&stepConfig.Password, "password", os.Getenv("PIPER_password"), "Password of the ABAP user that authenticates to the ABAP system. **Note** - Don´t provide this parameter directly. Either set it in the environment, or in the Jenkins credentials store, and provide the ID as value of the `abapCredentialsId` parameter.") cmd.Flags().StringVar(&stepConfig.Host, "host", os.Getenv("PIPER_host"), "Protocol and host of the ABAP system, including the port. Please provide it in the format `://:`. 
Supported protocols are `http` and `https`.") cmd.Flags().StringVar(&stepConfig.Repository, "repository", os.Getenv("PIPER_repository"), "Name (ID) of the local repository on the ABAP system") cmd.Flags().StringVar(&stepConfig.Client, "client", os.Getenv("PIPER_client"), "Client of the ABAP system in which you want to execute the checks") @@ -142,7 +142,7 @@ func addGctsExecuteABAPUnitTestsFlags(cmd *cobra.Command, stepConfig *gctsExecut cmd.Flags().StringVar(&stepConfig.AtcResultsFileName, "atcResultsFileName", `ATCResults.xml`, "Specifies an output file name for the results of the ATC checks.") cmd.Flags().StringVar(&stepConfig.AUnitResultsFileName, "aUnitResultsFileName", `AUnitResults.xml`, "Specifies an output file name for the results of the ABAP Unit tests.") - cmd.Flags().BoolVar(&stepConfig.SkipSSLVerification, "skipSSLVerification", false, "You can skip the SSL (Secure Socket Layer) verification for the http client") + cmd.Flags().BoolVar(&stepConfig.SkipSSLVerification, "skipSSLVerification", false, "Skip the verification of SSL (Secure Socket Layer) certificates when using HTTPS. This parameter is **not recommended** for productive environments.") cmd.MarkFlagRequired("username") cmd.MarkFlagRequired("password") diff --git a/documentation/docs/scenarios/gCTS_Scenario.md b/documentation/docs/scenarios/gCTS_Scenario.md index 3ef34ada5c..1f957bc45e 100644 --- a/documentation/docs/scenarios/gCTS_Scenario.md +++ b/documentation/docs/scenarios/gCTS_Scenario.md @@ -2,9 +2,9 @@ ## Introduction -[Git-enabled Change & Transport System (gCTS)](https://help.sap.com/viewer/4a368c163b08418890a406d413933ba7/latest/en-US/f319b168e87e42149e25e13c08d002b9.html) enables you to manage your ABAP change and transport management processes using Git as an external version management system. It allows you to set up continuous integration processes for ABAP development. For current information about gCTS, see SAP Note [2821718 - Central Note for Git-enabled Change and Transport System (gCTS)](https://launchpad.support.sap.com/#/notes/2821718). +[Git-enabled Change & Transport System (gCTS)](https://help.sap.com/docs/ABAP_PLATFORM_NEW/4a368c163b08418890a406d413933ba7/f319b168e87e42149e25e13c08d002b9.html) enables you to manage your ABAP change and transport management processes using Git as an external version management system. It allows you to set up continuous integration processes for ABAP development. For current information about gCTS, see SAP Note [2821718 - Central Note for Git-enabled Change and Transport System (gCTS)](https://launchpad.support.sap.com/#/notes/2821718). -This scenario explains how to use a pipeline to deploy a commit to a test system, and execute [ABAP unit tests](https://help.sap.com/viewer/ba879a6e2ea04d9bb94c7ccd7cdac446/latest/en-US/491cfd8926bc14cde10000000a42189b.html) and [ATC (ABAP Test Cockpit)](https://help.sap.com/viewer/ba879a6e2ea04d9bb94c7ccd7cdac446/latest/en-US/62c41ad841554516bb06fb3620540e47.html) checks in the test system. For each new commit that arrives in the remote repository, the pipeline executes the following Piper steps in the test system: +This scenario explains how to use a pipeline to deploy a commit to a test system, and execute [ABAP unit tests](https://help.sap.com/docs/ABAP_PLATFORM_NEW/ba879a6e2ea04d9bb94c7ccd7cdac446/491cfd8926bc14cde10000000a42189b.html) and [ATC (ABAP Test Cockpit)](https://help.sap.com/docs/ABAP_PLATFORM_NEW/ba879a6e2ea04d9bb94c7ccd7cdac446/62c41ad841554516bb06fb3620540e47.html) checks in the test system. 
For each new commit that arrives in the remote repository, the pipeline executes the following Piper steps in the test system: 1. [gctsDeploy](../../steps/gctsDeploy/): Deploys the commit on the test system. 2. [gctsExecuteABAPQualityChecks](../../steps/gctsExecuteABAPQualityChecks/): Executes ABAP unit tests and ATC checks for the ABAP development objects of the commit. @@ -15,17 +15,17 @@ This scenario explains how to use a pipeline to deploy a commit to a test system ## Prerequisites - You have configured Git-Enabled Change and Transport System, and you use it for your ABAP development. See - [Configuring Git-enabled Change & Transport System (gCTS)](https://help.sap.com/viewer/4a368c163b08418890a406d413933ba7/latest/en-US/26c9c6c5a89244cb9506c253d36c3fda.html) + [Configuring Git-enabled Change & Transport System (gCTS)](https://help.sap.com/docs/ABAP_PLATFORM_NEW/4a368c163b08418890a406d413933ba7/26c9c6c5a89244cb9506c253d36c3fda.html) - You have a Git repository on a Git server, such as GitHub, or GitLab. The Git repository is usually created as part of the gCTS configuration. It is used to store your ABAP developments. You can use this Git repository also for the pipeline configuration. (Jenkinsfile) The repository used for the pipeline configuration needs to be accessed by the Jenkins instance. If the repository is password-protected, the user and password (or access token) should be stored in the Jenkins Credentials Store (**Manage Jenkins** > **Manage Credentials**). - You have at least two ABAP systems with a version SAP S/4HANA 2020 or higher. You need one development system that you use to push objects to the Git repository, and a test system on which you run the pipeline. You have created and cloned the Git repository on all systems, on the development system with the *Development* role, and on the others with the *Provided* role. -- You have enabled [ATC](https://help.sap.com/viewer/ba879a6e2ea04d9bb94c7ccd7cdac446/latest/en-US/62c41ad841554516bb06fb3620540e47.html) checks in transaction ATC in the test system. +- You have enabled [ATC](https://help.sap.com/docs/ABAP_PLATFORM_NEW/ba879a6e2ea04d9bb94c7ccd7cdac446/62c41ad841554516bb06fb3620540e47.html) checks in transaction ATC in the test system. - You have access to a Jenkins instance including the [Warnings-Next-Generation Plugin](https://plugins.jenkins.io/warnings-ng/). The plug-in must be installed separately. It is required to view the results of the testing after the pipeline has run. For the gCTS scenario, we recommend that you use the [Custom Jenkins setup](https://www.project-piper.io/infrastructure/customjenkins/) even though it is possible to run the gCTS scenario with [Piper´s CX server](https://www.project-piper.io/infrastructure/overview/). - You have set up a suitable Jenkins instance as described under [Getting Started with Project "Piper"](https://www.project-piper.io/guidedtour/) under *Create Your First Pipeline*. -- The user that is used for the execution of the pipeline must have the credentials entered in gCTS as described in the gCTS documentation under [Set User-Specific Authentication](https://help.sap.com/viewer/4a368c163b08418890a406d413933ba7/latest/en-US/3431ebd6fbf241778cd60587e7b5dc3e.html). +- The user that is used for the execution of the pipeline must have the credentials entered in gCTS as described in the gCTS documentation under [Set User-Specific Authentication](https://help.sap.com/docs/ABAP_PLATFORM_NEW/4a368c163b08418890a406d413933ba7/3431ebd6fbf241778cd60587e7b5dc3e.html). 
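
Once these prerequisites are in place, the two steps of the scenario can be configured centrally in `.pipeline/config.yml`. The following is a minimal sketch only: the host, client, credentials ID, repository name, and remote URL are placeholder values, and the selection of parameters is not exhaustive; the complete parameter lists are documented on the individual step pages (gctsDeploy, gctsExecuteABAPQualityChecks).

```yaml
steps:
  gctsDeploy:
    # placeholder connection values - replace with your test system
    host: 'https://abap.server.com:port'
    client: '000'
    abapCredentialsId: 'ABAPUserPasswordCredentialsId'
    repository: 'myrepo'
    remoteRepositoryURL: 'https://github.com/user/myrepo'
  gctsExecuteABAPQualityChecks:
    host: 'https://abap.server.com:port'
    client: '000'
    abapCredentialsId: 'ABAPUserPasswordCredentialsId'
    repository: 'myrepo'
    scope: 'remoteChangedObjects'
```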
## Process diff --git a/documentation/docs/steps/gctsCloneRepository.md b/documentation/docs/steps/gctsCloneRepository.md index 2b23a0bdd4..2e81a13d63 100644 --- a/documentation/docs/steps/gctsCloneRepository.md +++ b/documentation/docs/steps/gctsCloneRepository.md @@ -4,8 +4,8 @@ ## Prerequisites -With this step you can clone a remote Git repository to a local repository on an ABAP server. To be able to execute this step, the corresponding local repository has to exist on the local ABAP system. -Learn more about the SAP Git-enabled Change & Transport System (gCTS) [here](https://help.sap.com/viewer/4a368c163b08418890a406d413933ba7/201909.001/en-US/f319b168e87e42149e25e13c08d002b9.html). With gCTS, ABAP developments on ABAP servers can be maintained in Git repositories. +This step clones a remote Git repository to a local repository on an ABAP server. To execute this step, the corresponding local repository must exist on the local ABAP system. +More information about the [Git-enabled Change and Transport System (gCTS)](https://help.sap.com/docs/ABAP_PLATFORM_NEW/4a368c163b08418890a406d413933ba7/f319b168e87e42149e25e13c08d002b9.html). ## ${docGenParameters} diff --git a/documentation/docs/steps/gctsCreateRepository.md b/documentation/docs/steps/gctsCreateRepository.md index 0363e1b100..80e733e3f9 100644 --- a/documentation/docs/steps/gctsCreateRepository.md +++ b/documentation/docs/steps/gctsCreateRepository.md @@ -4,8 +4,8 @@ ## Prerequisites -With this step you can create a local git-enabled CTS (gCTS) repository on an ABAP server. -Learn more about the SAP Git-enabled Change & Transport System (gCTS) [here](https://help.sap.com/viewer/4a368c163b08418890a406d413933ba7/201909.001/en-US/f319b168e87e42149e25e13c08d002b9.html). With gCTS, ABAP developments on ABAP servers can be maintained in Git repositories. +This step creates a local Git repository on an ABAP server. +More information about the [Git-enabled Change and Transport System (gCTS)](https://help.sap.com/docs/ABAP_PLATFORM_NEW/4a368c163b08418890a406d413933ba7/f319b168e87e42149e25e13c08d002b9.html). ## ${docGenParameters} diff --git a/documentation/docs/steps/gctsDeploy.md b/documentation/docs/steps/gctsDeploy.md index fa273cee28..9f951e4e09 100644 --- a/documentation/docs/steps/gctsDeploy.md +++ b/documentation/docs/steps/gctsDeploy.md @@ -5,7 +5,7 @@ ## Prerequisites If you provide a `commit ID`, the step deploys the content of the specified commit to the ABAP system. If you provide a `branch`, the step deploys the content of the specified branch. If you set the `rollback` parameter to *true*, the step returns to a working state of the repository, if the deployment of the specified commit or branch fails. -Learn more about the SAP Git-enabled Change & Transport System (gCTS) [here](https://help.sap.com/viewer/4a368c163b08418890a406d413933ba7/latest/en-US/f319b168e87e42149e25e13c08d002b9.html). With gCTS, ABAP developments on ABAP servers can be maintained in Git repositories. +More information about the [Git-enabled Change and Transport System (gCTS)](https://help.sap.com/docs/ABAP_PLATFORM_NEW/4a368c163b08418890a406d413933ba7/f319b168e87e42149e25e13c08d002b9.html). 
## ${docGenParameters} @@ -31,7 +31,8 @@ gctsDeploy( commit: '95952ec', scope: 'LASTACTION', rollback: true, - configuration: [VCS_AUTOMATIC_PULL: 'FALSE',VCS_AUTOMATIC_PUSH: 'FALSE',CLIENT_VCS_LOGLVL: 'debug'] + configuration: [VCS_AUTOMATIC_PULL: 'FALSE',VCS_AUTOMATIC_PUSH: 'FALSE',CLIENT_VCS_LOGLVL: 'debug'], + queryparameters: [saml2: 'disabled'] ) ``` @@ -56,4 +57,6 @@ steps: VCS_AUTOMATIC_PULL: "FALSE" VCS_AUTOMATIC_PUSH: "FALSE" CLIENT_VCS_LOGLVL: "debug" + queryparameters: + saml2: "disabled" ``` diff --git a/documentation/docs/steps/gctsExecuteABAPQualityChecks.md b/documentation/docs/steps/gctsExecuteABAPQualityChecks.md index 414a8fbabf..012ad2816f 100644 --- a/documentation/docs/steps/gctsExecuteABAPQualityChecks.md +++ b/documentation/docs/steps/gctsExecuteABAPQualityChecks.md @@ -4,9 +4,9 @@ ## Prerequisites -* [ATC](https://help.sap.com/viewer/ba879a6e2ea04d9bb94c7ccd7cdac446/latest/en-US/62c41ad841554516bb06fb3620540e47.html) checks are enabled in transaction ATC in the ABAP systems where you want to use the step. -* [ABAP Unit tests](https://help.sap.com/viewer/ba879a6e2ea04d9bb94c7ccd7cdac446/latest/en-US/491cfd8926bc14cde10000000a42189b.html) are available for the source code that you want to check. Note: Do not execute unit tests in client 000, and not in your production client. -* [gCTS](https://help.sap.com/viewer/4a368c163b08418890a406d413933ba7/latest/en-US/26c9c6c5a89244cb9506c253d36c3fda.html) is available and configured in the ABAP systems where you want to use the step. +* [ATC](https://help.sap.com/docs/ABAP_PLATFORM_NEW/ba879a6e2ea04d9bb94c7ccd7cdac446/62c41ad841554516bb06fb3620540e47.html) checks are enabled in transaction ATC in the ABAP systems where you want to use the step. +* [ABAP Unit tests](https://help.sap.com/docs/ABAP_PLATFORM_NEW/ba879a6e2ea04d9bb94c7ccd7cdac446/491cfd8926bc14cde10000000a42189b.html) are available for the source code that you want to check. Note: Do not execute unit tests in client 000, and not in your production client. +* [gCTS](https://help.sap.com/docs/ABAP_PLATFORM_NEW/4a368c163b08418890a406d413933ba7/f319b168e87e42149e25e13c08d002b9.html) is available and configured in the ABAP systems where you want to use the step. * If you want to use environmental variables as parameters, for example, `GIT_COMMIT`: The [Git Plugin](https://plugins.jenkins.io/git/) is installed in Jenkins. * The [Warnings-Next-Generation](https://plugins.jenkins.io/warnings-ng/) Plugin is installed in Jenkins. @@ -29,7 +29,8 @@ gctsExecuteABAPQualityChecks( repository: 'myrepo', scope: 'remoteChangedObjects', commit: "${env.GIT_COMMIT}", - workspace: "${WORKSPACE}" + workspace: "${WORKSPACE}", + queryparameters: [saml2: 'disabled'] ) ``` @@ -47,6 +48,8 @@ steps: scope: 'remoteChangedObjects' commit: '38abb4814ae46b98e8e6c3e718cf1782afa9ca90' workspace: '/var/jenkins_home/workspace/myFirstPipeline' + queryparameters: + saml2: "disabled" ``` Example configuration with the *repository* scope defined. Here, you don´t need to specify a *commit*. This sample configuration can also be used with the *packages* scope. 
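A minimal sketch of such a configuration is shown below. The step key under `steps:` and the placeholder values for host, client, credentials ID, repository, and workspace mirror the examples above; the example actually shipped with the step documentation may differ in detail.

```yaml
steps:
  gctsExecuteABAPQualityChecks:
    host: 'https://abap.server.com:port'
    client: '000'
    abapCredentialsId: 'ABAPUserPasswordCredentialsId'
    repository: 'myrepo'
    # with the 'repository' (or 'packages') scope, no commit needs to be specified
    scope: 'repository'
    workspace: '/var/jenkins_home/workspace/myFirstPipeline'
```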
diff --git a/documentation/docs/steps/gctsExecuteABAPUnitTests.md b/documentation/docs/steps/gctsExecuteABAPUnitTests.md index ccdce5f83f..66abc03795 100644 --- a/documentation/docs/steps/gctsExecuteABAPUnitTests.md +++ b/documentation/docs/steps/gctsExecuteABAPUnitTests.md @@ -8,9 +8,9 @@ ## Prerequisites -* [ATC](https://help.sap.com/viewer/c238d694b825421f940829321ffa326a/202110.000/en-US/4ec5711c6e391014adc9fffe4e204223.html) checks are enabled in transaction ATC in the ABAP systems where you want to use the step. -* [ABAP Unit tests](https://help.sap.com/viewer/ba879a6e2ea04d9bb94c7ccd7cdac446/latest/en-US/491cfd8926bc14cde10000000a42189b.html) are available for the source code that you want to check. Note: Do not execute unit tests in client 000, and not in your production client. -* [gCTS](https://help.sap.com/viewer/4a368c163b08418890a406d413933ba7/latest/en-US/26c9c6c5a89244cb9506c253d36c3fda.html) is available and configured in the ABAP systems where you want to use the step. +* [ATC](https://help.sap.com/docs/ABAP_PLATFORM_NEW/ba879a6e2ea04d9bb94c7ccd7cdac446/62c41ad841554516bb06fb3620540e47.html) checks are enabled in transaction ATC in the ABAP systems where you want to use the step. +* [ABAP Unit tests](https://help.sap.com/docs/ABAP_PLATFORM_NEW/ba879a6e2ea04d9bb94c7ccd7cdac446/491cfd8926bc14cde10000000a42189b.html) are available for the source code that you want to check. Note: Do not execute unit tests in client 000, and not in your production client. +* [gCTS](https://help.sap.com/docs/ABAP_PLATFORM_NEW/4a368c163b08418890a406d413933ba7/f319b168e87e42149e25e13c08d002b9.html) is available and configured in the ABAP systems where you want to use the step. * If you want to use environmental variables as parameters, for example, `GIT_COMMIT`: The [Git Plugin](https://plugins.jenkins.io/git/) is installed in Jenkins. * The [Warnings-Next-Generation](https://plugins.jenkins.io/warnings-ng/) Plugin is installed in Jenkins. @@ -33,7 +33,8 @@ gctsExecuteABAPUnitTests( repository: 'myrepo', scope: 'remoteChangedObjects', commit: "${env.GIT_COMMIT}", - workspace: "${WORKSPACE}" + workspace: "${WORKSPACE}", + queryparameters: [saml2: 'disabled'] ) ``` diff --git a/documentation/docs/steps/gctsRollback.md b/documentation/docs/steps/gctsRollback.md index fc9d4fdc60..7aa2c86025 100644 --- a/documentation/docs/steps/gctsRollback.md +++ b/documentation/docs/steps/gctsRollback.md @@ -5,7 +5,7 @@ ## Prerequisites This step performs a rollback of commit(s) in a local ABAP system repository. If a `commit` parameter is specified, it will be used as the target commit for the rollback. If no `commit` parameter is specified and the remote repository domain is 'github.com', the last commit with the status 'success' will be used for the rollback. Otherwise, `gctsRollback` will roll back to the previously active commit in the local repository. -Learn more about [Git-enabled Change and Transport System (gCTS)](https://help.sap.com/viewer/4a368c163b08418890a406d413933ba7/201909.001/en-US/f319b168e87e42149e25e13c08d002b9.html). +More information about the [Git-enabled Change and Transport System (gCTS)](https://help.sap.com/docs/ABAP_PLATFORM_NEW/4a368c163b08418890a406d413933ba7/f319b168e87e42149e25e13c08d002b9.html). 
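
A minimal configuration sketch follows, assuming that `gctsRollback` accepts the same connection parameters as the other gCTS steps in this series (`host`, `client`, `abapCredentialsId`, `repository`); the values and the commit ID are placeholders. Omit `commit` to let the step pick the rollback target as described above.

```yaml
steps:
  gctsRollback:
    host: 'https://abap.server.com:port'
    client: '000'
    abapCredentialsId: 'ABAPUserPasswordCredentialsId'
    repository: 'myrepo'
    # optional: pin the rollback target to a specific commit (placeholder value)
    commit: '95952ec'
```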
## ${docGenParameters} diff --git a/resources/metadata/gctsCloneRepository.yaml b/resources/metadata/gctsCloneRepository.yaml index 3bf6259e2a..d1d748b075 100644 --- a/resources/metadata/gctsCloneRepository.yaml +++ b/resources/metadata/gctsCloneRepository.yaml @@ -63,7 +63,7 @@ spec: mandatory: true - name: queryParameters type: "map[string]interface{}" - description: Specifies pairs of key and value query parameters for the api requests + description: Add query parameters (for API requests) that apply to all endpoints of the step. Provide the parameters as key-value pair map in the format `:`. scope: - PARAMETERS - STAGES @@ -71,7 +71,7 @@ spec: mandatory: false - name: skipSSLVerification type: bool - description: You can skip the SSL (Secure Socket Layer) verification for the http client + description: Skip the verification of SSL (Secure Socket Layer) certificates when using HTTPS. This parameter is **not recommended** for productive environments. scope: - PARAMETERS - STAGES diff --git a/resources/metadata/gctsCreateRepository.yaml b/resources/metadata/gctsCreateRepository.yaml index f6a1ef0f6f..49272a6d2e 100644 --- a/resources/metadata/gctsCreateRepository.yaml +++ b/resources/metadata/gctsCreateRepository.yaml @@ -98,7 +98,7 @@ spec: - GIT - name: queryParameters type: "map[string]interface{}" - description: Specifies pairs of key and value query parameters for the api requests + description: Add query parameters (for API requests) that apply to all endpoints of the step. Provide the parameters as key-value pair map in the format `:`. scope: - PARAMETERS - STAGES @@ -106,7 +106,7 @@ spec: mandatory: false - name: skipSSLVerification type: bool - description: You can skip the SSL (Secure Socket Layer) verification for the http client + description: Skip the verification of SSL (Secure Socket Layer) certificates when using HTTPS. This parameter is **not recommended** for productive environments. scope: - PARAMETERS - STAGES diff --git a/resources/metadata/gctsDeploy.yaml b/resources/metadata/gctsDeploy.yaml index 6434cce01c..c4310896bb 100644 --- a/resources/metadata/gctsDeploy.yaml +++ b/resources/metadata/gctsDeploy.yaml @@ -82,7 +82,7 @@ spec: - STEPS - name: role type: string - description: Role of the local repository. Possible values are 'SOURCE' (for repositories on development systems - Default) and 'TARGET' (for repositories on target systems). Local repositories with a TARGET role cannot be the source of code changes. + description: Role of the local repository. Possible values are `SOURCE` (for repositories on development systems - Default) and `TARGET` (for repositories on target systems). Local repositories with a `TARGET` role cannot be the source of code changes. scope: - PARAMETERS - STAGES @@ -93,7 +93,7 @@ spec: default: SOURCE - name: vSID type: string - description: Virtual SID of the local repository. The vSID corresponds to the transport route that delivers content to the remote Git repository. For more information, see [Background Information - vSID](https://help.sap.com/viewer/4a368c163b08418890a406d413933ba7/latest/en-US/8edc17edfc374908bd8a1615ea5ab7b7.html) on SAP Help Portal. + description: Virtual SID of the local repository. The vSID corresponds to the transport route that delivers content to the remote Git repository. For more information, see [Background Information - vSID](https://help.sap.com/docs/ABAP_PLATFORM_NEW/4a368c163b08418890a406d413933ba7/8edc17edfc374908bd8a1615ea5ab7b7.html) on SAP Help Portal. 
scope: - PARAMETERS - STAGES @@ -119,7 +119,7 @@ spec: - STEPS - name: scope type: string - description: Scope of objects to be deployed. Possible values are CRNTCOMMIT (current commit - Default) and LASTACTION (last repository action). The default option deploys all objects that existed in the repository when the commit was created. LASTACTION only deploys the object difference of the last action in the repository. + description: Scope of objects to be deployed. Possible values are `CRNTCOMMIT` (current commit - Default) and `LASTACTION` (last repository action). The default option deploys all objects that existed in the repository when the commit was created. `LASTACTION` only deploys the object difference of the last action in the repository. scope: - PARAMETERS - STAGES @@ -133,7 +133,7 @@ spec: - STEPS - name: configuration type: "map[string]interface{}" - description: "Configuration parameters for the repository. Provide the parameters as a key-value pair map in the following format: ``:``. For a list of available configuration parameters, see [Configuration Parameters for Repositories](https://help.sap.com/viewer/4a368c163b08418890a406d413933ba7/latest/en-US/99e471efcbee4a0faec82f9dd15897e1.html)." + description: "Configuration parameters for the repository. Provide the parameters as a key-value pair map in the following format: ``:``. For a list of available configuration parameters, see [Configuration Parameters for Repositories](https://help.sap.com/docs/ABAP_PLATFORM_NEW/4a368c163b08418890a406d413933ba7/99e471efcbee4a0faec82f9dd15897e1.html)." scope: - PARAMETERS - STAGES @@ -144,7 +144,7 @@ spec: - name: gctsRepositoryConfigurations - name: queryParameters type: "map[string]interface{}" - description: Specifies pairs of key and value query parameters for the api requests + description: Add query parameters (for API requests) that apply to all endpoints of the step. Provide the parameters as key-value pair map in the format `:`. scope: - PARAMETERS - STAGES @@ -152,7 +152,7 @@ spec: mandatory: false - name: skipSSLVerification type: bool - description: You can skip the SSL (Secure Socket Layer) verification for the http client + description: Skip the verification of SSL (Secure Socket Layer) certificates when using HTTPS. This parameter is **not recommended** for productive environments. scope: - PARAMETERS - STAGES diff --git a/resources/metadata/gctsExecuteABAPQualityChecks.yaml b/resources/metadata/gctsExecuteABAPQualityChecks.yaml index e4795156b3..8b93311d3a 100644 --- a/resources/metadata/gctsExecuteABAPQualityChecks.yaml +++ b/resources/metadata/gctsExecuteABAPQualityChecks.yaml @@ -20,7 +20,7 @@ spec: params: - name: username type: string - description: User that authenticates to the ABAP system. Note - Don´t provide this parameter directly. Either set it in the environment, or in the Jenkins credentials store, and provide the ID as value of the `abapCredentialsId` parameter. + description: User that authenticates to the ABAP system. **Note** - Don´t provide this parameter directly. Either set it in the environment, or in the Jenkins credentials store, and provide the ID as value of the `abapCredentialsId` parameter. scope: - PARAMETERS - STAGES @@ -33,7 +33,7 @@ spec: param: username - name: password type: string - description: Password of the ABAP user that authenticates to the ABAP system. Note - Don´t provide this parameter directly. 
Either set it in the environment, or in the Jenkins credentials store, and provide the ID as value of the `abapCredentialsId` parameter. + description: Password of the ABAP user that authenticates to the ABAP system. **Note** - Don´t provide this parameter directly. Either set it in the environment, or in the Jenkins credentials store, and provide the ID as value of the `abapCredentialsId` parameter. scope: - PARAMETERS - STAGES @@ -168,7 +168,7 @@ spec: default: "AUnitResults.xml" - name: queryParameters type: "map[string]interface{}" - description: Specifies pairs of key and value query parameters for the api requests + description: Add query parameters (for API requests) that apply to all endpoints of the step. Provide the parameters as key-value pair map in the format `:`. scope: - PARAMETERS - STAGES @@ -176,7 +176,7 @@ spec: mandatory: false - name: skipSSLVerification type: bool - description: You can skip the SSL (Secure Socket Layer) verification for the http client + description: Skip the verification of SSL (Secure Socket Layer) certificates when using HTTPS. This parameter is **not recommended** for productive environments. scope: - PARAMETERS - STAGES diff --git a/resources/metadata/gctsExecuteABAPUnitTests.yaml b/resources/metadata/gctsExecuteABAPUnitTests.yaml index 66a9e74019..7d4d8dc671 100644 --- a/resources/metadata/gctsExecuteABAPUnitTests.yaml +++ b/resources/metadata/gctsExecuteABAPUnitTests.yaml @@ -13,7 +13,7 @@ spec: params: - name: username type: string - description: User that authenticates to the ABAP system. Note - Don´t provide this parameter directly. Either set it in the environment, or in the Jenkins credentials store, and provide the ID as value of the `abapCredentialsId` parameter. + description: User that authenticates to the ABAP system. **Note** - Don´t provide this parameter directly. Either set it in the environment, or in the Jenkins credentials store, and provide the ID as value of the `abapCredentialsId` parameter. scope: - PARAMETERS - STAGES @@ -26,7 +26,7 @@ spec: param: username - name: password type: string - description: Password of the ABAP user that authenticates to the ABAP system. Note - Don´t provide this parameter directly. Either set it in the environment, or in the Jenkins credentials store, and provide the ID as value of the `abapCredentialsId` parameter. + description: Password of the ABAP user that authenticates to the ABAP system. **Note** - Don´t provide this parameter directly. Either set it in the environment, or in the Jenkins credentials store, and provide the ID as value of the `abapCredentialsId` parameter. scope: - PARAMETERS - STAGES @@ -161,7 +161,7 @@ spec: default: "AUnitResults.xml" - name: queryParameters type: "map[string]interface{}" - description: Specifies pairs of key and value query parameters for the api requests + description: Add query parameters (for API requests) that apply to all endpoints of the step. Provide the parameters as key-value pair map in the format `:`. scope: - PARAMETERS - STAGES @@ -169,7 +169,7 @@ spec: mandatory: false - name: skipSSLVerification type: bool - description: You can skip the SSL (Secure Socket Layer) verification for the http client + description: Skip the verification of SSL (Secure Socket Layer) certificates when using HTTPS. This parameter is **not recommended** for productive environments. 
scope: - PARAMETERS - STAGES From 81d9a0ac8c58b3a7ad3ec58ffe16211985cba18d Mon Sep 17 00:00:00 2001 From: Pavel Busko Date: Fri, 31 Mar 2023 09:36:59 +0200 Subject: [PATCH 03/23] feat(cnbBuild): support Vault general purpose secrets as a binding content source (#4281) * feat(cnbBuild): support Vault general purpose secrets as a binding content source Co-authored-by: Pavel Busko * fix npm project integration test Co-authored-by: Pavel Busko --------- Co-authored-by: Ralf Pannemans Co-authored-by: Sumit Kulhadia --- integration/integration_cnb_test.go | 11 ++ .../testdata/TestCnbIntegration/config.yml | 5 + pkg/cnbutils/bindings/bindings.go | 113 ++++++++++++------ pkg/cnbutils/bindings/bindings_test.go | 68 ++++++++--- pkg/config/vault.go | 18 +-- pkg/config/vault_test.go | 2 +- resources/metadata/cnbBuild.yaml | 12 ++ 7 files changed, 167 insertions(+), 62 deletions(-) diff --git a/integration/integration_cnb_test.go b/integration/integration_cnb_test.go index cec16cbd6e..39be5ee048 100644 --- a/integration/integration_cnb_test.go +++ b/integration/integration_cnb_test.go @@ -46,6 +46,9 @@ func TestCNBIntegrationNPMProject(t *testing.T) { User: "cnb", TestDir: []string{"testdata"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), + Environment: map[string]string{ + "PIPER_VAULTCREDENTIAL_DYNATRACE_API_KEY": "api-key-content", + }, }) defer container.terminate(t) @@ -54,6 +57,9 @@ func TestCNBIntegrationNPMProject(t *testing.T) { User: "cnb", TestDir: []string{"testdata"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), + Environment: map[string]string{ + "PIPER_VAULTCREDENTIAL_DYNATRACE_API_KEY": "api-key-content", + }, }) defer container2.terminate(t) @@ -239,6 +245,9 @@ func TestCNBIntegrationBindings(t *testing.T) { User: "cnb", TestDir: []string{"testdata"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), + Environment: map[string]string{ + "PIPER_VAULTCREDENTIAL_DYNATRACE_API_KEY": "api-key-content", + }, }) defer container.terminate(t) @@ -250,6 +259,8 @@ func TestCNBIntegrationBindings(t *testing.T) { "/tmp/platform/bindings/dummy-binding/type", "/tmp/platform/bindings/dummy-binding/dummy.yml", ) + container.assertFileContentEquals(t, "/tmp/platform/bindings/maven-settings/settings.xml", "invalid xml") + container.assertFileContentEquals(t, "/tmp/platform/bindings/dynatrace/api-key", "api-key-content") } func TestCNBIntegrationMultiImage(t *testing.T) { diff --git a/integration/testdata/TestCnbIntegration/config.yml b/integration/testdata/TestCnbIntegration/config.yml index 61738cf993..e2428c1c83 100644 --- a/integration/testdata/TestCnbIntegration/config.yml +++ b/integration/testdata/TestCnbIntegration/config.yml @@ -13,3 +13,8 @@ steps: data: - key: dummy.yml file: TestCnbIntegration/config.yml + dynatrace: + type: Dynatrace + data: + - key: api-key + vaultCredentialKey: dynatrace-api-key diff --git a/pkg/cnbutils/bindings/bindings.go b/pkg/cnbutils/bindings/bindings.go index a202078a4d..351e08eaf9 100644 --- a/pkg/cnbutils/bindings/bindings.go +++ b/pkg/cnbutils/bindings/bindings.go @@ -6,6 +6,7 @@ import ( "fmt" "io/ioutil" "net/http" + "os" "path/filepath" "strings" @@ -13,6 +14,7 @@ import ( k8sjson "sigs.k8s.io/json" "github.com/SAP/jenkins-library/pkg/cnbutils" + "github.com/SAP/jenkins-library/pkg/config" piperhttp "github.com/SAP/jenkins-library/pkg/http" ) @@ -23,39 +25,67 @@ type binding struct { } type bindingData struct { - Key string `json:"key"` - Content *string `json:"content,omitempty"` 
- File *string `json:"file,omitempty"` - FromURL *string `json:"fromUrl,omitempty"` + Key string `json:"key"` + Content *string `json:"content,omitempty"` + File *string `json:"file,omitempty"` + FromURL *string `json:"fromUrl,omitempty"` + VaultCredentialKey *string `json:"vaultCredentialKey,omitempty"` } type bindings map[string]binding +type bindingContentType int + +const ( + fileBinding bindingContentType = iota + contentBinding + fromURLBinding + vaultBinding +) + // Return error if: -// 1. Content is set + File or FromURL -// 2. File is set + FromURL or Content -// 3. FromURL is set + File or Content -// 4. Everything is set +// 1. Content is set + File or FromURL or VaultCredentialKey +// 2. File is set + FromURL or Content or VaultCredentialKey +// 3. FromURL is set + File or Content or VaultCredentialKey +// 4. VaultCredentialKey is set + File or FromURL or Content +// 5. Everything is set func (b *bindingData) validate() error { if !validName(b.Key) { return fmt.Errorf("invalid key: '%s'", b.Key) } - if b.Content == nil && b.File == nil && b.FromURL == nil { - return errors.New("one of 'file', 'content' or 'fromUrl' properties must be specified") + if b.Content == nil && b.File == nil && b.FromURL == nil && b.VaultCredentialKey == nil { + return errors.New("one of 'file', 'content', 'fromUrl' or 'vaultCredentialKey' properties must be specified") } - onlyOneSet := (b.Content != nil && b.File == nil && b.FromURL == nil) || - (b.Content == nil && b.File != nil && b.FromURL == nil) || - (b.Content == nil && b.File == nil && b.FromURL != nil) + onlyOneSet := (b.Content != nil && b.File == nil && b.FromURL == nil && b.VaultCredentialKey == nil) || + (b.Content == nil && b.File != nil && b.FromURL == nil && b.VaultCredentialKey == nil) || + (b.Content == nil && b.File == nil && b.FromURL != nil && b.VaultCredentialKey == nil) || + (b.Content == nil && b.File == nil && b.FromURL == nil && b.VaultCredentialKey != nil) if !onlyOneSet { - return errors.New("only one of 'content', 'file' or 'fromUrl' can be set") + return errors.New("only one of 'content', 'file', 'fromUrl' or 'vaultCredentialKey' can be set") } return nil } +func (b *bindingData) bindingContentType() bindingContentType { + if b.File != nil { + return fileBinding + } + + if b.Content != nil { + return contentBinding + } + + if b.FromURL != nil { + return fromURLBinding + } + + return vaultBinding +} + // ProcessBindings creates the given bindings in the platform directory func ProcessBindings(utils cnbutils.BuildUtils, httpClient piperhttp.Sender, platformPath string, bindings map[string]interface{}) error { typedBindings, err := toTyped(bindings) @@ -95,35 +125,41 @@ func processBinding(utils cnbutils.BuildUtils, httpClient piperhttp.Sender, plat return errors.Wrap(err, "failed to write the 'type' binding file") } - if data.File != nil { - _, err = utils.Copy(*data.File, filepath.Join(bindingDir, data.Key)) + var bindingContent []byte + + switch data.bindingContentType() { + case fileBinding: + bindingContent, err = utils.FileRead(*data.File) if err != nil { return errors.Wrap(err, "failed to copy binding file") } - } else { - var bindingContent []byte - - if data.Content == nil { - response, err := httpClient.SendRequest(http.MethodGet, *data.FromURL, nil, nil, nil) - if err != nil { - return errors.Wrap(err, "failed to load binding from url") - } - - bindingContent, err = ioutil.ReadAll(response.Body) - defer response.Body.Close() - if err != nil { - return errors.Wrap(err, "error reading response") - } - } else { - 
bindingContent = []byte(*data.Content) + case contentBinding: + bindingContent = []byte(*data.Content) + case fromURLBinding: + response, err := httpClient.SendRequest(http.MethodGet, *data.FromURL, nil, nil, nil) + if err != nil { + return errors.Wrap(err, "failed to load binding from url") } - err = utils.FileWrite(filepath.Join(bindingDir, data.Key), bindingContent, 0644) + bindingContent, err = ioutil.ReadAll(response.Body) + defer response.Body.Close() if err != nil { - return errors.Wrap(err, "failed to write binding") + return errors.Wrap(err, "error reading response") + } + case vaultBinding: + envVar := config.VaultCredentialEnvPrefixDefault + config.ConvertEnvVar(*data.VaultCredentialKey) + if bindingContentString, ok := os.LookupEnv(envVar); ok { + bindingContent = []byte(bindingContentString) + } else { + return fmt.Errorf("environment variable %q is not set (required by the %q binding)", envVar, name) } } + err = utils.FileWrite(filepath.Join(bindingDir, data.Key), bindingContent, 0644) + if err != nil { + return errors.Wrap(err, "failed to write binding") + } + return nil } @@ -152,10 +188,11 @@ func toTyped(rawMap map[string]interface{}) (bindings, error) { if b.Key != "" { b.Data = append(b.Data, bindingData{ - Key: b.Key, - Content: b.Content, - File: b.File, - FromURL: b.FromURL, + Key: b.Key, + Content: b.Content, + File: b.File, + FromURL: b.FromURL, + VaultCredentialKey: b.VaultCredentialKey, }) } diff --git a/pkg/cnbutils/bindings/bindings_test.go b/pkg/cnbutils/bindings/bindings_test.go index ba35ba9280..b06c320e7b 100644 --- a/pkg/cnbutils/bindings/bindings_test.go +++ b/pkg/cnbutils/bindings/bindings_test.go @@ -56,37 +56,42 @@ func TestProcessBindings(t *testing.T) { }) if assert.NoError(t, err) { - if assert.True(t, utils.HasFile("/tmp/platform/bindings/a/inline.yaml")) { + if assert.True(t, utils.HasWrittenFile("/tmp/platform/bindings/a/inline.yaml")) { content, err := utils.FileRead("/tmp/platform/bindings/a/inline.yaml") if assert.NoError(t, err) { assert.Equal(t, string(content), "my inline content") } } - if assert.True(t, utils.HasFile("/tmp/platform/bindings/a/type")) { + if assert.True(t, utils.HasWrittenFile("/tmp/platform/bindings/a/type")) { content, err := utils.FileRead("/tmp/platform/bindings/a/type") if assert.NoError(t, err) { assert.Equal(t, string(content), "inline") } } - assert.True(t, utils.HasCopiedFile("/tmp/somefile.yaml", "/tmp/platform/bindings/b/from-file.yaml")) + if assert.True(t, utils.HasWrittenFile("/tmp/platform/bindings/b/from-file.yaml")) { + content, err := utils.FileRead("/tmp/platform/bindings/b/from-file.yaml") + if assert.NoError(t, err) { + assert.Equal(t, string(content), "some file content") + } + } - if assert.True(t, utils.HasFile("/tmp/platform/bindings/b/type")) { + if assert.True(t, utils.HasWrittenFile("/tmp/platform/bindings/b/type")) { content, err := utils.FileRead("/tmp/platform/bindings/b/type") if assert.NoError(t, err) { assert.Equal(t, string(content), "file") } } - if assert.True(t, utils.HasFile("/tmp/platform/bindings/c/type")) { + if assert.True(t, utils.HasWrittenFile("/tmp/platform/bindings/c/type")) { content, err := utils.FileRead("/tmp/platform/bindings/c/type") if assert.NoError(t, err) { assert.Equal(t, string(content), "url") } } - if assert.True(t, utils.HasFile("/tmp/platform/bindings/c/from-url.yaml")) { + if assert.True(t, utils.HasWrittenFile("/tmp/platform/bindings/c/from-url.yaml")) { content, err := utils.FileRead("/tmp/platform/bindings/c/from-url.yaml") if assert.NoError(t, err) { 
assert.Equal(t, string(content), "from url content") @@ -96,6 +101,7 @@ func TestProcessBindings(t *testing.T) { }) t.Run("writes bindings to files", func(t *testing.T) { + t.Setenv("PIPER_VAULTCREDENTIAL_VAULT_KEY1", "test value from vault") var utils = mockUtils() httpmock.Activate() defer httpmock.DeactivateAndReset() @@ -118,6 +124,10 @@ func TestProcessBindings(t *testing.T) { "key": "from-url.yaml", "fromUrl": "http://test-url.com/binding", }, + { + "key": "from-vault.yaml", + "vaultCredentialKey": "vault-key1", + }, }, }, "b": map[string]interface{}{ @@ -132,35 +142,48 @@ func TestProcessBindings(t *testing.T) { }) if assert.NoError(t, err) { - if assert.True(t, utils.HasFile("/tmp/platform/bindings/a/type")) { + if assert.True(t, utils.HasWrittenFile("/tmp/platform/bindings/a/type")) { content, err := utils.FileRead("/tmp/platform/bindings/a/type") if assert.NoError(t, err) { assert.Equal(t, string(content), "test") } - if assert.True(t, utils.HasFile("/tmp/platform/bindings/a/inline.yaml")) { + if assert.True(t, utils.HasWrittenFile("/tmp/platform/bindings/a/inline.yaml")) { content, err = utils.FileRead("/tmp/platform/bindings/a/inline.yaml") if assert.NoError(t, err) { assert.Equal(t, string(content), "my inline content") } } - assert.True(t, utils.HasCopiedFile("/tmp/somefile.yaml", "/tmp/platform/bindings/a/from-file.yaml")) + if assert.True(t, utils.HasWrittenFile("/tmp/platform/bindings/a/from-file.yaml")) { + content, err = utils.FileRead("/tmp/platform/bindings/a/from-file.yaml") + if assert.NoError(t, err) { + assert.Equal(t, string(content), "some file content") + } + } - if assert.True(t, utils.HasFile("/tmp/platform/bindings/a/from-url.yaml")) { + if assert.True(t, utils.HasWrittenFile("/tmp/platform/bindings/a/from-url.yaml")) { content, err := utils.FileRead("/tmp/platform/bindings/a/from-url.yaml") if assert.NoError(t, err) { assert.Equal(t, string(content), "from url content") } } + + if assert.True(t, utils.HasWrittenFile("/tmp/platform/bindings/a/from-vault.yaml")) { + content, err := utils.FileRead("/tmp/platform/bindings/a/from-vault.yaml") + if assert.NoError(t, err) { + assert.Equal(t, string(content), "test value from vault") + } + } } - if assert.True(t, utils.HasFile("/tmp/platform/bindings/b/type")) { + + if assert.True(t, utils.HasWrittenFile("/tmp/platform/bindings/b/type")) { content, err := utils.FileRead("/tmp/platform/bindings/b/type") if assert.NoError(t, err) { assert.Equal(t, string(content), "test2") } - if assert.True(t, utils.HasFile("/tmp/platform/bindings/b/inline2.yaml")) { + if assert.True(t, utils.HasWrittenFile("/tmp/platform/bindings/b/inline2.yaml")) { content, err = utils.FileRead("/tmp/platform/bindings/b/inline2.yaml") if assert.NoError(t, err) { assert.Equal(t, string(content), "my inline content2") @@ -224,7 +247,7 @@ func TestProcessBindings(t *testing.T) { }) if assert.Error(t, err) { - assert.Equal(t, "failed to validate binding 'my-binding': only one of 'content', 'file' or 'fromUrl' can be set", err.Error()) + assert.Equal(t, "failed to validate binding 'my-binding': only one of 'content', 'file', 'fromUrl' or 'vaultCredentialKey' can be set", err.Error()) } }) @@ -238,7 +261,7 @@ func TestProcessBindings(t *testing.T) { }) if assert.Error(t, err) { - assert.Equal(t, "failed to validate binding 'my-binding': one of 'file', 'content' or 'fromUrl' properties must be specified", err.Error()) + assert.Equal(t, "failed to validate binding 'my-binding': one of 'file', 'content', 'fromUrl' or 'vaultCredentialKey' properties must be 
specified", err.Error()) } }) @@ -285,4 +308,21 @@ func TestProcessBindings(t *testing.T) { assert.Contains(t, err.Error(), "validation error", err.Error()) } }) + + t.Run("fails if vault environment variable is not set", func(t *testing.T) { + var utils = mockUtils() + err := bindings.ProcessBindings(utils, &piperhttp.Client{}, "/tmp/platform", map[string]interface{}{ + "my-binding": map[string]interface{}{ + "type": "test", + "data": []map[string]interface{}{{ + "key": "from-vault.yaml", + "vaultCredentialKey": "vault-key1", + }}, + }, + }) + + if assert.Error(t, err) { + assert.Contains(t, err.Error(), "environment variable \"PIPER_VAULTCREDENTIAL_VAULT_KEY1\" is not set (required by the \"my-binding\" binding)") + } + }) } diff --git a/pkg/config/vault.go b/pkg/config/vault.go index f847adb0c1..298c87c804 100644 --- a/pkg/config/vault.go +++ b/pkg/config/vault.go @@ -32,7 +32,7 @@ const ( vaultTestCredentialEnvPrefix = "vaultTestCredentialEnvPrefix" vaultCredentialEnvPrefix = "vaultCredentialEnvPrefix" vaultTestCredentialEnvPrefixDefault = "PIPER_TESTCREDENTIAL_" - vaultCredentialEnvPrefixDefault = "PIPER_VAULTCREDENTIAL_" + VaultCredentialEnvPrefixDefault = "PIPER_VAULTCREDENTIAL_" vaultSecretName = ".+VaultSecretName$" ) @@ -257,7 +257,7 @@ func populateTestCredentialsAsEnvs(config *StepConfig, secret map[string]string, for _, key := range keys { if secretKey == key { log.RegisterSecret(secretValue) - envVariable := vaultTestCredentialEnvPrefix + convertEnvVar(secretKey) + envVariable := vaultTestCredentialEnvPrefix + ConvertEnvVar(secretKey) log.Entry().Debugf("Exposing test credential '%v' as '%v'", key, envVariable) os.Setenv(envVariable, secretValue) matched = true @@ -273,19 +273,19 @@ func populateCredentialsAsEnvs(config *StepConfig, secret map[string]string, key isCredentialEnvPrefixDefault := false if !ok { - vaultCredentialEnvPrefix = vaultCredentialEnvPrefixDefault + vaultCredentialEnvPrefix = VaultCredentialEnvPrefixDefault isCredentialEnvPrefixDefault = true } for secretKey, secretValue := range secret { for _, key := range keys { if secretKey == key { log.RegisterSecret(secretValue) - envVariable := vaultCredentialEnvPrefix + convertEnvVar(secretKey) + envVariable := vaultCredentialEnvPrefix + ConvertEnvVar(secretKey) log.Entry().Debugf("Exposing general purpose credential '%v' as '%v'", key, envVariable) os.Setenv(envVariable, secretValue) log.RegisterSecret(piperutils.EncodeString(secretValue)) - envVariable = vaultCredentialEnvPrefix + convertEnvVar(secretKey) + "_BASE64" + envVariable = vaultCredentialEnvPrefix + ConvertEnvVar(secretKey) + "_BASE64" log.Entry().Debugf("Exposing general purpose base64 encoded credential '%v' as '%v'", key, envVariable) os.Setenv(envVariable, piperutils.EncodeString(secretValue)) matched = true @@ -300,12 +300,12 @@ func populateCredentialsAsEnvs(config *StepConfig, secret map[string]string, key for _, key := range keys { if secretKey == key { log.RegisterSecret(secretValue) - envVariable := vaultCredentialEnvPrefixDefault + convertEnvVar(secretKey) + envVariable := VaultCredentialEnvPrefixDefault + ConvertEnvVar(secretKey) log.Entry().Debugf("Exposing general purpose credential '%v' as '%v'", key, envVariable) os.Setenv(envVariable, secretValue) log.RegisterSecret(piperutils.EncodeString(secretValue)) - envVariable = vaultCredentialEnvPrefixDefault + convertEnvVar(secretKey) + "_BASE64" + envVariable = VaultCredentialEnvPrefixDefault + ConvertEnvVar(secretKey) + "_BASE64" log.Entry().Debugf("Exposing general purpose base64 encoded 
credential '%v' as '%v'", key, envVariable) os.Setenv(envVariable, piperutils.EncodeString(secretValue)) matched = true @@ -351,8 +351,8 @@ func getCredentialKeys(config *StepConfig) []string { return keys } -// converts to a valid environment variable string -func convertEnvVar(s string) string { +// ConvertEnvVar converts to a valid environment variable string +func ConvertEnvVar(s string) string { r := strings.ToUpper(s) r = strings.ReplaceAll(r, "-", "_") reg, err := regexp.Compile("[^a-zA-Z0-9_]*") diff --git a/pkg/config/vault_test.go b/pkg/config/vault_test.go index 9ac3f6a111..f486e4e4aa 100644 --- a/pkg/config/vault_test.go +++ b/pkg/config/vault_test.go @@ -357,7 +357,7 @@ func Test_convertEnvVar(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - if got := convertEnvVar(tt.args.s); got != tt.want { + if got := ConvertEnvVar(tt.args.s); got != tt.want { t.Errorf("convertEnvironment() = %v, want %v", got, tt.want) } }) diff --git a/resources/metadata/cnbBuild.yaml b/resources/metadata/cnbBuild.yaml index c7ef29dc39..21f86af7cd 100644 --- a/resources/metadata/cnbBuild.yaml +++ b/resources/metadata/cnbBuild.yaml @@ -207,6 +207,18 @@ spec: fromUrl: https://url-to/setting.xml ``` + using [Vault general purpose credentials](https://www.project-piper.io/infrastructure/vault/#using-vault-for-general-purpose-and-test-credentials): + ```yaml + bindings: + dynatrace: + type: Dynatrace + data: + - key: api-token + vaultCredentialKey: dynatrace-api-token + vaultCredentialPath: cnb-bindings + vaultCredentialKeys: ['dynatrace-api-token'] + ``` + Deprecated: A binding with a single key, could be written like this: ```yaml From 549b32c67505bb512e902dbe263715df705a284a Mon Sep 17 00:00:00 2001 From: Daniel Mieg <56156797+DanielMieg@users.noreply.github.com> Date: Fri, 31 Mar 2023 15:26:38 +0200 Subject: [PATCH 04/23] Adapt to backend API changes (#4309) * Remove legacy logging * Implement new requests * Improve Tests * Adapt tests * Refactor * Fix tests --- cmd/abapEnvironmentCheckoutBranch_test.go | 23 +- cmd/abapEnvironmentCloneGitRepo_test.go | 29 +- cmd/abapEnvironmentPullGitRepo_test.go | 20 +- pkg/abaputils/manageGitRepositoryUtils.go | 248 ++++++++---------- .../manageGitRepositoryUtils_test.go | 25 +- 5 files changed, 160 insertions(+), 185 deletions(-) diff --git a/cmd/abapEnvironmentCheckoutBranch_test.go b/cmd/abapEnvironmentCheckoutBranch_test.go index d7420f1b8d..abbf23e204 100644 --- a/cmd/abapEnvironmentCheckoutBranch_test.go +++ b/cmd/abapEnvironmentCheckoutBranch_test.go @@ -14,15 +14,14 @@ import ( var executionLogStringCheckout string func init() { - executionLog := abaputils.PullEntity{ - ToExecutionLog: abaputils.AbapLogs{ - Results: []abaputils.LogResults{ - { - Index: "1", - Type: "LogEntry", - Description: "S", - Timestamp: "/Date(1644332299000+0000)/", - }, + executionLog := abaputils.LogProtocolResults{ + Results: []abaputils.LogProtocol{ + { + ProtocolLine: 1, + OverviewIndex: 1, + Type: "LogEntry", + Description: "S", + Timestamp: "/Date(1644332299000+0000)/", }, }, } @@ -55,9 +54,9 @@ func TestCheckoutBranchStep(t *testing.T) { logResultSuccess := `{"d": { "sc_name": "/DMO/SWC", "status": "S", "to_Log_Overview": { "results": [ { "log_index": 1, "log_name": "Main Import", "type_of_found_issues": "Success", "timestamp": "/Date(1644332299000+0000)/", "to_Log_Protocol": { "results": [ { "log_index": 1, "index_no": "1", "log_name": "", "type": "Info", "descr": "Main import", "timestamp": null, "criticality": 0 } ] } } ] } } }` client := 
&abaputils.ClientMock{ BodyList: []string{ + `{"d" : [] }`, `{"d" : ` + executionLogStringCheckout + `}`, logResultSuccess, - `{"d" : { "EntitySets" : [ "LogOverviews" ] } }`, `{"d" : { "status" : "S" } }`, `{"d" : { "status" : "S" } }`, `{"d" : { "status" : "S" } }`, @@ -84,9 +83,9 @@ func TestCheckoutBranchStep(t *testing.T) { logResultError := `{"d": { "sc_name": "/DMO/SWC", "status": "S", "to_Log_Overview": { "results": [ { "log_index": 1, "log_name": "Main Import", "type_of_found_issues": "Error", "timestamp": "/Date(1644332299000+0000)/", "to_Log_Protocol": { "results": [ { "log_index": 1, "index_no": "1", "log_name": "", "type": "Info", "descr": "Main import", "timestamp": null, "criticality": 0 } ] } } ] } } }` client := &abaputils.ClientMock{ BodyList: []string{ + `{"d" : [] }`, `{"d" : ` + executionLogStringCheckout + `}`, logResultError, - `{"d" : { "EntitySets" : [ "LogOverviews" ] } }`, `{"d" : { "status" : "E" } }`, `{"d" : { "status" : "E" } }`, `{"d" : { "status" : "E" } }`, @@ -123,9 +122,9 @@ func TestCheckoutBranchStep(t *testing.T) { logResultError := `{"d": { "sc_name": "/DMO/SWC", "status": "S", "to_Log_Overview": { "results": [ { "log_index": 1, "log_name": "Main Import", "type_of_found_issues": "Error", "timestamp": "/Date(1644332299000+0000)/", "to_Log_Protocol": { "results": [ { "log_index": 1, "index_no": "1", "log_name": "", "type": "Info", "descr": "Main import", "timestamp": null, "criticality": 0 } ] } } ] } } }` client := &abaputils.ClientMock{ BodyList: []string{ + `{"d" : [] }`, `{"d" : ` + executionLogStringCheckout + `}`, logResultError, - `{"d" : { "EntitySets" : [ "LogOverviews" ] } }`, `{"d" : { "status" : "E" } }`, `{"d" : { "status" : "E" } }`, `{"d" : { "status" : "E" } }`, diff --git a/cmd/abapEnvironmentCloneGitRepo_test.go b/cmd/abapEnvironmentCloneGitRepo_test.go index fe2bba19c3..9f3b2c3bca 100644 --- a/cmd/abapEnvironmentCloneGitRepo_test.go +++ b/cmd/abapEnvironmentCloneGitRepo_test.go @@ -16,15 +16,14 @@ import ( var executionLogStringClone string func init() { - executionLog := abaputils.PullEntity{ - ToExecutionLog: abaputils.AbapLogs{ - Results: []abaputils.LogResults{ - { - Index: "1", - Type: "LogEntry", - Description: "S", - Timestamp: "/Date(1644332299000+0000)/", - }, + executionLog := abaputils.LogProtocolResults{ + Results: []abaputils.LogProtocol{ + { + ProtocolLine: 1, + OverviewIndex: 1, + Type: "LogEntry", + Description: "S", + Timestamp: "/Date(1644332299000+0000)/", }, }, } @@ -78,16 +77,16 @@ repositories: logResultSuccess := `{"d": { "sc_name": "/DMO/SWC", "status": "S", "to_Log_Overview": { "results": [ { "log_index": 1, "log_name": "Main Import", "type_of_found_issues": "Success", "timestamp": "/Date(1644332299000+0000)/", "to_Log_Protocol": { "results": [ { "log_index": 1, "index_no": "1", "log_name": "", "type": "Info", "descr": "Main import", "timestamp": null, "criticality": 0 } ] } } ] } } }` client := &abaputils.ClientMock{ BodyList: []string{ + `{"d" : [] }`, `{"d" : ` + executionLogStringClone + `}`, logResultSuccess, - `{"d" : { "EntitySets" : [ "LogOverviews" ] } }`, `{"d" : { "status" : "S" } }`, `{"d" : { "status" : "R" } }`, `{"d" : { "status" : "R" } }`, `{"d" : { "status" : "R" } }`, + `{"d" : [] }`, `{"d" : ` + executionLogStringClone + `}`, logResultSuccess, - `{"d" : { "EntitySets" : [ "LogOverviews" ] } }`, `{"d" : { "status" : "S" } }`, `{"d" : { "status" : "R" } }`, `{"d" : { "status" : "R" } }`, @@ -124,9 +123,9 @@ repositories: logResultSuccess := `{"d": { "sc_name": "/DMO/SWC", "status": "S", 
"to_Log_Overview": { "results": [ { "log_index": 1, "log_name": "Main Import", "type_of_found_issues": "Success", "timestamp": "/Date(1644332299000+0000)/", "to_Log_Protocol": { "results": [ { "log_index": 1, "index_no": "1", "log_name": "", "type": "Info", "descr": "Main import", "timestamp": null, "criticality": 0 } ] } } ] } } }` client := &abaputils.ClientMock{ BodyList: []string{ + `{"d" : [] }`, `{"d" : ` + executionLogStringClone + `}`, logResultSuccess, - `{"d" : { "EntitySets" : [ "LogOverviews" ] } }`, `{"d" : { "status" : "S" } }`, `{"d" : { "status" : "R" } }`, `{"d" : { "status" : "R" } }`, @@ -397,15 +396,15 @@ func TestALreadyCloned(t *testing.T) { logResultSuccess := `{"d": { "sc_name": "/DMO/SWC", "status": "S", "to_Log_Overview": { "results": [ { "log_index": 1, "log_name": "Main Import", "type_of_found_issues": "Success", "timestamp": "/Date(1644332299000+0000)/", "to_Log_Protocol": { "results": [ { "log_index": 1, "index_no": "1", "log_name": "", "type": "Info", "descr": "Main import", "timestamp": null, "criticality": 0 } ] } } ] } } }` client := &abaputils.ClientMock{ BodyList: []string{ + `{"d" : }`, `{"d" : ` + executionLogStringClone + `}`, logResultSuccess, - `{"d" : { "EntitySets" : [ "LogOverviews" ] } }`, `{"d" : { "status" : "S" } }`, `{"d" : { "status" : "R" } }`, `{"d" : { "status" : "R" } }`, + `{"d" : }`, `{"d" : ` + executionLogStringClone + `}`, logResultSuccess, - `{"d" : { "EntitySets" : [ "LogOverviews" ] } }`, `{"d" : { "status" : "S" } }`, `{"d" : { "status" : "R" } }`, `{"d" : { "status" : "R" } }`, @@ -479,7 +478,7 @@ func TestALreadyCloned(t *testing.T) { err := errors.New("Custom Error") err, _ = handleCloneError(&resp, err, autils.ReturnedConnectionDetailsHTTP, client, repo) if assert.Error(t, err, "Expected error") { - assert.Equal(t, "Pull of the repository / software component 'Test', commit 'abcd1234' failed on the ABAP system", err.Error(), "Expected different error message") + assert.Equal(t, "Pull of the repository / software component 'Test', commit 'abcd1234' failed on the ABAP system: Request to ABAP System not successful", err.Error(), "Expected different error message") } }) diff --git a/cmd/abapEnvironmentPullGitRepo_test.go b/cmd/abapEnvironmentPullGitRepo_test.go index 96ac75a681..3aad4e7619 100644 --- a/cmd/abapEnvironmentPullGitRepo_test.go +++ b/cmd/abapEnvironmentPullGitRepo_test.go @@ -16,15 +16,14 @@ var executionLogStringPull string var logResultErrorPull string func init() { - executionLog := abaputils.PullEntity{ - ToExecutionLog: abaputils.AbapLogs{ - Results: []abaputils.LogResults{ - { - Index: "1", - Type: "LogEntry", - Description: "S", - Timestamp: "/Date(1644332299000+0000)/", - }, + executionLog := abaputils.LogProtocolResults{ + Results: []abaputils.LogProtocol{ + { + ProtocolLine: 1, + OverviewIndex: 1, + Type: "LogEntry", + Description: "S", + Timestamp: "/Date(1644332299000+0000)/", }, }, } @@ -57,9 +56,9 @@ func TestPullStep(t *testing.T) { logResultSuccess := `{"d": { "sc_name": "/DMO/SWC", "status": "S", "to_Log_Overview": { "results": [ { "log_index": 1, "log_name": "Main Import", "type_of_found_issues": "Success", "timestamp": "/Date(1644332299000+0000)/", "to_Log_Protocol": { "results": [ { "log_index": 1, "index_no": "1", "log_name": "", "type": "Info", "descr": "Main import", "timestamp": null, "criticality": 0 } ] } } ] } } }` client := &abaputils.ClientMock{ BodyList: []string{ + `{"d" : [] }`, `{"d" : ` + executionLogStringPull + `}`, logResultSuccess, - `{"d" : { "EntitySets" : [ "LogOverviews" ] 
} }`, `{"d" : { "status" : "S" } }`, `{"d" : { "status" : "R" } }`, `{"d" : { "status" : "R" } }`, @@ -71,6 +70,7 @@ func TestPullStep(t *testing.T) { err := runAbapEnvironmentPullGitRepo(&config, &autils, client) assert.NoError(t, err, "Did not expect error") + assert.Equal(t, 0, len(client.BodyList), "Not all requests were done") }) t.Run("Run Step Failure", func(t *testing.T) { diff --git a/pkg/abaputils/manageGitRepositoryUtils.go b/pkg/abaputils/manageGitRepositoryUtils.go index 57dc5af2ca..d204c88f81 100644 --- a/pkg/abaputils/manageGitRepositoryUtils.go +++ b/pkg/abaputils/manageGitRepositoryUtils.go @@ -15,6 +15,9 @@ import ( ) const failureMessageClonePull = "Could not pull the Repository / Software Component " +const numberOfEntriesPerPage = 100 +const logOutputStatusLength = 10 +const logOutputTimestampLength = 29 // PollEntity periodically polls the pull/import entity to get the status. Check if the import is still running func PollEntity(repositoryName string, connectionDetails ConnectionDetailsHTTP, client piperhttp.Sender, pollIntervall time.Duration) (string, error) { @@ -30,17 +33,8 @@ func PollEntity(repositoryName string, connectionDetails ConnectionDetailsHTTP, status = pullEntity.Status log.Entry().WithField("StatusCode", responseStatus).Info("Status: " + pullEntity.StatusDescription) if pullEntity.Status != "R" { - printTransportLogs := true - if serviceContainsNewLogEntities(connectionDetails, client) { - PrintLogs(repositoryName, connectionDetails, client) - printTransportLogs = false - } - if pullEntity.Status == "E" { - log.SetErrorCategory(log.ErrorUndefined) - PrintLegacyLogs(repositoryName, connectionDetails, client, true, printTransportLogs) - } else { - PrintLegacyLogs(repositoryName, connectionDetails, client, false, printTransportLogs) - } + + PrintLogs(repositoryName, connectionDetails, client) break } time.Sleep(pollIntervall) @@ -48,37 +42,8 @@ func PollEntity(repositoryName string, connectionDetails ConnectionDetailsHTTP, return status, nil } -func serviceContainsNewLogEntities(connectionDetails ConnectionDetailsHTTP, client piperhttp.Sender) (newLogEntitiesAvailable bool) { - - newLogEntitiesAvailable = false - details := connectionDetails - details.URL = details.Host + "/sap/opu/odata/sap/MANAGE_GIT_REPOSITORY/" - resp, err := GetHTTPResponse("GET", details, nil, client) - if err != nil { - return - } - defer resp.Body.Close() - - var entitySet EntitySetsForManageGitRepository - - // Parse response - var abapResp map[string]*json.RawMessage - bodyText, _ := ioutil.ReadAll(resp.Body) - - json.Unmarshal(bodyText, &abapResp) - json.Unmarshal(*abapResp["d"], &entitySet) - - for _, entitySet := range entitySet.EntitySets { - if entitySet == "LogOverviews" || entitySet == "LogProtocols" { - return true - } - } - return - -} - func PrintLogs(repositoryName string, connectionDetails ConnectionDetailsHTTP, client piperhttp.Sender) { - connectionDetails.URL = connectionDetails.URL + "?$expand=to_Log_Overview,to_Log_Overview/to_Log_Protocol" + connectionDetails.URL = connectionDetails.URL + "?$expand=to_Log_Overview" entity, _, err := GetStatus(failureMessageClonePull+repositoryName, connectionDetails, client) if err != nil { return @@ -94,127 +59,102 @@ func PrintLogs(repositoryName string, connectionDetails ConnectionDetailsHTTP, c return entity.ToLogOverview.Results[i].Index < entity.ToLogOverview.Results[j].Index }) - // Get Lengths - phaseLength := 22 // minimum default length - for _, logEntry := range entity.ToLogOverview.Results { - if l := 
len(logEntry.Name); l > phaseLength { - phaseLength = l - } - } - statusLength := 10 - timestampLength := 29 + logOutputPhaseLength, logOutputLineLength := calculateLenghts(entity) - // Dashed Line Length - lineLength := 10 + phaseLength + statusLength + timestampLength + printOverview(logOutputLineLength, logOutputPhaseLength, entity) - // Print Overview - log.Entry().Infof("\n") - dashedLine(lineLength) - log.Entry().Infof("| %-"+fmt.Sprint(phaseLength)+"s | %"+fmt.Sprint(statusLength)+"s | %-"+fmt.Sprint(timestampLength)+"s |", "Phase", "Status", "Timestamp") - dashedLine(lineLength) - for _, logEntry := range entity.ToLogOverview.Results { - log.Entry().Infof("| %-"+fmt.Sprint(phaseLength)+"s | %"+fmt.Sprint(statusLength)+"s | %-"+fmt.Sprint(timestampLength)+"s |", logEntry.Name, logEntry.Status, ConvertTime(logEntry.Timestamp)) - } - dashedLine(lineLength) + dashedLine(logOutputLineLength) // Print Details for _, logEntryForDetails := range entity.ToLogOverview.Results { - printLog(logEntryForDetails) + printLog(logEntryForDetails, connectionDetails, client) } log.Entry().Infof("-------------------------") return } +func printOverview(logOutputLineLength int, logOutputPhaseLength int, entity PullEntity) { + log.Entry().Infof("\n") + dashedLine(logOutputLineLength) + log.Entry().Infof("| %-"+fmt.Sprint(logOutputPhaseLength)+"s | %"+fmt.Sprint(logOutputStatusLength)+"s | %-"+fmt.Sprint(logOutputTimestampLength)+"s |", "Phase", "Status", "Timestamp") + dashedLine(logOutputLineLength) + for _, logEntry := range entity.ToLogOverview.Results { + log.Entry().Infof("| %-"+fmt.Sprint(logOutputPhaseLength)+"s | %"+fmt.Sprint(logOutputStatusLength)+"s | %-"+fmt.Sprint(logOutputTimestampLength)+"s |", logEntry.Name, logEntry.Status, ConvertTime(logEntry.Timestamp)) + } +} + +func calculateLenghts(entity PullEntity) (int, int) { + phaseLength := 22 + for _, logEntry := range entity.ToLogOverview.Results { + if l := len(logEntry.Name); l > phaseLength { + phaseLength = l + } + } + + lineLength := 10 + phaseLength + logOutputStatusLength + logOutputTimestampLength + return phaseLength, lineLength +} + func dashedLine(i int) { log.Entry().Infof(strings.Repeat("-", i)) } -func printLog(logEntry LogResultsV2) { +func printLog(logEntry LogResultsV2, connectionDetails ConnectionDetailsHTTP, client piperhttp.Sender) { - sort.SliceStable(logEntry.ToLogProtocol.Results, func(i, j int) bool { - return logEntry.ToLogProtocol.Results[i].ProtocolLine < logEntry.ToLogProtocol.Results[j].ProtocolLine - }) + readNextLogEntries := true + page := 0 + + printHeader(logEntry) + + for { + query := getLogProtocolQuery(page) + connectionDetails.URL = logEntry.ToLogProtocol.Deferred.URI + query + entity, err := GetProtocol(failureMessageClonePull, connectionDetails, client) + if (err != nil || reflect.DeepEqual(entity, LogProtocolResults{})) { + readNextLogEntries = false + } + sort.SliceStable(entity.Results, func(i, j int) bool { + return entity.Results[i].ProtocolLine < entity.Results[j].ProtocolLine + }) + + if logEntry.Status != `Success` { + for _, entry := range entity.Results { + log.Entry().Info(entry.Description) + } + + } else { + for _, entry := range entity.Results { + log.Entry().Debug(entry.Description) + } + } + page += 1 + if !readNextLogEntries { + break + } + } +} + +func printHeader(logEntry LogResultsV2) { if logEntry.Status != `Success` { log.Entry().Infof("\n") log.Entry().Infof("-------------------------") log.Entry().Infof("%s (%v)", logEntry.Name, ConvertTime(logEntry.Timestamp)) 
log.Entry().Infof("-------------------------") - - for _, entry := range logEntry.ToLogProtocol.Results { - log.Entry().Info(entry.Description) - } - } else { log.Entry().Debugf("\n") log.Entry().Debugf("-------------------------") log.Entry().Debugf("%s (%v)", logEntry.Name, ConvertTime(logEntry.Timestamp)) log.Entry().Debugf("-------------------------") - - for _, entry := range logEntry.ToLogProtocol.Results { - log.Entry().Debug(entry.Description) - } } - } -// PrintLegacyLogs sorts and formats the received transport and execution log of an import; Deprecated with SAP BTP, ABAP Environment release 2205 -func PrintLegacyLogs(repositoryName string, connectionDetails ConnectionDetailsHTTP, client piperhttp.Sender, errorOnSystem bool, includeTransportLog bool) { - - connectionDetails.URL = connectionDetails.URL + "?$expand=to_Transport_log,to_Execution_log" - entity, _, err := GetStatus(failureMessageClonePull+repositoryName, connectionDetails, client) - if err != nil { - return - } - // Sort logs - sort.SliceStable(entity.ToExecutionLog.Results, func(i, j int) bool { - return entity.ToExecutionLog.Results[i].Index < entity.ToExecutionLog.Results[j].Index - }) - - sort.SliceStable(entity.ToTransportLog.Results, func(i, j int) bool { - return entity.ToTransportLog.Results[i].Index < entity.ToTransportLog.Results[j].Index - }) - - // Show transport and execution log if either the action was erroenous on the system or the log level is set to "debug" (verbose = true) - if errorOnSystem { - if includeTransportLog { - log.Entry().Info("-------------------------") - log.Entry().Info("Transport Log") - log.Entry().Info("-------------------------") - for _, logEntry := range entity.ToTransportLog.Results { - - log.Entry().WithField("Timestamp", ConvertTime(logEntry.Timestamp)).Info(logEntry.Description) - } - } - - log.Entry().Info("-------------------------") - log.Entry().Info("Execution Log") - log.Entry().Info("-------------------------") - for _, logEntry := range entity.ToExecutionLog.Results { - log.Entry().WithField("Timestamp", ConvertTime(logEntry.Timestamp)).Info(logEntry.Description) - } - log.Entry().Info("-------------------------") - } else { - if includeTransportLog { - log.Entry().Debug("-------------------------") - log.Entry().Debug("Transport Log") - log.Entry().Debug("-------------------------") - for _, logEntry := range entity.ToTransportLog.Results { - - log.Entry().WithField("Timestamp", ConvertTime(logEntry.Timestamp)).Debug(logEntry.Description) - } - } - - log.Entry().Debug("-------------------------") - log.Entry().Debug("Execution Log") - log.Entry().Debug("-------------------------") - for _, logEntry := range entity.ToExecutionLog.Results { - log.Entry().WithField("Timestamp", ConvertTime(logEntry.Timestamp)).Debug(logEntry.Description) - } - log.Entry().Debug("-------------------------") - } +func getLogProtocolQuery(page int) string { + skip := page * numberOfEntriesPerPage + top := numberOfEntriesPerPage + return fmt.Sprintf("?$skip=%s&$top=%s", fmt.Sprint(skip), fmt.Sprint(top)) } func GetStatus(failureMessage string, connectionDetails ConnectionDetailsHTTP, client piperhttp.Sender) (body PullEntity, status string, err error) { @@ -251,6 +191,31 @@ func GetStatus(failureMessage string, connectionDetails ConnectionDetailsHTTP, c return body, resp.Status, nil } +func GetProtocol(failureMessage string, connectionDetails ConnectionDetailsHTTP, client piperhttp.Sender) (body LogProtocolResults, err error) { + resp, err := GetHTTPResponse("GET", connectionDetails, nil, 
client) + if err != nil { + log.SetErrorCategory(log.ErrorInfrastructure) + err = HandleHTTPError(resp, err, failureMessage, connectionDetails) + return body, err + } + defer resp.Body.Close() + + // Parse response + var abapResp map[string]*json.RawMessage + bodyText, _ := ioutil.ReadAll(resp.Body) + + marshallError := json.Unmarshal(bodyText, &abapResp) + if marshallError != nil { + return body, errors.Wrap(marshallError, "Could not parse response from the ABAP Environment system") + } + marshallError = json.Unmarshal(*abapResp["d"], &body) + if marshallError != nil { + return body, errors.Wrap(marshallError, "Could not parse response from the ABAP Environment system") + } + + return body, nil +} + // GetRepositories for parsing one or multiple branches and repositories from repositories file or branchName and repositoryName configuration func GetRepositories(config *RepositoriesConfig, branchRequired bool) ([]Repository, error) { var repositories = make([]Repository, 0) @@ -401,12 +366,20 @@ type AbapLogsV2 struct { } type LogResultsV2 struct { - Metadata AbapMetadata `json:"__metadata"` - Index int `json:"log_index"` - Name string `json:"log_name"` - Status string `json:"type_of_found_issues"` - Timestamp string `json:"timestamp"` - ToLogProtocol LogProtocolResults `json:"to_Log_Protocol"` + Metadata AbapMetadata `json:"__metadata"` + Index int `json:"log_index"` + Name string `json:"log_name"` + Status string `json:"type_of_found_issues"` + Timestamp string `json:"timestamp"` + ToLogProtocol LogProtocolDeferred `json:"to_Log_Protocol"` +} + +type LogProtocolDeferred struct { + Deferred URI `json:"__deferred"` +} + +type URI struct { + URI string `json:"uri"` } type LogProtocolResults struct { @@ -419,6 +392,7 @@ type LogProtocol struct { ProtocolLine int `json:"index_no"` Type string `json:"type"` Description string `json:"descr"` + Timestamp string `json:"timestamp"` } // LogResults struct for Execution and Transport Log entities A4C_A2G_GHA_SC_LOG_EXE and A4C_A2G_GHA_SC_LOG_TP diff --git a/pkg/abaputils/manageGitRepositoryUtils_test.go b/pkg/abaputils/manageGitRepositoryUtils_test.go index 448aa712d8..ca70d590d7 100644 --- a/pkg/abaputils/manageGitRepositoryUtils_test.go +++ b/pkg/abaputils/manageGitRepositoryUtils_test.go @@ -14,21 +14,22 @@ import ( var executionLogString string func init() { - executionLog := PullEntity{ - ToExecutionLog: AbapLogs{ - Results: []LogResults{ - { - Index: "1", - Type: "LogEntry", - Description: "S", - Timestamp: "/Date(1644332299000+0000)/", - }, + executionLog := LogProtocolResults{ + Results: []LogProtocol{ + { + ProtocolLine: 1, + OverviewIndex: 1, + Type: "LogEntry", + Description: "S", + Timestamp: "/Date(1644332299000+0000)/", }, }, } + executionLogResponse, _ := json.Marshal(executionLog) executionLogString = string(executionLogResponse) } + func TestPollEntity(t *testing.T) { t.Run("Test poll entity - success case", func(t *testing.T) { @@ -36,9 +37,9 @@ func TestPollEntity(t *testing.T) { logResultSuccess := fmt.Sprintf(`{"d": { "sc_name": "/DMO/SWC", "status": "S", "to_Log_Overview": { "results": [ { "log_index": 1, "log_name": "Main Import", "type_of_found_issues": "Success", "timestamp": "/Date(1644332299000+0000)/", "to_Log_Protocol": { "results": [ { "log_index": 1, "index_no": "1", "log_name": "", "type": "Info", "descr": "Main import", "timestamp": null, "criticality": 0 } ] } } ] } } }`) client := &ClientMock{ BodyList: []string{ + `{"d" : [] }`, `{"d" : ` + executionLogString + `}`, logResultSuccess, - `{"d" : { "EntitySets" : [ 
"LogOverviews" ] } }`, `{"d" : { "status" : "S" } }`, `{"d" : { "status" : "R" } }`, }, @@ -69,15 +70,16 @@ func TestPollEntity(t *testing.T) { } status, _ := PollEntity(config.RepositoryName, con, client, 0) assert.Equal(t, "S", status) + assert.Equal(t, 0, len(client.BodyList), "Not all requests were done") }) t.Run("Test poll entity - error case", func(t *testing.T) { logResultError := fmt.Sprintf(`{"d": { "sc_name": "/DMO/SWC", "status": "S", "to_Log_Overview": { "results": [ { "log_index": 1, "log_name": "Main Import", "type_of_found_issues": "Error", "timestamp": "/Date(1644332299000+0000)/", "to_Log_Protocol": { "results": [ { "log_index": 1, "index_no": "1", "log_name": "", "type": "Info", "descr": "Main import", "timestamp": null, "criticality": 0 } ] } } ] } } }`) client := &ClientMock{ BodyList: []string{ + `{"d" : [] }`, `{"d" : ` + executionLogString + `}`, logResultError, - `{"d" : { "EntitySets" : [ "LogOverviews" ] } }`, `{"d" : { "status" : "E" } }`, `{"d" : { "status" : "R" } }`, }, @@ -108,6 +110,7 @@ func TestPollEntity(t *testing.T) { } status, _ := PollEntity(config.RepositoryName, con, client, 0) assert.Equal(t, "E", status) + assert.Equal(t, 0, len(client.BodyList), "Not all requests were done") }) } From 7147209e3e7af13caf4ae20500145d67aa801042 Mon Sep 17 00:00:00 2001 From: Daniel Mieg <56156797+DanielMieg@users.noreply.github.com> Date: Tue, 4 Apr 2023 12:46:03 +0200 Subject: [PATCH 05/23] Improve logging for ABAP steps (#4316) * Fix logs * Increase number of entries --- pkg/abaputils/manageGitRepositoryUtils.go | 91 +++++++++++-------- .../manageGitRepositoryUtils_test.go | 6 +- 2 files changed, 56 insertions(+), 41 deletions(-) diff --git a/pkg/abaputils/manageGitRepositoryUtils.go b/pkg/abaputils/manageGitRepositoryUtils.go index d204c88f81..8399b2ef15 100644 --- a/pkg/abaputils/manageGitRepositoryUtils.go +++ b/pkg/abaputils/manageGitRepositoryUtils.go @@ -6,6 +6,7 @@ import ( "io/ioutil" "reflect" "sort" + "strconv" "strings" "time" @@ -15,7 +16,7 @@ import ( ) const failureMessageClonePull = "Could not pull the Repository / Software Component " -const numberOfEntriesPerPage = 100 +const numberOfEntriesPerPage = 100000 const logOutputStatusLength = 10 const logOutputTimestampLength = 29 @@ -45,11 +46,7 @@ func PollEntity(repositoryName string, connectionDetails ConnectionDetailsHTTP, func PrintLogs(repositoryName string, connectionDetails ConnectionDetailsHTTP, client piperhttp.Sender) { connectionDetails.URL = connectionDetails.URL + "?$expand=to_Log_Overview" entity, _, err := GetStatus(failureMessageClonePull+repositoryName, connectionDetails, client) - if err != nil { - return - } - - if len(entity.ToLogOverview.Results) == 0 { + if err != nil || len(entity.ToLogOverview.Results) == 0 { // return if no logs are available return } @@ -59,11 +56,7 @@ func PrintLogs(repositoryName string, connectionDetails ConnectionDetailsHTTP, c return entity.ToLogOverview.Results[i].Index < entity.ToLogOverview.Results[j].Index }) - logOutputPhaseLength, logOutputLineLength := calculateLenghts(entity) - - printOverview(logOutputLineLength, logOutputPhaseLength, entity) - - dashedLine(logOutputLineLength) + printOverview(entity) // Print Details for _, logEntryForDetails := range entity.ToLogOverview.Results { @@ -74,14 +67,22 @@ func PrintLogs(repositoryName string, connectionDetails ConnectionDetailsHTTP, c return } -func printOverview(logOutputLineLength int, logOutputPhaseLength int, entity PullEntity) { +func printOverview(entity PullEntity) { + + 
logOutputPhaseLength, logOutputLineLength := calculateLenghts(entity) + log.Entry().Infof("\n") - dashedLine(logOutputLineLength) + + printDashedLine(logOutputLineLength) + log.Entry().Infof("| %-"+fmt.Sprint(logOutputPhaseLength)+"s | %"+fmt.Sprint(logOutputStatusLength)+"s | %-"+fmt.Sprint(logOutputTimestampLength)+"s |", "Phase", "Status", "Timestamp") - dashedLine(logOutputLineLength) + + printDashedLine(logOutputLineLength) + for _, logEntry := range entity.ToLogOverview.Results { log.Entry().Infof("| %-"+fmt.Sprint(logOutputPhaseLength)+"s | %"+fmt.Sprint(logOutputStatusLength)+"s | %-"+fmt.Sprint(logOutputTimestampLength)+"s |", logEntry.Name, logEntry.Status, ConvertTime(logEntry.Timestamp)) } + printDashedLine(logOutputLineLength) } func calculateLenghts(entity PullEntity) (int, int) { @@ -96,46 +97,57 @@ func calculateLenghts(entity PullEntity) (int, int) { return phaseLength, lineLength } -func dashedLine(i int) { +func printDashedLine(i int) { log.Entry().Infof(strings.Repeat("-", i)) } -func printLog(logEntry LogResultsV2, connectionDetails ConnectionDetailsHTTP, client piperhttp.Sender) { +func printLog(logOverviewEntry LogResultsV2, connectionDetails ConnectionDetailsHTTP, client piperhttp.Sender) { - readNextLogEntries := true page := 0 - printHeader(logEntry) + printHeader(logOverviewEntry) for { - query := getLogProtocolQuery(page) - connectionDetails.URL = logEntry.ToLogProtocol.Deferred.URI + query + connectionDetails.URL = logOverviewEntry.ToLogProtocol.Deferred.URI + getLogProtocolQuery(page) entity, err := GetProtocol(failureMessageClonePull, connectionDetails, client) - if (err != nil || reflect.DeepEqual(entity, LogProtocolResults{})) { - readNextLogEntries = false - } - sort.SliceStable(entity.Results, func(i, j int) bool { - return entity.Results[i].ProtocolLine < entity.Results[j].ProtocolLine - }) - - if logEntry.Status != `Success` { - for _, entry := range entity.Results { - log.Entry().Info(entry.Description) - } - - } else { - for _, entry := range entity.Results { - log.Entry().Debug(entry.Description) - } - } + + printLogProtocolEntries(logOverviewEntry, entity) + page += 1 - if !readNextLogEntries { + if allLogsHaveBeenPrinted(entity, page, err) { break } } } +func printLogProtocolEntries(logEntry LogResultsV2, entity LogProtocolResults) { + + sort.SliceStable(entity.Results, func(i, j int) bool { + return entity.Results[i].ProtocolLine < entity.Results[j].ProtocolLine + }) + + if logEntry.Status != `Success` { + for _, entry := range entity.Results { + log.Entry().Info(entry.Description) + } + + } else { + for _, entry := range entity.Results { + log.Entry().Debug(entry.Description) + } + } +} + +func allLogsHaveBeenPrinted(entity LogProtocolResults, page int, err error) bool { + allPagesHaveBeenRead := false + numberOfProtocols, errConversion := strconv.Atoi(entity.Count) + if errConversion == nil { + allPagesHaveBeenRead = numberOfProtocols <= page*numberOfEntriesPerPage + } + return (err != nil || allPagesHaveBeenRead || reflect.DeepEqual(entity.Results, LogProtocolResults{})) +} + func printHeader(logEntry LogResultsV2) { if logEntry.Status != `Success` { log.Entry().Infof("\n") @@ -154,7 +166,7 @@ func getLogProtocolQuery(page int) string { skip := page * numberOfEntriesPerPage top := numberOfEntriesPerPage - return fmt.Sprintf("?$skip=%s&$top=%s", fmt.Sprint(skip), fmt.Sprint(top)) + return fmt.Sprintf("?$skip=%s&$top=%s&$inlinecount=allpages", fmt.Sprint(skip), fmt.Sprint(top)) } func GetStatus(failureMessage string, connectionDetails 
ConnectionDetailsHTTP, client piperhttp.Sender) (body PullEntity, status string, err error) { @@ -384,6 +396,7 @@ type URI struct { type LogProtocolResults struct { Results []LogProtocol `json:"results"` + Count string `json:"__count"` } type LogProtocol struct { diff --git a/pkg/abaputils/manageGitRepositoryUtils_test.go b/pkg/abaputils/manageGitRepositoryUtils_test.go index ca70d590d7..abbeb92caa 100644 --- a/pkg/abaputils/manageGitRepositoryUtils_test.go +++ b/pkg/abaputils/manageGitRepositoryUtils_test.go @@ -4,6 +4,7 @@ import ( "encoding/json" "fmt" "io/ioutil" + "math" "os" "testing" @@ -15,6 +16,7 @@ var executionLogString string func init() { executionLog := LogProtocolResults{ + Count: fmt.Sprint(math.Round(numberOfEntriesPerPage * 1.5)), Results: []LogProtocol{ { ProtocolLine: 1, @@ -37,7 +39,7 @@ func TestPollEntity(t *testing.T) { logResultSuccess := fmt.Sprintf(`{"d": { "sc_name": "/DMO/SWC", "status": "S", "to_Log_Overview": { "results": [ { "log_index": 1, "log_name": "Main Import", "type_of_found_issues": "Success", "timestamp": "/Date(1644332299000+0000)/", "to_Log_Protocol": { "results": [ { "log_index": 1, "index_no": "1", "log_name": "", "type": "Info", "descr": "Main import", "timestamp": null, "criticality": 0 } ] } } ] } } }`) client := &ClientMock{ BodyList: []string{ - `{"d" : [] }`, + `{"d" : ` + executionLogString + `}`, `{"d" : ` + executionLogString + `}`, logResultSuccess, `{"d" : { "status" : "S" } }`, @@ -77,7 +79,7 @@ func TestPollEntity(t *testing.T) { logResultError := fmt.Sprintf(`{"d": { "sc_name": "/DMO/SWC", "status": "S", "to_Log_Overview": { "results": [ { "log_index": 1, "log_name": "Main Import", "type_of_found_issues": "Error", "timestamp": "/Date(1644332299000+0000)/", "to_Log_Protocol": { "results": [ { "log_index": 1, "index_no": "1", "log_name": "", "type": "Info", "descr": "Main import", "timestamp": null, "criticality": 0 } ] } } ] } } }`) client := &ClientMock{ BodyList: []string{ - `{"d" : [] }`, + `{"d" : ` + executionLogString + `}`, `{"d" : ` + executionLogString + `}`, logResultError, `{"d" : { "status" : "E" } }`, From 489adaaf9926797009afc2619b22cd28ee0b0bdf Mon Sep 17 00:00:00 2001 From: larsbrueckner Date: Tue, 4 Apr 2023 14:17:13 +0200 Subject: [PATCH 06/23] Blackduck toolrecord file: add Blackduck projectVersion name and href (#4303) * Blackduck toolrecord file: add Blackduck projectVersion name and href * fix codestyle * fix build error --- cmd/detectExecuteScan.go | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/cmd/detectExecuteScan.go b/cmd/detectExecuteScan.go index 6447e5512d..7cd1c21e0a 100644 --- a/cmd/detectExecuteScan.go +++ b/cmd/detectExecuteScan.go @@ -833,6 +833,19 @@ func createToolRecordDetect(utils detectUtils, workspace string, config detectEx if err != nil { return "", err } + projectVersionName := getVersionName(config) + projectVersion, err := sys.Client.GetProjectVersion(config.ProjectName, projectVersionName) + if err != nil { + return "", err + } + projectVersionUrl := projectVersion.Href + err = record.AddKeyData("version", + projectVersion.Name, + projectVersionUrl, + projectVersionUrl) + if err != nil { + return "", err + } _ = record.AddContext("DetectTools", config.DetectTools) err = record.Persist() if err != nil { From 6b18448124aee13a3ef3b0e902ec97e11f372f9a Mon Sep 17 00:00:00 2001 From: Marco Rosa Date: Tue, 4 Apr 2023 16:57:15 +0200 Subject: [PATCH 07/23] Add credentialdiggerScan step (#4141) * Add credentialdiggerScan metadata * Integrate new step into piper process * Add 
credentialdiggerScan implementation and tests * Remove duplicated code * Add doc file for credentialdiggerScan step * Regenerate metadata * Fix return type in tests * Add credentialdiggerScan to CommonStepsTest * Fix typo * Improve code style * Add support for custom rules file in stash * Regenerate metadata for credentialdiggerScan --------- Co-authored-by: Sven Merk <33895725+nevskrem@users.noreply.github.com> Co-authored-by: Anil Keshav --- cmd/credentialdiggerScan.go | 249 ++++++++++++++++ cmd/credentialdiggerScan_generated.go | 277 ++++++++++++++++++ cmd/credentialdiggerScan_generated_test.go | 17 ++ cmd/credentialdiggerScan_test.go | 124 ++++++++ cmd/metadata_generated.go | 1 + cmd/piper.go | 1 + .../docs/steps/credentialdiggerScan.md | 7 + documentation/mkdocs.yml | 1 + resources/metadata/credentialdiggerScan.yaml | 124 ++++++++ test/groovy/CommonStepsTest.groovy | 1 + vars/credentialdiggerScan.groovy | 11 + 11 files changed, 813 insertions(+) create mode 100644 cmd/credentialdiggerScan.go create mode 100644 cmd/credentialdiggerScan_generated.go create mode 100644 cmd/credentialdiggerScan_generated_test.go create mode 100644 cmd/credentialdiggerScan_test.go create mode 100644 documentation/docs/steps/credentialdiggerScan.md create mode 100644 resources/metadata/credentialdiggerScan.yaml create mode 100644 vars/credentialdiggerScan.groovy diff --git a/cmd/credentialdiggerScan.go b/cmd/credentialdiggerScan.go new file mode 100644 index 0000000000..31e84b39b1 --- /dev/null +++ b/cmd/credentialdiggerScan.go @@ -0,0 +1,249 @@ +package cmd + +import ( + "fmt" + "os" + "path/filepath" + "strconv" + + "github.com/SAP/jenkins-library/pkg/command" + piperhttp "github.com/SAP/jenkins-library/pkg/http" + "github.com/SAP/jenkins-library/pkg/log" + "github.com/SAP/jenkins-library/pkg/orchestrator" + "github.com/SAP/jenkins-library/pkg/piperutils" + "github.com/SAP/jenkins-library/pkg/telemetry" + "github.com/pkg/errors" +) + +const piperDbName string = "piper_step_db.db" +const piperReportName string = "findings.csv" + +type credentialdiggerUtils interface { + command.ExecRunner + piperutils.FileUtils +} + +type credentialdiggerUtilsBundle struct { + *command.Command + *piperutils.Files +} + +func newCDUtils() credentialdiggerUtils { + utils := credentialdiggerUtilsBundle{ + Command: &command.Command{}, + Files: &piperutils.Files{}, + } + // Reroute command output to logging framework + utils.Stdout(log.Writer()) + utils.Stderr(log.Writer()) + return &utils +} + +func credentialdiggerScan(config credentialdiggerScanOptions, telemetryData *telemetry.CustomData) error { + utils := newCDUtils() + // 0: Get attributes from orchestrator + provider, prov_err := orchestrator.NewOrchestratorSpecificConfigProvider() + if prov_err != nil { + log.Entry().WithError(prov_err).Error( + "credentialdiggerScan: unable to load orchestrator specific configuration.") + } + if config.Repository == "" { + // Get current repository from orchestrator + repoUrlOrchestrator := provider.GetRepoURL() + if repoUrlOrchestrator == "n/a" { + // Jenkins configuration error + log.Entry().WithError(errors.New( + fmt.Sprintf("Unknown repository URL %s", repoUrlOrchestrator))).Error( + "Repository URL n/a. 
Please verify git plugin is installed.") + } + config.Repository = repoUrlOrchestrator + log.Entry().Debug("Use current repository: ", repoUrlOrchestrator) + } + if provider.IsPullRequest() { + // set the pr number + config.PrNumber, _ = strconv.Atoi(provider.GetPullRequestConfig().Key) + log.Entry().Debug("Scan the current pull request: number ", config.PrNumber) + } + + // 1: Add rules + log.Entry().Info("Load rules") + err := credentialdiggerAddRules(&config, telemetryData, utils) + if err != nil { + log.Entry().Error("credentialdiggerScan: Failed running credentialdigger add_rules") + return err + } + log.Entry().Info("Rules added") + + // 2: Scan the repository + // Choose between scan-pr, scan-snapshot, and full-scan (with this priority + // order) + switch { + case config.PrNumber != 0: // int type is not nillable in golang + log.Entry().Debug("Scan PR") + // if a PrNumber is declared, run scan_pr + err = credentialdiggerScanPR(&config, telemetryData, utils) // scan PR with CD + case config.Snapshot != "": + log.Entry().Debug("Scan snapshot") + // if a Snapshot is declared, run scan_snapshot + err = credentialdiggerScanSnapshot(&config, telemetryData, utils) // scan Snapshot with CD + default: + // The default case is the normal full scan + log.Entry().Debug("Full scan repo") + err = credentialdiggerFullScan(&config, telemetryData, utils) // full scan with CD + } + // err is an error exit number when there are findings + if err == nil { + log.Entry().Info("No discoveries found in this repo") + // If there are no findings, there is no need to export an empty report + return nil + } + + // 3: Get discoveries + err = credentialdiggerGetDiscoveries(&config, telemetryData, utils) + if err != nil { + // The exit number is the number of discoveries + // Therefore, this error is not relevant, if raised + log.Entry().Warn("There are findings to review") + } + + // 4: Export report in workspace + reports := []piperutils.Path{} + reports = append(reports, piperutils.Path{Target: fmt.Sprintf("%v", piperReportName)}) + piperutils.PersistReportsAndLinks("credentialdiggerScan", "./", utils, reports, nil) + + return nil +} + +func executeCredentialDiggerProcess(utils credentialdiggerUtils, args []string) error { + return utils.RunExecutable("credentialdigger", args...) 
+} + +// hasConfigurationFile checks if the given file exists +func hasRulesFile(file string, utils credentialdiggerUtils) bool { + exists, err := utils.FileExists(file) + if err != nil { + log.Entry().WithError(err).Error() + } + return exists +} + +func credentialdiggerAddRules(config *credentialdiggerScanOptions, telemetryData *telemetry.CustomData, service credentialdiggerUtils) error { + // Credentialdigger home can be changed with local forks (e.g., for local piper runs) + cdHome := "/credential-digger-ui" // cdHome path as in docker container + if cdh := os.Getenv("CREDENTIALDIGGER_HOME"); cdh != "" { + cdHome = cdh + } + log.Entry().Debug("Use credentialdigger home ", cdHome) + // Set the rule file to the standard ruleset shipped within credential + // digger container + ruleFile := filepath.Join(cdHome, "backend", "rules.yml") + + if config.RulesDownloadURL != "" { + // Download custom rule file from this URL + log.Entry().Debugf("Download custom ruleset from %v", config.RulesDownloadURL) + dlClient := piperhttp.Client{} + ruleFile := filepath.Join(cdHome, "backend", "custom-rules.yml") + dlClient.DownloadFile(config.RulesDownloadURL, ruleFile, nil, nil) + log.Entry().Info("Download and use remote rules") + } else { + log.Entry().Debug("Use a local ruleset") + // Use rules defined in stashed file + if hasRulesFile(config.RulesFile, service) { + log.Entry().WithField("file", config.RulesFile).Info("Use stashed rules file from repository") + ruleFile = config.RulesFile + } else { + log.Entry().Info("Use standard pre-defined rules") + } + + } + cmd_list := []string{"add_rules", "--sqlite", piperDbName, ruleFile} + return executeCredentialDiggerProcess(service, cmd_list) +} + +func credentialdiggerGetDiscoveries(config *credentialdiggerScanOptions, telemetryData *telemetry.CustomData, service credentialdiggerUtils) error { + log.Entry().Info("Get discoveries") + cmd_list := []string{"get_discoveries", config.Repository, "--sqlite", piperDbName, + "--save", piperReportName} + // Export all the discoveries or export only new ones + if !config.ExportAll { + cmd_list = append(cmd_list, "--state", "new") + } + err := executeCredentialDiggerProcess(service, cmd_list) + if err != nil { + log.Entry().Error("credentialdiggerScan: Failed running credentialdigger get_discoveries") + log.Entry().Error(err) + return err + } + log.Entry().Info("Scan complete") + return nil +} + +func credentialdiggerBuildCommonArgs(config *credentialdiggerScanOptions) []string { + /*Some arguments are the same for all the scan flavors. Build them here + * not to duplicate code.*/ + scan_args := []string{} + // Repository url and sqlite db (always mandatory) + scan_args = append(scan_args, config.Repository, "--sqlite", piperDbName) + //git token is not mandatory for base credential digger tool, but in + //piper it is + scan_args = append(scan_args, "--git_token", config.Token) + //debug + if config.Debug { + log.Entry().Debug("Run the scan in debug mode") + scan_args = append(scan_args, "--debug") + } + //models + if len(config.Models) > 0 { + log.Entry().Debugf("Enable models %v", config.Models) + scan_args = append(scan_args, "--models") + scan_args = append(scan_args, config.Models...) 
+ } + + return scan_args +} + +func credentialdiggerScanSnapshot(config *credentialdiggerScanOptions, telemetryData *telemetry.CustomData, service credentialdiggerUtils) error { + log.Entry().Infof("Scan Snapshot %v from repo %v", config.Snapshot, config.Repository) + cmd_list := []string{"scan_snapshot", + "--snapshot", config.Snapshot} + cmd_list = append(cmd_list, credentialdiggerBuildCommonArgs(config)...) + leaks := executeCredentialDiggerProcess(service, cmd_list) + if leaks != nil { + log.Entry().Warn("The scan found potential leaks in this Snapshot") + return leaks + } else { + log.Entry().Info("No leaks found") + return nil + } +} + +func credentialdiggerScanPR(config *credentialdiggerScanOptions, telemetryData *telemetry.CustomData, service credentialdiggerUtils) error { + log.Entry().Infof("Scan PR %v from repo %v", config.PrNumber, config.Repository) + cmd_list := []string{"scan_pr", + "--pr", strconv.Itoa(config.PrNumber), + "--api_endpoint", config.APIURL} + cmd_list = append(cmd_list, credentialdiggerBuildCommonArgs(config)...) + leaks := executeCredentialDiggerProcess(service, cmd_list) + if leaks != nil { + log.Entry().Warn("The scan found potential leaks in this PR") + return leaks + } else { + log.Entry().Info("No leaks found") + return nil + } +} + +func credentialdiggerFullScan(config *credentialdiggerScanOptions, telemetryData *telemetry.CustomData, service credentialdiggerUtils) error { + log.Entry().Infof("Full scan of repository %v", config.Repository) + cmd_list := []string{"scan"} + cmd_list = append(cmd_list, credentialdiggerBuildCommonArgs(config)...) + leaks := executeCredentialDiggerProcess(service, cmd_list) + if leaks != nil { + log.Entry().Warn("The scan found potential leaks") + log.Entry().Warnf("%v potential leaks found", leaks) + return leaks + } else { + log.Entry().Info("No leaks found") + return nil + } +} diff --git a/cmd/credentialdiggerScan_generated.go b/cmd/credentialdiggerScan_generated.go new file mode 100644 index 0000000000..02e1167449 --- /dev/null +++ b/cmd/credentialdiggerScan_generated.go @@ -0,0 +1,277 @@ +// Code generated by piper's step-generator. DO NOT EDIT. 
+ +package cmd + +import ( + "fmt" + "os" + "time" + + "github.com/SAP/jenkins-library/pkg/config" + "github.com/SAP/jenkins-library/pkg/log" + "github.com/SAP/jenkins-library/pkg/splunk" + "github.com/SAP/jenkins-library/pkg/telemetry" + "github.com/SAP/jenkins-library/pkg/validation" + "github.com/spf13/cobra" +) + +type credentialdiggerScanOptions struct { + Repository string `json:"repository,omitempty"` + Snapshot string `json:"snapshot,omitempty"` + PrNumber int `json:"prNumber,omitempty"` + ExportAll bool `json:"exportAll,omitempty"` + APIURL string `json:"apiUrl,omitempty"` + Debug bool `json:"debug,omitempty"` + RulesDownloadURL string `json:"rulesDownloadUrl,omitempty"` + Models []string `json:"models,omitempty"` + Token string `json:"token,omitempty"` + RulesFile string `json:"rulesFile,omitempty"` +} + +// CredentialdiggerScanCommand Scan a repository on GitHub with Credential Digger +func CredentialdiggerScanCommand() *cobra.Command { + const STEP_NAME = "credentialdiggerScan" + + metadata := credentialdiggerScanMetadata() + var stepConfig credentialdiggerScanOptions + var startTime time.Time + var logCollector *log.CollectorHook + var splunkClient *splunk.Splunk + telemetryClient := &telemetry.Telemetry{} + + var createCredentialdiggerScanCmd = &cobra.Command{ + Use: STEP_NAME, + Short: "Scan a repository on GitHub with Credential Digger", + Long: `This step allows you to scan a repository on Github using Credential Digger. + +It can for example be used for DevSecOps scenarios to verify the source code does not contain hard-coded credentials before being merged or released for production. +It supports several scan flavors, i.e., full scans of a repo, scan of a snapshot, or scan of a pull request.`, + PreRunE: func(cmd *cobra.Command, _ []string) error { + startTime = time.Now() + log.SetStepName(STEP_NAME) + log.SetVerbose(GeneralConfig.Verbose) + + GeneralConfig.GitHubAccessTokens = ResolveAccessTokens(GeneralConfig.GitHubTokens) + + path, _ := os.Getwd() + fatalHook := &log.FatalHook{CorrelationID: GeneralConfig.CorrelationID, Path: path} + log.RegisterHook(fatalHook) + + err := PrepareConfig(cmd, &metadata, STEP_NAME, &stepConfig, config.OpenPiperFile) + if err != nil { + log.SetErrorCategory(log.ErrorConfiguration) + return err + } + log.RegisterSecret(stepConfig.Token) + + if len(GeneralConfig.HookConfig.SentryConfig.Dsn) > 0 { + sentryHook := log.NewSentryHook(GeneralConfig.HookConfig.SentryConfig.Dsn, GeneralConfig.CorrelationID) + log.RegisterHook(&sentryHook) + } + + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + splunkClient = &splunk.Splunk{} + logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} + log.RegisterHook(logCollector) + } + + if err = log.RegisterANSHookIfConfigured(GeneralConfig.CorrelationID); err != nil { + log.Entry().WithError(err).Warn("failed to set up SAP Alert Notification Service log hook") + } + + validation, err := validation.New(validation.WithJSONNamesForStructFields(), validation.WithPredefinedErrorMessages()) + if err != nil { + return err + } + if err = validation.ValidateStruct(stepConfig); err != nil { + log.SetErrorCategory(log.ErrorConfiguration) + return err + } + + return nil + }, + Run: func(_ *cobra.Command, _ []string) { + stepTelemetryData := telemetry.CustomData{} + stepTelemetryData.ErrorCode = "1" + handler := func() { + config.RemoveVaultSecretFiles() + stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds()) + stepTelemetryData.ErrorCategory = 
log.GetErrorCategory().String() + stepTelemetryData.PiperCommitHash = GitCommit + telemetryClient.SetData(&stepTelemetryData) + telemetryClient.Send() + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + splunkClient.Send(telemetryClient.GetData(), logCollector) + } + } + log.DeferExitHandler(handler) + defer handler() + telemetryClient.Initialize(GeneralConfig.NoTelemetry, STEP_NAME) + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + splunkClient.Initialize(GeneralConfig.CorrelationID, + GeneralConfig.HookConfig.SplunkConfig.Dsn, + GeneralConfig.HookConfig.SplunkConfig.Token, + GeneralConfig.HookConfig.SplunkConfig.Index, + GeneralConfig.HookConfig.SplunkConfig.SendLogs) + } + credentialdiggerScan(stepConfig, &stepTelemetryData) + stepTelemetryData.ErrorCode = "0" + log.Entry().Info("SUCCESS") + }, + } + + addCredentialdiggerScanFlags(createCredentialdiggerScanCmd, &stepConfig) + return createCredentialdiggerScanCmd +} + +func addCredentialdiggerScanFlags(cmd *cobra.Command, stepConfig *credentialdiggerScanOptions) { + cmd.Flags().StringVar(&stepConfig.Repository, "repository", os.Getenv("PIPER_repository"), "URL of the GitHub repository (was name, but we need the url). In case it's missing, use the URL of the current repository.") + cmd.Flags().StringVar(&stepConfig.Snapshot, "snapshot", os.Getenv("PIPER_snapshot"), "If set, scan the snapshot of the repository at this commit_id/branch.") + cmd.Flags().IntVar(&stepConfig.PrNumber, "prNumber", 0, "If set, scan the pull request open with this number.") + cmd.Flags().BoolVar(&stepConfig.ExportAll, "exportAll", false, "Export all the findings, i.e., including non-leaks.") + cmd.Flags().StringVar(&stepConfig.APIURL, "apiUrl", `https://api.github.com`, "Set the GitHub API url. Needed for scanning a pull request.") + cmd.Flags().BoolVar(&stepConfig.Debug, "debug", false, "Execute the scans in debug mode (i.e., print logs).") + cmd.Flags().StringVar(&stepConfig.RulesDownloadURL, "rulesDownloadUrl", os.Getenv("PIPER_rulesDownloadUrl"), "URL where to download custom rules. The file published at this URL must be formatted as the default ruleset https://raw.githubusercontent.com/SAP/credential-digger/main/ui/backend/rules.yml") + cmd.Flags().StringSliceVar(&stepConfig.Models, "models", []string{}, "Machine learning models to automatically verify the findings.") + cmd.Flags().StringVar(&stepConfig.Token, "token", os.Getenv("PIPER_token"), "GitHub personal access token as per https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line") + cmd.Flags().StringVar(&stepConfig.RulesFile, "rulesFile", `inputs/rules.yml`, "Name of the rules file used locally within the step. If a remote files for rules is declared as `rulesDownloadUrl`, the stashed file is ignored. 
If you change the file's name make sure your stashing configuration also reflects this.") + + cmd.MarkFlagRequired("apiUrl") + cmd.MarkFlagRequired("token") +} + +// retrieve step metadata +func credentialdiggerScanMetadata() config.StepData { + var theMetaData = config.StepData{ + Metadata: config.StepMetadata{ + Name: "credentialdiggerScan", + Aliases: []config.Alias{}, + Description: "Scan a repository on GitHub with Credential Digger", + }, + Spec: config.StepSpec{ + Inputs: config.StepInputs{ + Secrets: []config.StepSecrets{ + {Name: "githubTokenCredentialsId", Description: "Jenkins 'Secret text' credentials ID containing token to authenticate to GitHub.", Type: "jenkins"}, + }, + Parameters: []config.StepParameters{ + { + Name: "repository", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{{Name: "githubRepo"}}, + Default: os.Getenv("PIPER_repository"), + }, + { + Name: "snapshot", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_snapshot"), + }, + { + Name: "prNumber", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "int", + Mandatory: false, + Aliases: []config.Alias{}, + Default: 0, + }, + { + Name: "exportAll", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "bool", + Mandatory: false, + Aliases: []config.Alias{}, + Default: false, + }, + { + Name: "apiUrl", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: true, + Aliases: []config.Alias{{Name: "githubApiUrl"}}, + Default: `https://api.github.com`, + }, + { + Name: "debug", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"}, + Type: "bool", + Mandatory: false, + Aliases: []config.Alias{{Name: "verbose"}}, + Default: false, + }, + { + Name: "rulesDownloadUrl", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_rulesDownloadUrl"), + }, + { + Name: "models", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "[]string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: []string{}, + }, + { + Name: "token", + ResourceRef: []config.ResourceReference{ + { + Name: "githubTokenCredentialsId", + Type: "secret", + }, + + { + Name: "githubVaultSecretName", + Type: "vaultSecret", + Default: "github", + }, + }, + Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: true, + Aliases: []config.Alias{{Name: "githubToken"}, {Name: "access_token"}}, + Default: os.Getenv("PIPER_token"), + }, + { + Name: "rulesFile", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: `inputs/rules.yml`, + }, + }, + }, + Containers: []config.Container{ + {Image: "credentialdigger.int.repositories.cloud.sap/credential_digger:4.9.2"}, + }, + Outputs: config.StepOutputs{ + Resources: []config.StepResources{ + { + Name: "report", + Type: "report", + Parameters: []map[string]interface{}{ + {"filePattern": 
"**/report*.csv", "type": "credentialdigger-report"}, + }, + }, + }, + }, + }, + } + return theMetaData +} diff --git a/cmd/credentialdiggerScan_generated_test.go b/cmd/credentialdiggerScan_generated_test.go new file mode 100644 index 0000000000..646c3a9acd --- /dev/null +++ b/cmd/credentialdiggerScan_generated_test.go @@ -0,0 +1,17 @@ +package cmd + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestCredentialdiggerScanCommand(t *testing.T) { + t.Parallel() + + testCmd := CredentialdiggerScanCommand() + + // only high level testing performed - details are tested in step generation procedure + assert.Equal(t, "credentialdiggerScan", testCmd.Use, "command name incorrect") + +} diff --git a/cmd/credentialdiggerScan_test.go b/cmd/credentialdiggerScan_test.go new file mode 100644 index 0000000000..ae5fa85d78 --- /dev/null +++ b/cmd/credentialdiggerScan_test.go @@ -0,0 +1,124 @@ +package cmd + +import ( + "errors" + "testing" + + "github.com/SAP/jenkins-library/pkg/mock" + "github.com/stretchr/testify/assert" +) + +type credentialdiggerScanMockUtils struct { + *mock.ExecMockRunner + *mock.FilesMock + noerr bool +} + +func newCDTestsUtils() credentialdiggerScanMockUtils { + utils := credentialdiggerScanMockUtils{ + ExecMockRunner: &mock.ExecMockRunner{}, + FilesMock: &mock.FilesMock{}, + noerr: true, // flag for return value of MockRunner + } + return utils +} +func (c credentialdiggerScanMockUtils) RunExecutable(executable string, params ...string) error { + if c.noerr { + return nil + } else { + return errors.New("Some custom error") + } +} + +func TestCredentialdiggerFullScan(t *testing.T) { + t.Run("Valid full scan without discoveries", func(t *testing.T) { + config := credentialdiggerScanOptions{Repository: "testRepo", Token: "validToken"} + utils := newCDTestsUtils() + assert.Equal(t, nil, credentialdiggerFullScan(&config, nil, utils)) + + }) + t.Run("Full scan with discoveries or wrong arguments", func(t *testing.T) { + config := credentialdiggerScanOptions{Repository: "testRepo", Token: "validToken"} + utils := newCDTestsUtils() + utils.noerr = false + assert.EqualError(t, credentialdiggerFullScan(&config, nil, utils), "Some custom error") + }) +} + +func TestCredentialdiggerScanSnapshot(t *testing.T) { + t.Run("Valid scan snapshot without discoveries", func(t *testing.T) { + config := credentialdiggerScanOptions{Repository: "testRepo", Token: "validToken", Snapshot: "main"} + utils := newCDTestsUtils() + assert.Equal(t, nil, credentialdiggerScanSnapshot(&config, nil, utils)) + }) + t.Run("Scan snapshot with discoveries or wrong arguments", func(t *testing.T) { + config := credentialdiggerScanOptions{Repository: "testRepo", Token: "validToken", Snapshot: "main"} + utils := newCDTestsUtils() + utils.noerr = false + assert.EqualError(t, credentialdiggerScanSnapshot(&config, nil, utils), "Some custom error") + }) +} + +func TestCredentialdiggerScanPR(t *testing.T) { + t.Run("Valid scan pull request without discoveries", func(t *testing.T) { + config := credentialdiggerScanOptions{Repository: "testRepo", Token: "validToken", PrNumber: 1} + utils := newCDTestsUtils() + assert.Equal(t, nil, credentialdiggerScanPR(&config, nil, utils)) + }) + t.Run("Scan pull request with discoveries or wrong arguments", func(t *testing.T) { + config := credentialdiggerScanOptions{Repository: "testRepo", Token: "validToken", PrNumber: 1} + utils := newCDTestsUtils() + utils.noerr = false + assert.EqualError(t, credentialdiggerScanPR(&config, nil, utils), "Some custom error") + }) +} + 
+func TestCredentialdiggerAddRules(t *testing.T) { + t.Run("Valid standard or remote rules", func(t *testing.T) { + config := credentialdiggerScanOptions{} + utils := newCDTestsUtils() + assert.Equal(t, nil, credentialdiggerAddRules(&config, nil, utils)) + }) + t.Run("Broken add rules", func(t *testing.T) { + config := credentialdiggerScanOptions{} + utils := newCDTestsUtils() + utils.noerr = false + assert.EqualError(t, credentialdiggerAddRules(&config, nil, utils), "Some custom error") + }) + /* + // In case we want to test the error raised by piperhttp + t.Run("Invalid external rules link", func(t *testing.T) { + rulesExt := "https://broken-link.com/fakerules" + config := credentialdiggerScanOptions{RulesDownloadURL: rulesExt} + utils := newCDTestsUtils() + assert.Equal(t, nil, credentialdiggerAddRules(&config, nil, utils)) + }) + */ +} + +func TestCredentialdiggerGetDiscoveries(t *testing.T) { + t.Run("Empty discoveries", func(t *testing.T) { + config := credentialdiggerScanOptions{Repository: "testRepo"} + utils := newCDTestsUtils() + assert.Equal(t, nil, credentialdiggerGetDiscoveries(&config, nil, utils)) + }) + t.Run("Get discoveries non-empty", func(t *testing.T) { + config := credentialdiggerScanOptions{Repository: "testRepo"} + utils := newCDTestsUtils() + utils.noerr = false + assert.EqualError(t, credentialdiggerGetDiscoveries(&config, nil, utils), "Some custom error") + }) +} + +func TestCredentialdiggerBuildCommonArgs(t *testing.T) { + t.Run("Valid build common args", func(t *testing.T) { + arguments := []string{"repoURL", "--sqlite", "piper_step_db.db", "--git_token", "validToken", + "--debug", "--models", "model1", "model2"} + config := credentialdiggerScanOptions{Repository: "repoURL", Token: "validToken", Snapshot: "main", + Debug: true, PrNumber: 1, + Models: []string{"model1", "model2"}, + } + assert.Equal(t, arguments, credentialdiggerBuildCommonArgs(&config)) + }) + +} diff --git a/cmd/metadata_generated.go b/cmd/metadata_generated.go index 2dc26c5ec2..56f8078e6d 100644 --- a/cmd/metadata_generated.go +++ b/cmd/metadata_generated.go @@ -49,6 +49,7 @@ func GetAllStepMetadata() map[string]config.StepData { "codeqlExecuteScan": codeqlExecuteScanMetadata(), "containerExecuteStructureTests": containerExecuteStructureTestsMetadata(), "containerSaveImage": containerSaveImageMetadata(), + "credentialdiggerScan": credentialdiggerScanMetadata(), "detectExecuteScan": detectExecuteScanMetadata(), "fortifyExecuteScan": fortifyExecuteScanMetadata(), "gaugeExecuteTests": gaugeExecuteTestsMetadata(), diff --git a/cmd/piper.go b/cmd/piper.go index cd34b4e33d..fda676578b 100644 --- a/cmd/piper.go +++ b/cmd/piper.go @@ -113,6 +113,7 @@ func Execute() { rootCmd.AddCommand(CheckmarxExecuteScanCommand()) rootCmd.AddCommand(FortifyExecuteScanCommand()) rootCmd.AddCommand(CodeqlExecuteScanCommand()) + rootCmd.AddCommand(CredentialdiggerScanCommand()) rootCmd.AddCommand(MtaBuildCommand()) rootCmd.AddCommand(ProtecodeExecuteScanCommand()) rootCmd.AddCommand(MavenExecuteCommand()) diff --git a/documentation/docs/steps/credentialdiggerScan.md b/documentation/docs/steps/credentialdiggerScan.md new file mode 100644 index 0000000000..63991c1344 --- /dev/null +++ b/documentation/docs/steps/credentialdiggerScan.md @@ -0,0 +1,7 @@ +# ${docGenStepName} + +## ${docGenDescription} + +## ${docGenParameters} + +## ${docGenConfiguration} diff --git a/documentation/mkdocs.yml b/documentation/mkdocs.yml index a00df76e08..3b729edf2c 100644 --- a/documentation/mkdocs.yml +++ b/documentation/mkdocs.yml @@ -94,6 
+94,7 @@ nav: - commonPipelineEnvironment: steps/commonPipelineEnvironment.md - containerExecuteStructureTests: steps/containerExecuteStructureTests.md - containerPushToRegistry: steps/containerPushToRegistry.md + - credentialdiggerScan: steps/credentialdiggerScan.md - debugReportArchive: steps/debugReportArchive.md - detectExecuteScan: steps/detectExecuteScan.md - dockerExecute: steps/dockerExecute.md diff --git a/resources/metadata/credentialdiggerScan.yaml b/resources/metadata/credentialdiggerScan.yaml new file mode 100644 index 0000000000..9ef2b696f2 --- /dev/null +++ b/resources/metadata/credentialdiggerScan.yaml @@ -0,0 +1,124 @@ +metadata: + name: credentialdiggerScan + description: Scan a repository on GitHub with Credential Digger + longDescription: | + This step allows you to scan a repository on Github using Credential Digger. + + It can for example be used for DevSecOps scenarios to verify the source code does not contain hard-coded credentials before being merged or released for production. + It supports several scan flavors, i.e., full scans of a repo, scan of a snapshot, or scan of a pull request. +spec: + inputs: + secrets: + - name: githubTokenCredentialsId + description: Jenkins 'Secret text' credentials ID containing token to authenticate to GitHub. + type: jenkins + params: + - name: repository + aliases: + - name: githubRepo + description: URL of the GitHub repository (was name, but we need the url). In case it's missing, use the URL of the current repository. + scope: + - PARAMETERS + - STAGES + - STEPS + type: string + mandatory: false + - name: snapshot + description: If set, scan the snapshot of the repository at this commit_id/branch. + scope: + - PARAMETERS + - STAGES + - STEPS + type: string + mandatory: false + - name: prNumber + description: If set, scan the pull request open with this number. + scope: + - PARAMETERS + - STAGES + - STEPS + type: int + mandatory: false + - name: exportAll + type: bool + description: Export all the findings, i.e., including non-leaks. + scope: + - PARAMETERS + - STAGES + - STEPS + default: false + - name: apiUrl + aliases: + - name: githubApiUrl + description: Set the GitHub API url. Needed for scanning a pull request. + scope: + - GENERAL + - PARAMETERS + - STAGES + - STEPS + type: string + default: https://api.github.com + mandatory: true + - name: debug + aliases: + - name: verbose + description: Execute the scans in debug mode (i.e., print logs). + scope: + - GENERAL + - PARAMETERS + - STAGES + - STEPS + type: bool + default: false + - name: rulesDownloadUrl + type: string + description: URL where to download custom rules. The file published at this URL must be formatted as the default ruleset https://raw.githubusercontent.com/SAP/credential-digger/main/ui/backend/rules.yml + scope: + - PARAMETERS + - STAGES + - STEPS + mandatory: false + - name: models + description: Machine learning models to automatically verify the findings. 
+ scope: + - PARAMETERS + - STAGES + - STEPS + type: "[]string" + - name: token + aliases: + - name: githubToken + - name: access_token + description: GitHub personal access token as per https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line + scope: + - GENERAL + - PARAMETERS + - STAGES + - STEPS + type: string + mandatory: true + secret: true + resourceRef: + - name: githubTokenCredentialsId + type: secret + - type: vaultSecret + default: github + name: githubVaultSecretName + - name: rulesFile + type: string + description: Name of the rules file used locally within the step. If a remote files for rules is declared as `rulesDownloadUrl`, the stashed file is ignored. If you change the file's name make sure your stashing configuration also reflects this. + mandatory: false + scope: + - PARAMETERS + - STAGES + - STEPS + default: inputs/rules.yml + outputs: + resources: + - name: report + type: report + params: + - filePattern: "**/report*.csv" + type: credentialdigger-report + containers: + - image: "credentialdigger.int.repositories.cloud.sap/credential_digger:4.9.2" diff --git a/test/groovy/CommonStepsTest.groovy b/test/groovy/CommonStepsTest.groovy index a1800f8ae9..84bfb21482 100644 --- a/test/groovy/CommonStepsTest.groovy +++ b/test/groovy/CommonStepsTest.groovy @@ -174,6 +174,7 @@ public class CommonStepsTest extends BasePiperTest{ 'gctsExecuteABAPUnitTests', //implementing new golang pattern without fields 'gctsCloneRepository', //implementing new golang pattern without fields 'codeqlExecuteScan', //implementing new golang pattern without fields + 'credentialdiggerScan', //implementing new golang pattern without fields 'fortifyExecuteScan', //implementing new golang pattern without fields 'gctsDeploy', //implementing new golang pattern without fields 'containerSaveImage', //implementing new golang pattern without fields diff --git a/vars/credentialdiggerScan.groovy b/vars/credentialdiggerScan.groovy new file mode 100644 index 0000000000..996346283c --- /dev/null +++ b/vars/credentialdiggerScan.groovy @@ -0,0 +1,11 @@ +import groovy.transform.Field + +@Field String STEP_NAME = getClass().getName() +@Field String METADATA_FILE = 'metadata/credentialdiggerScan.yaml' + +void call(Map parameters = [:]) { + List credentials = [ + [type: 'token', id: 'githubTokenCredentialsId', env: ['PIPER_token']] + ] + piperExecuteBin(parameters, STEP_NAME, METADATA_FILE, credentials) +} From 47c5a16cc0918e9d4850f5a4f7b62d68c894d895 Mon Sep 17 00:00:00 2001 From: Daria Kuznetsova Date: Tue, 4 Apr 2023 21:16:15 +0200 Subject: [PATCH 08/23] fix(codeqlExecuteStep): parsing git url with dots in repo name (#4318) * change regexp to parse repo URL with dots in repo name * added regex to cut off username and token from URL & added test cases --- cmd/codeqlExecuteScan.go | 4 ++-- cmd/codeqlExecuteScan_test.go | 30 ++++++++++++++++++++++++++++++ 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/cmd/codeqlExecuteScan.go b/cmd/codeqlExecuteScan.go index 8dc4b80e2b..206d3c656a 100644 --- a/cmd/codeqlExecuteScan.go +++ b/cmd/codeqlExecuteScan.go @@ -91,12 +91,12 @@ func getGitRepoInfo(repoUri string, repoInfo *RepoInfo) error { return errors.New("repository param is not set or it cannot be auto populated") } - pat := regexp.MustCompile(`^(https|git)(:\/\/|@)([^\/:]+)[\/:]([^\/:]+\/[^.]+)(.git)*$`) + pat := regexp.MustCompile(`^(https|git):\/\/([\S]+:[\S]+@)?([^\/:]+)[\/:]([^\/:]+\/[\S]+)$`) matches := pat.FindAllStringSubmatch(repoUri, -1) if 
len(matches) > 0 { match := matches[0] repoInfo.serverUrl = "https://" + match[3] - repoInfo.repo = match[4] + repoInfo.repo = strings.TrimSuffix(match[4], ".git") return nil } diff --git a/cmd/codeqlExecuteScan_test.go b/cmd/codeqlExecuteScan_test.go index 90db13e04a..4e81ab0ca2 100644 --- a/cmd/codeqlExecuteScan_test.go +++ b/cmd/codeqlExecuteScan_test.go @@ -79,6 +79,36 @@ func TestGetGitRepoInfo(t *testing.T) { assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) assert.Equal(t, "Testing/fortify", repoInfo.repo) }) + t.Run("Valid URL1 with dots", func(t *testing.T) { + var repoInfo RepoInfo + err := getGitRepoInfo("https://github.hello.test/Testing/com.sap.fortify.git", &repoInfo) + assert.NoError(t, err) + assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) + assert.Equal(t, "Testing/com.sap.fortify", repoInfo.repo) + }) + + t.Run("Valid URL2 with dots", func(t *testing.T) { + var repoInfo RepoInfo + err := getGitRepoInfo("https://github.hello.test/Testing/com.sap.fortify", &repoInfo) + assert.NoError(t, err) + assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) + assert.Equal(t, "Testing/com.sap.fortify", repoInfo.repo) + }) + t.Run("Valid URL1 with username and token", func(t *testing.T) { + var repoInfo RepoInfo + err := getGitRepoInfo("https://username:token@github.hello.test/Testing/fortify.git", &repoInfo) + assert.NoError(t, err) + assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) + assert.Equal(t, "Testing/fortify", repoInfo.repo) + }) + + t.Run("Valid URL2 with username and token", func(t *testing.T) { + var repoInfo RepoInfo + err := getGitRepoInfo("https://username:token@github.hello.test/Testing/fortify", &repoInfo) + assert.NoError(t, err) + assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) + assert.Equal(t, "Testing/fortify", repoInfo.repo) + }) t.Run("Invalid URL as no org/owner passed", func(t *testing.T) { var repoInfo RepoInfo From bee1ffe4f169705d2cac91613fdbcde983003c6a Mon Sep 17 00:00:00 2001 From: Christopher Fenner <26137398+CCFenner@users.noreply.github.com> Date: Wed, 5 Apr 2023 15:07:54 +0200 Subject: [PATCH 09/23] feat(golangBuild): create test report json file (#4306) * feat(golangBuild): create test report json file * Update golangBuild_test.go * Update golangBuild_test.go * Update integration_golang_test.go * Update integration_golang_test.go * Update golangBuild.go * rename test files * rename --- cmd/golangBuild.go | 6 ++++-- cmd/golangBuild_test.go | 10 +++++----- integration/integration_golang_test.go | 12 ++++++------ 3 files changed, 15 insertions(+), 13 deletions(-) diff --git a/cmd/golangBuild.go b/cmd/golangBuild.go index d3760e0e38..b7327fcc29 100644 --- a/cmd/golangBuild.go +++ b/cmd/golangBuild.go @@ -31,6 +31,8 @@ const ( coverageFile = "cover.out" golangUnitTestOutput = "TEST-go.xml" golangIntegrationTestOutput = "TEST-integration.xml" + unitJsonReport = "unit-report.out" + integrationJsonReport = "integration-report.out" golangCoberturaPackage = "github.com/boumenot/gocover-cobertura@latest" golangTestsumPackage = "gotest.tools/gotestsum@latest" golangCycloneDXPackage = "github.com/CycloneDX/cyclonedx-gomod/cmd/cyclonedx-gomod@latest" @@ -364,7 +366,7 @@ func prepareGolangEnvironment(config *golangBuildOptions, goModFile *modfile.Fil func runGolangTests(config *golangBuildOptions, utils golangBuildUtils) (bool, error) { // execute gotestsum in order to have more output options - testOptions := []string{"--junitfile", golangUnitTestOutput, "--", 
fmt.Sprintf("-coverprofile=%v", coverageFile), "./..."} + testOptions := []string{"--junitfile", golangUnitTestOutput, "--jsonfile", unitJsonReport, "--", fmt.Sprintf("-coverprofile=%v", coverageFile), "./..."} testOptions = append(testOptions, config.TestOptions...) if err := utils.RunExecutable("gotestsum", testOptions...); err != nil { exists, fileErr := utils.FileExists(golangUnitTestOutput) @@ -385,7 +387,7 @@ func runGolangTests(config *golangBuildOptions, utils golangBuildUtils) (bool, e func runGolangIntegrationTests(config *golangBuildOptions, utils golangBuildUtils) (bool, error) { // execute gotestsum in order to have more output options // for integration tests coverage data is not meaningful and thus not being created - if err := utils.RunExecutable("gotestsum", "--junitfile", golangIntegrationTestOutput, "--", "-tags=integration", "./..."); err != nil { + if err := utils.RunExecutable("gotestsum", "--junitfile", golangIntegrationTestOutput, "--jsonfile", integrationJsonReport, "--", "-tags=integration", "./..."); err != nil { exists, fileErr := utils.FileExists(golangIntegrationTestOutput) if !exists || fileErr != nil { log.SetErrorCategory(log.ErrorBuild) diff --git a/cmd/golangBuild_test.go b/cmd/golangBuild_test.go index 45eac1593d..016920e85f 100644 --- a/cmd/golangBuild_test.go +++ b/cmd/golangBuild_test.go @@ -139,7 +139,7 @@ go 1.17` assert.Equal(t, "go", utils.ExecMockRunner.Calls[0].Exec) assert.Equal(t, []string{"install", "gotest.tools/gotestsum@latest"}, utils.ExecMockRunner.Calls[0].Params) assert.Equal(t, "gotestsum", utils.ExecMockRunner.Calls[1].Exec) - assert.Equal(t, []string{"--junitfile", "TEST-go.xml", "--", fmt.Sprintf("-coverprofile=%v", coverageFile), "./..."}, utils.ExecMockRunner.Calls[1].Params) + assert.Equal(t, []string{"--junitfile", "TEST-go.xml", "--jsonfile", "unit-report.out", "--", fmt.Sprintf("-coverprofile=%v", coverageFile), "./..."}, utils.ExecMockRunner.Calls[1].Params) assert.Equal(t, "go", utils.ExecMockRunner.Calls[2].Exec) assert.Equal(t, []string{"build", "-trimpath", "-ldflags", "test", "package/foo"}, utils.ExecMockRunner.Calls[2].Params) }) @@ -160,7 +160,7 @@ go 1.17` assert.Equal(t, "go", utils.ExecMockRunner.Calls[0].Exec) assert.Equal(t, []string{"install", "gotest.tools/gotestsum@latest"}, utils.ExecMockRunner.Calls[0].Params) assert.Equal(t, "gotestsum", utils.ExecMockRunner.Calls[1].Exec) - assert.Equal(t, []string{"--junitfile", "TEST-go.xml", "--", fmt.Sprintf("-coverprofile=%v", coverageFile), "./...", "--foo", "--bar"}, utils.ExecMockRunner.Calls[1].Params) + assert.Equal(t, []string{"--junitfile", "TEST-go.xml", "--jsonfile", "unit-report.out", "--", fmt.Sprintf("-coverprofile=%v", coverageFile), "./...", "--foo", "--bar"}, utils.ExecMockRunner.Calls[1].Params) assert.Equal(t, "go", utils.ExecMockRunner.Calls[2].Exec) assert.Equal(t, []string{"build", "-trimpath", "package/foo"}, utils.ExecMockRunner.Calls[2].Params) }) @@ -195,7 +195,7 @@ go 1.17` assert.Equal(t, "go", utils.ExecMockRunner.Calls[0].Exec) assert.Equal(t, []string{"install", "gotest.tools/gotestsum@latest"}, utils.ExecMockRunner.Calls[0].Params) assert.Equal(t, "gotestsum", utils.ExecMockRunner.Calls[1].Exec) - assert.Equal(t, []string{"--junitfile", "TEST-integration.xml", "--", "-tags=integration", "./..."}, utils.ExecMockRunner.Calls[1].Params) + assert.Equal(t, []string{"--junitfile", "TEST-integration.xml", "--jsonfile", "integration-report.out", "--", "-tags=integration", "./..."}, utils.ExecMockRunner.Calls[1].Params) assert.Equal(t, "go", 
utils.ExecMockRunner.Calls[2].Exec) assert.Equal(t, []string{"build", "-trimpath"}, utils.ExecMockRunner.Calls[2].Params) }) @@ -528,7 +528,7 @@ func TestRunGolangTests(t *testing.T) { assert.NoError(t, err) assert.True(t, success) assert.Equal(t, "gotestsum", utils.ExecMockRunner.Calls[0].Exec) - assert.Equal(t, []string{"--junitfile", "TEST-go.xml", "--", fmt.Sprintf("-coverprofile=%v", coverageFile), "./..."}, utils.ExecMockRunner.Calls[0].Params) + assert.Equal(t, []string{"--junitfile", "TEST-go.xml", "--jsonfile", "unit-report.out", "--", fmt.Sprintf("-coverprofile=%v", coverageFile), "./..."}, utils.ExecMockRunner.Calls[0].Params) }) t.Run("success - failed tests", func(t *testing.T) { @@ -579,7 +579,7 @@ func TestRunGolangIntegrationTests(t *testing.T) { assert.NoError(t, err) assert.True(t, success) assert.Equal(t, "gotestsum", utils.ExecMockRunner.Calls[0].Exec) - assert.Equal(t, []string{"--junitfile", "TEST-integration.xml", "--", "-tags=integration", "./..."}, utils.ExecMockRunner.Calls[0].Params) + assert.Equal(t, []string{"--junitfile", "TEST-integration.xml", "--jsonfile", "integration-report.out", "--", "-tags=integration", "./..."}, utils.ExecMockRunner.Calls[0].Params) }) t.Run("success - failed tests", func(t *testing.T) { diff --git a/integration/integration_golang_test.go b/integration/integration_golang_test.go index a811b5d040..d5a23c8efb 100644 --- a/integration/integration_golang_test.go +++ b/integration/integration_golang_test.go @@ -30,10 +30,10 @@ func TestGolangIntegrationBuildProject1(t *testing.T) { container.assertHasOutput(t, "info golangBuild - running command: go install gotest.tools/gotestsum@latest", "info golangBuild - running command: go install github.com/CycloneDX/cyclonedx-gomod/cmd/cyclonedx-gomod@latest", - "info golangBuild - running command: gotestsum --junitfile TEST-go.xml -- -coverprofile=cover.out ./...", + "info golangBuild - running command: gotestsum --junitfile TEST-go.xml --jsonfile unit-report.out -- -coverprofile=cover.out ./...", "info golangBuild - DONE 8 tests", "info golangBuild - running command: go tool cover -html cover.out -o coverage.html", - "info golangBuild - running command: gotestsum --junitfile TEST-integration.xml -- -tags=integration ./...", + "info golangBuild - running command: gotestsum --junitfile TEST-integration.xml --jsonfile integration-report.out -- -tags=integration ./...", "info golangBuild - running command: cyclonedx-gomod mod -licenses -test -output bom-golang.xml", "info golangBuild - running command: go build -trimpath -o golang-app-linux.amd64 cmd/server/server.go", "info golangBuild - SUCCESS", @@ -65,10 +65,10 @@ func TestGolangIntegrationBuildProject1MultiPackage(t *testing.T) { container.assertHasOutput(t, "info golangBuild - running command: go install gotest.tools/gotestsum@latest", "info golangBuild - running command: go install github.com/CycloneDX/cyclonedx-gomod/cmd/cyclonedx-gomod@latest", - "info golangBuild - running command: gotestsum --junitfile TEST-go.xml -- -coverprofile=cover.out ./...", + "info golangBuild - running command: gotestsum --junitfile TEST-go.xml --jsonfile unit-report.out -- -coverprofile=cover.out ./...", "info golangBuild - DONE 8 tests", "info golangBuild - running command: go tool cover -html cover.out -o coverage.html", - "info golangBuild - running command: gotestsum --junitfile TEST-integration.xml -- -tags=integration ./...", + "info golangBuild - running command: gotestsum --junitfile TEST-integration.xml --jsonfile integration-report.out -- 
-tags=integration ./...", "info golangBuild - running command: cyclonedx-gomod mod -licenses -test -output bom-golang.xml", "info golangBuild - running command: go build -trimpath -o golang-app-linux-amd64/ github.com/example/golang-app/cmd/server github.com/example/golang-app/cmd/helper", "info golangBuild - SUCCESS", @@ -103,9 +103,9 @@ func TestGolangIntegrationBuildProject2(t *testing.T) { container.assertHasNoOutput(t, "info golangBuild - running command: go install gotest.tools/gotestsum@latest", "info golangBuild - running command: go install github.com/CycloneDX/cyclonedx-gomod/cmd/cyclonedx-gomod@latest", - "info golangBuild - running command: gotestsum --junitfile TEST-go.xml -- -coverprofile=cover.out ./...", + "info golangBuild - running command: gotestsum --junitfile TEST-go.xml --jsonfile unit-report.out -- -coverprofile=cover.out ./...", "info golangBuild - running command: go tool cover -html cover.out -o coverage.html", - "info golangBuild - running command: gotestsum --junitfile TEST-integration.xml -- -tags=integration ./...", + "info golangBuild - running command: gotestsum --junitfile TEST-integration.xml --jsonfile integration-report.out -- -tags=integration ./...", "info golangBuild - running command: cyclonedx-gomod mod -licenses -test -output bom-golang.xml", ) From a2c0f8922225fa1a1d72437a61543685a1c81354 Mon Sep 17 00:00:00 2001 From: tiloKo <70266685+tiloKo@users.noreply.github.com> Date: Wed, 5 Apr 2023 15:39:56 +0200 Subject: [PATCH 10/23] remove obscuring log output (#4320) --- cmd/abapEnvironmentBuild.go | 20 +++++++++++++++++--- cmd/abapEnvironmentBuild_test.go | 13 ++++++++++++- 2 files changed, 29 insertions(+), 4 deletions(-) diff --git a/cmd/abapEnvironmentBuild.go b/cmd/abapEnvironmentBuild.go index 44d1030ee3..7791a971bc 100644 --- a/cmd/abapEnvironmentBuild.go +++ b/cmd/abapEnvironmentBuild.go @@ -155,7 +155,7 @@ func runBuilds(conn *abapbuild.Connector, config *abapEnvironmentBuildOptions, u } finalValuesForOneBuild, err := runBuild(conn, config, utils, cummulatedValues) if err != nil { - err = errors.Wrapf(err, "Build with input values %s failed", values) + err = errors.Wrapf(err, "Build with input values %s failed", values2string(values)) if config.StopOnFirstError { return finalValues, err } @@ -406,12 +406,15 @@ func (vE *valuesEvaluator) appendStringValuesIfNotPresent(stringValues string, t func (vE *valuesEvaluator) appendValuesIfNotPresent(values []abapbuild.Value, throwErrorIfPresent bool) error { for _, value := range values { + if value.ValueID == "PHASE" || value.ValueID == "BUILD_FRAMEWORK_MODE" { + continue + } _, present := vE.m[value.ValueID] - if present || (value.ValueID == "PHASE") { + if present { if throwErrorIfPresent { return errors.Errorf("Value_id %s already existed in the config", value.ValueID) } - log.Entry().Infof("Value %s already existed -> discard this value", value) + log.Entry().Infof("Value '%s':'%s' already existed -> discard this value", value.ValueID, value.Value) } else { vE.m[value.ValueID] = value.Value } @@ -573,3 +576,14 @@ func Equal(a, b string) bool { func Unequal(a, b string) bool { return a != b } + +func values2string(values []abapbuild.Value) string { + var result string + for index, value := range values { + if index > 0 { + result = result + "; " + } + result = result + value.ValueID + " = " + value.Value + } + return result +} diff --git a/cmd/abapEnvironmentBuild_test.go b/cmd/abapEnvironmentBuild_test.go index 510fb4ebde..0b3ba813f1 100644 --- a/cmd/abapEnvironmentBuild_test.go +++ 
b/cmd/abapEnvironmentBuild_test.go @@ -129,7 +129,7 @@ func TestRunAbapEnvironmentBuild(t *testing.T) { // test err := runAbapEnvironmentBuild(&config, nil, utils, &cpe) // assert - finalValues := `[{"value_id":"PACKAGES","value":"/BUILD/AUNIT_DUMMY_TESTS"},{"value_id":"BUILD_FRAMEWORK_MODE","value":"P"}]` + finalValues := `[{"value_id":"PACKAGES","value":"/BUILD/AUNIT_DUMMY_TESTS"}]` err = json.Unmarshal([]byte(finalValues), &expectedValueList) assert.NoError(t, err) err = json.Unmarshal([]byte(cpe.abap.buildValues), &recordedValueList) @@ -388,6 +388,17 @@ func TestEvaluateAddonDescriptor(t *testing.T) { }) } +func TestValues2String(t *testing.T) { + t.Run("dito", func(t *testing.T) { + var myValues []abapbuild.Value + myValues = append(myValues, abapbuild.Value{ValueID: "Name", Value: "Hugo"}) + myValues = append(myValues, abapbuild.Value{ValueID: "Age", Value: "43"}) + myValues = append(myValues, abapbuild.Value{ValueID: "Hight", Value: "17cm"}) + myString := values2string(myValues) + assert.Equal(t, "Name = Hugo; Age = 43; Hight = 17cm", myString) + }) +} + var addonDescriptor = `{ "addonProduct":"/ITAPC1/I_CURRENCZPRODUCT", "addonVersion":"1.0.0", From 126fbbcc69f55332a52d4df7635d2d34b2c17e85 Mon Sep 17 00:00:00 2001 From: tiloKo <70266685+tiloKo@users.noreply.github.com> Date: Thu, 6 Apr 2023 08:31:22 +0200 Subject: [PATCH 11/23] reduce Log Output (Info -> Debug) (#4322) Co-authored-by: ffeldmann --- cmd/piper.go | 10 +++++----- pkg/config/config.go | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/cmd/piper.go b/cmd/piper.go index fda676578b..556980ff87 100644 --- a/cmd/piper.go +++ b/cmd/piper.go @@ -272,7 +272,7 @@ func initStageName(outputToLog bool) { var stageNameSource string if outputToLog { defer func() { - log.Entry().Infof("Using stageName '%s' from %s", GeneralConfig.StageName, stageNameSource) + log.Entry().Debugf("Using stageName '%s' from %s", GeneralConfig.StageName, stageNameSource) }() } @@ -363,7 +363,7 @@ func PrepareConfig(cmd *cobra.Command, metadata *config.StepData, stepName strin { projectConfigFile := getProjectConfigFile(GeneralConfig.CustomConfig) if exists, err := piperutils.FileExists(projectConfigFile); exists { - log.Entry().Infof("Project config: '%s'", projectConfigFile) + log.Entry().Debugf("Project config: '%s'", projectConfigFile) if customConfig, err = openFile(projectConfigFile, GeneralConfig.GitHubAccessTokens); err != nil { return errors.Wrapf(err, "Cannot read '%s'", projectConfigFile) } @@ -381,11 +381,11 @@ func PrepareConfig(cmd *cobra.Command, metadata *config.StepData, stepName strin // only create error for non-default values if err != nil { if projectDefaultFile != ".pipeline/defaults.yaml" { - log.Entry().Infof("Project defaults: '%s'", projectDefaultFile) + log.Entry().Debugf("Project defaults: '%s'", projectDefaultFile) return errors.Wrapf(err, "Cannot read '%s'", projectDefaultFile) } } else { - log.Entry().Infof("Project defaults: '%s'", projectDefaultFile) + log.Entry().Debugf("Project defaults: '%s'", projectDefaultFile) defaultConfig = append(defaultConfig, fc) } } @@ -430,7 +430,7 @@ func PrepareConfig(cmd *cobra.Command, metadata *config.StepData, stepName strin func retrieveHookConfig(source map[string]interface{}, target *HookConfiguration) { if source != nil { - log.Entry().Info("Retrieving hook configuration") + log.Entry().Debug("Retrieving hook configuration") b, err := json.Marshal(source) if err != nil { log.Entry().Warningf("Failed to marshal source hook configuration: %v", err) diff --git 
a/pkg/config/config.go b/pkg/config/config.go index 41568b78d1..7133b72e34 100644 --- a/pkg/config/config.go +++ b/pkg/config/config.go @@ -145,7 +145,7 @@ func (c *Config) InitializeConfig(configuration io.ReadCloser, defaults []io.Rea // consider custom defaults defined in config.yml unless told otherwise if ignoreCustomDefaults { - log.Entry().Info("Ignoring custom defaults from pipeline config") + log.Entry().Debug("Ignoring custom defaults from pipeline config") } else if c.CustomDefaults != nil && len(c.CustomDefaults) > 0 { if c.openFile == nil { c.openFile = OpenPiperFile From fd61f34ec21002d64316b02d11d4f25e081bd544 Mon Sep 17 00:00:00 2001 From: gerstneralex <116895809+gerstneralex@users.noreply.github.com> Date: Thu, 6 Apr 2023 10:09:57 +0200 Subject: [PATCH 12/23] Activate go implementation of tmsUpload (#4308) * Activate go implementation * Add unstash * Comment some when conditions * Remove unstash * Print config parameter * Add useGoStep parameter * Uncomment when conditions * Add traces * Remove traces * Remove useGoStep from metadata * remove trace * Add comment * namedUser handling --- vars/tmsUpload.groovy | 28 ++++++++++++++++++++++++++-- 1 file changed, 26 insertions(+), 2 deletions(-) diff --git a/vars/tmsUpload.groovy b/vars/tmsUpload.groovy index 54a0a40d6b..817569a173 100644 --- a/vars/tmsUpload.groovy +++ b/vars/tmsUpload.groovy @@ -9,6 +9,7 @@ import groovy.transform.Field import static com.sap.piper.Prerequisites.checkScript @Field String STEP_NAME = getClass().getName() +@Field String METADATA_FILE = 'metadata/tmsUpload.yaml' @Field Set GENERAL_CONFIG_KEYS = [ /** @@ -49,7 +50,12 @@ import static com.sap.piper.Prerequisites.checkScript /** * Proxy which should be used for the communication with the Transport Management Service Backend. */ - 'proxy' + 'proxy', + /** + * Toggle to activate a new Golang implementation of the step. Off by default. + * @possibleValues true, false + */ + 'useGoStep' ]) @Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS + GENERAL_CONFIG_KEYS @@ -86,6 +92,22 @@ void call(Map parameters = [:]) { .withMandatoryProperty('credentialsId') .use() + def namedUser = jenkinsUtils.getJobStartedByUserId() + + if (config.useGoStep == true) { + List credentials = [ + [type: 'token', id: 'credentialsId', env: ['PIPER_tmsServiceKey']] + ] + + if (namedUser) { + parameters.namedUser = namedUser + } + + utils.unstashAll(config.stashContent) + piperExecuteBin(parameters, STEP_NAME, METADATA_FILE, credentials) + return + } + // telemetry reporting new Utils().pushToSWA([ step : STEP_NAME, @@ -103,7 +125,9 @@ void call(Map parameters = [:]) { def customDescription = config.customDescription ? 
"${config.customDescription}" : "Git CommitId: ${script.commonPipelineEnvironment.getGitCommitId()}" def description = customDescription - def namedUser = jenkinsUtils.getJobStartedByUserId() ?: config.namedUser + if (!namedUser) { + namedUser = config.namedUser + } def nodeName = config.nodeName def mtaPath = config.mtaPath From 99603cdee9f870393e815ed46dc32170f046bec7 Mon Sep 17 00:00:00 2001 From: Christopher Fenner <26137398+CCFenner@users.noreply.github.com> Date: Tue, 11 Apr 2023 23:15:00 +0200 Subject: [PATCH 13/23] chore(tests): correct test assertions (#4276) --- pkg/config/vault_test.go | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/pkg/config/vault_test.go b/pkg/config/vault_test.go index f486e4e4aa..0f41352fd3 100644 --- a/pkg/config/vault_test.go +++ b/pkg/config/vault_test.go @@ -132,7 +132,7 @@ func TestVaultConfigLoad(t *testing.T) { stepConfig := StepConfig{Config: map[string]interface{}{}} stepParams := []StepParameters{stepParam(secretName, "vaultSecret", secretNameOverrideKey, secretName)} resolveAllVaultReferences(&stepConfig, vaultMock, stepParams) - assert.Equal(t, nil, stepConfig.Config[secretName]) + assert.Nil(t, stepConfig.Config[secretName]) vaultMock.AssertNotCalled(t, "GetKvSecret", mock.AnythingOfType("string")) }) } @@ -154,7 +154,7 @@ func TestVaultSecretFiles(t *testing.T) { contentByte, err := ioutil.ReadFile(path) assert.NoError(t, err) content := string(contentByte) - assert.Equal(t, content, "value1") + assert.Equal(t, "value1", content) }) os.RemoveAll(VaultSecretFileDirectory) @@ -248,10 +248,10 @@ func TestResolveVaultTestCredentials(t *testing.T) { resolveVaultTestCredentials(&stepConfig, vaultMock) // assert - for k, v := range vaultData { + for k, expectedValue := range vaultData { env := envPrefix + strings.ToUpper(k) assert.NotEmpty(t, os.Getenv(env)) - assert.Equal(t, os.Getenv(env), v) + assert.Equal(t, expectedValue, os.Getenv(env)) } }) @@ -283,13 +283,13 @@ func TestResolveVaultTestCredentials(t *testing.T) { resolveVaultCredentials(&stepConfig, vaultMock) // assert - for k, v := range vaultData { + for k, expectedValue := range vaultData { env := envPrefix + strings.ToUpper(k) assert.NotEmpty(t, os.Getenv(env)) - assert.Equal(t, os.Getenv(env), v) + assert.Equal(t, expectedValue, os.Getenv(env)) standardEnv := standardEnvPrefix + strings.ToUpper(k) assert.NotEmpty(t, os.Getenv(standardEnv)) - assert.Equal(t, os.Getenv(standardEnv), v) + assert.Equal(t, expectedValue, os.Getenv(standardEnv)) } }) } @@ -317,10 +317,10 @@ func TestResolveVaultTestCredentials(t *testing.T) { resolveVaultTestCredentials(&stepConfig, vaultMock) // assert - for k, v := range vaultData { + for k, expectedValue := range vaultData { env := envPrefix + strings.ToUpper(k) assert.NotEmpty(t, os.Getenv(env)) - assert.Equal(t, os.Getenv(env), v) + assert.Equal(t, expectedValue, os.Getenv(env)) } }) } From f9617f531506c6ce381c3e1d7602881805b914d6 Mon Sep 17 00:00:00 2001 From: Akramdzhon Azamov <58902906+akram8008@users.noreply.github.com> Date: Thu, 13 Apr 2023 15:10:26 +0500 Subject: [PATCH 14/23] feat(blackduck): Npm extra parameters (#4327) * added two new parameters for npm --------- Co-authored-by: Andrei Kireev --- cmd/detectExecuteScan.go | 10 ++++++++++ cmd/detectExecuteScan_generated.go | 22 +++++++++++++++++++++ resources/metadata/detectExecuteScan.yaml | 24 +++++++++++++++++++++++ 3 files changed, 56 insertions(+) diff --git a/cmd/detectExecuteScan.go b/cmd/detectExecuteScan.go index 7cd1c21e0a..3000615091 100644 --- 
a/cmd/detectExecuteScan.go +++ b/cmd/detectExecuteScan.go @@ -389,6 +389,16 @@ func addDetectArgs(args []string, config detectExecuteScanOptions, utils detectU args = append(args, fmt.Sprintf("--detect.tools=%v", strings.Join(config.DetectTools, ","))) } + // to exclude dependency types for npm + if len(config.NpmDependencyTypesExcluded) > 0 && !checkIfArgumentIsInScanProperties(config, "detect.npm.dependency.types.excluded") { + args = append(args, fmt.Sprintf("--detect.npm.dependency.types.excluded=%v", strings.ToUpper(strings.Join(config.NpmDependencyTypesExcluded, ",")))) + } + + // A space-separated list of additional arguments that Detect will add at then end of the npm ls command line + if len(config.NpmArguments) > 0 && !checkIfArgumentIsInScanProperties(config, "detect.npm.arguments") { + args = append(args, fmt.Sprintf("--detect.npm.arguments=%v", strings.ToUpper(strings.Join(config.NpmArguments, " ")))) + } + mavenArgs, err := maven.DownloadAndGetMavenParameters(config.GlobalSettingsFile, config.ProjectSettingsFile, utils) if err != nil { return nil, err diff --git a/cmd/detectExecuteScan_generated.go b/cmd/detectExecuteScan_generated.go index 6219a02437..0e30055569 100644 --- a/cmd/detectExecuteScan_generated.go +++ b/cmd/detectExecuteScan_generated.go @@ -57,6 +57,8 @@ type detectExecuteScanOptions struct { FailOnSevereVulnerabilities bool `json:"failOnSevereVulnerabilities,omitempty"` BuildTool string `json:"buildTool,omitempty"` ExcludedDirectories []string `json:"excludedDirectories,omitempty"` + NpmDependencyTypesExcluded []string `json:"npmDependencyTypesExcluded,omitempty" validate:"possible-values=NONE DEV PEER"` + NpmArguments []string `json:"npmArguments,omitempty"` } type detectExecuteScanInflux struct { @@ -289,6 +291,8 @@ func addDetectExecuteScanFlags(cmd *cobra.Command, stepConfig *detectExecuteScan cmd.Flags().BoolVar(&stepConfig.FailOnSevereVulnerabilities, "failOnSevereVulnerabilities", true, "Whether to fail the step on severe vulnerabilties or not") cmd.Flags().StringVar(&stepConfig.BuildTool, "buildTool", os.Getenv("PIPER_buildTool"), "Defines the tool which is used for building the artifact.") cmd.Flags().StringSliceVar(&stepConfig.ExcludedDirectories, "excludedDirectories", []string{}, "List of directories which should be excluded from the scan.") + cmd.Flags().StringSliceVar(&stepConfig.NpmDependencyTypesExcluded, "npmDependencyTypesExcluded", []string{}, "List of npm dependency types which Detect should exclude from the BOM.") + cmd.Flags().StringSliceVar(&stepConfig.NpmArguments, "npmArguments", []string{}, "List of additional arguments that Detect will add at then end of the npm ls command line when Detect executes the NPM CLI Detector on an NPM project.") cmd.MarkFlagRequired("token") cmd.MarkFlagRequired("projectName") @@ -676,6 +680,24 @@ func detectExecuteScanMetadata() config.StepData { Aliases: []config.Alias{{Name: "detect/excludedDirectories"}}, Default: []string{}, }, + { + Name: "npmDependencyTypesExcluded", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "[]string", + Mandatory: false, + Aliases: []config.Alias{{Name: "detect/npmDependencyTypesExcluded"}}, + Default: []string{}, + }, + { + Name: "npmArguments", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "[]string", + Mandatory: false, + Aliases: []config.Alias{{Name: "detect/npmArguments"}}, + Default: []string{}, + }, }, }, Containers: []config.Container{ diff --git 
a/resources/metadata/detectExecuteScan.yaml b/resources/metadata/detectExecuteScan.yaml index 1fca67e59f..25cf4cf102 100644 --- a/resources/metadata/detectExecuteScan.yaml +++ b/resources/metadata/detectExecuteScan.yaml @@ -427,6 +427,30 @@ spec: - PARAMETERS - STAGES - STEPS + - name: npmDependencyTypesExcluded + description: + "List of npm dependency types which Detect should exclude from the BOM." + aliases: + - name: detect/npmDependencyTypesExcluded + type: "[]string" + scope: + - PARAMETERS + - STAGES + - STEPS + possibleValues: + - NONE + - DEV + - PEER + - name: npmArguments + description: + "List of additional arguments that Detect will add at then end of the npm ls command line when Detect executes the NPM CLI Detector on an NPM project." + aliases: + - name: detect/npmArguments + type: "[]string" + scope: + - PARAMETERS + - STAGES + - STEPS outputs: resources: - name: influx From e3935ca088fc05d93b32f7e1ad92daf7eef8e94c Mon Sep 17 00:00:00 2001 From: Jordi van Liempt <35920075+jliempt@users.noreply.github.com> Date: Mon, 17 Apr 2023 08:35:13 +0200 Subject: [PATCH 15/23] feat(vault): Vault secret rotation for GH Actions (#4280) * rotate Vault secret on GH Actions * test alternative sodium package * try doing it without libsodium * disable validity check for testing purposes * basic unit test * re-enable secret validity check * tidy * tidy parameters * forgot to update param names in code * apply review feedback * improve error logging * update step metadata * apply metadata suggestion from review Co-authored-by: Christopher Fenner <26137398+CCFenner@users.noreply.github.com> * align githubToken param * Fix secretStore * Add alias for githubToken * Move logic to separate file --------- Co-authored-by: I557621 Co-authored-by: Christopher Fenner <26137398+CCFenner@users.noreply.github.com> Co-authored-by: Vyacheslav Starostin --- cmd/vaultRotateSecretId.go | 28 +++++++++ cmd/vaultRotateSecretId_generated.go | 65 ++++++++++++++++++++- go.mod | 2 +- pkg/github/secret.go | 39 +++++++++++++ pkg/github/secret_test.go | 30 ++++++++++ resources/metadata/vaultRotateSecretId.yaml | 55 ++++++++++++++++- 6 files changed, 215 insertions(+), 4 deletions(-) create mode 100644 pkg/github/secret.go create mode 100644 pkg/github/secret_test.go diff --git a/cmd/vaultRotateSecretId.go b/cmd/vaultRotateSecretId.go index f6f460ea33..5f2f9a0b88 100644 --- a/cmd/vaultRotateSecretId.go +++ b/cmd/vaultRotateSecretId.go @@ -9,6 +9,7 @@ import ( "github.com/hashicorp/vault/api" "github.com/SAP/jenkins-library/pkg/ado" + "github.com/SAP/jenkins-library/pkg/github" "github.com/SAP/jenkins-library/pkg/jenkins" "github.com/SAP/jenkins-library/pkg/vault" @@ -131,6 +132,33 @@ func writeVaultSecretIDToStore(config *vaultRotateSecretIdOptions, secretID stri log.Entry().Warn("Could not write secret ID back to Azure DevOps") return err } + case "github": + // Additional info: + // https://github.com/google/go-github/blob/master/example/newreposecretwithxcrypto/main.go + + ctx, client, err := github.NewClient(config.GithubToken, config.GithubAPIURL, "", []string{}) + if err != nil { + log.Entry().Warnf("Could not write secret ID back to GitHub Actions: GitHub client not created: %v", err) + return err + } + + publicKey, _, err := client.Actions.GetRepoPublicKey(ctx, config.Owner, config.Repository) + if err != nil { + log.Entry().Warnf("Could not write secret ID back to GitHub Actions: repository's public key not retrieved: %v", err) + return err + } + + encryptedSecret, err := 
github.CreateEncryptedSecret(config.VaultAppRoleSecretTokenCredentialsID, secretID, publicKey) + if err != nil { + log.Entry().Warnf("Could not write secret ID back to GitHub Actions: secret encryption failed: %v", err) + return err + } + + _, err = client.Actions.CreateOrUpdateRepoSecret(ctx, config.Owner, config.Repository, encryptedSecret) + if err != nil { + log.Entry().Warnf("Could not write secret ID back to GitHub Actions: submission to GitHub failed: %v", err) + return err + } default: return fmt.Errorf("error: invalid secret store: %s", config.SecretStore) } diff --git a/cmd/vaultRotateSecretId_generated.go b/cmd/vaultRotateSecretId_generated.go index 88b6c1b30b..44cfde2f11 100644 --- a/cmd/vaultRotateSecretId_generated.go +++ b/cmd/vaultRotateSecretId_generated.go @@ -16,7 +16,7 @@ import ( ) type vaultRotateSecretIdOptions struct { - SecretStore string `json:"secretStore,omitempty" validate:"possible-values=jenkins ado"` + SecretStore string `json:"secretStore,omitempty" validate:"possible-values=jenkins ado github"` JenkinsURL string `json:"jenkinsUrl,omitempty"` JenkinsCredentialDomain string `json:"jenkinsCredentialDomain,omitempty"` JenkinsUsername string `json:"jenkinsUsername,omitempty"` @@ -29,6 +29,10 @@ type vaultRotateSecretIdOptions struct { AdoPersonalAccessToken string `json:"adoPersonalAccessToken,omitempty" validate:"required_if=SecretStore ado"` AdoProject string `json:"adoProject,omitempty"` AdoPipelineID int `json:"adoPipelineId,omitempty"` + GithubToken string `json:"githubToken,omitempty" validate:"required_if=SecretStore github"` + GithubAPIURL string `json:"githubApiUrl,omitempty"` + Owner string `json:"owner,omitempty"` + Repository string `json:"repository,omitempty"` } // VaultRotateSecretIdCommand Rotate Vault AppRole Secret ID @@ -66,6 +70,7 @@ func VaultRotateSecretIdCommand() *cobra.Command { log.RegisterSecret(stepConfig.JenkinsUsername) log.RegisterSecret(stepConfig.JenkinsToken) log.RegisterSecret(stepConfig.AdoPersonalAccessToken) + log.RegisterSecret(stepConfig.GithubToken) if len(GeneralConfig.HookConfig.SentryConfig.Dsn) > 0 { sentryHook := log.NewSentryHook(GeneralConfig.HookConfig.SentryConfig.Dsn, GeneralConfig.CorrelationID) @@ -133,7 +138,7 @@ func addVaultRotateSecretIdFlags(cmd *cobra.Command, stepConfig *vaultRotateSecr cmd.Flags().StringVar(&stepConfig.JenkinsCredentialDomain, "jenkinsCredentialDomain", `_`, "The jenkins credential domain which should be used") cmd.Flags().StringVar(&stepConfig.JenkinsUsername, "jenkinsUsername", os.Getenv("PIPER_jenkinsUsername"), "The jenkins username") cmd.Flags().StringVar(&stepConfig.JenkinsToken, "jenkinsToken", os.Getenv("PIPER_jenkinsToken"), "The jenkins token") - cmd.Flags().StringVar(&stepConfig.VaultAppRoleSecretTokenCredentialsID, "vaultAppRoleSecretTokenCredentialsId", os.Getenv("PIPER_vaultAppRoleSecretTokenCredentialsId"), "The Jenkins credential ID or Azure DevOps variable name for the Vault AppRole Secret ID credential") + cmd.Flags().StringVar(&stepConfig.VaultAppRoleSecretTokenCredentialsID, "vaultAppRoleSecretTokenCredentialsId", os.Getenv("PIPER_vaultAppRoleSecretTokenCredentialsId"), "The Jenkins credential ID, Azure DevOps variable name, or GitHub Actions secret name for the Vault AppRole Secret ID credential") cmd.Flags().StringVar(&stepConfig.VaultServerURL, "vaultServerUrl", os.Getenv("PIPER_vaultServerUrl"), "The URL for the Vault server to use") cmd.Flags().StringVar(&stepConfig.VaultNamespace, "vaultNamespace", os.Getenv("PIPER_vaultNamespace"), "The Vault namespace that 
should be used (optional)") cmd.Flags().IntVar(&stepConfig.DaysBeforeExpiry, "daysBeforeExpiry", 15, "The amount of days before expiry until the secret ID gets rotated") @@ -141,6 +146,10 @@ func addVaultRotateSecretIdFlags(cmd *cobra.Command, stepConfig *vaultRotateSecr cmd.Flags().StringVar(&stepConfig.AdoPersonalAccessToken, "adoPersonalAccessToken", os.Getenv("PIPER_adoPersonalAccessToken"), "The Azure DevOps personal access token") cmd.Flags().StringVar(&stepConfig.AdoProject, "adoProject", os.Getenv("PIPER_adoProject"), "The Azure DevOps project ID. Project name also can be used") cmd.Flags().IntVar(&stepConfig.AdoPipelineID, "adoPipelineId", 0, "The Azure DevOps pipeline ID. Also called as definition ID") + cmd.Flags().StringVar(&stepConfig.GithubToken, "githubToken", os.Getenv("PIPER_githubToken"), "GitHub personal access token as per https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line with the scope 'repo'") + cmd.Flags().StringVar(&stepConfig.GithubAPIURL, "githubApiUrl", `https://api.github.com`, "Set the GitHub API URL that corresponds to the pipeline repository") + cmd.Flags().StringVar(&stepConfig.Owner, "owner", os.Getenv("PIPER_owner"), "Owner of the pipeline GitHub repository") + cmd.Flags().StringVar(&stepConfig.Repository, "repository", os.Getenv("PIPER_repository"), "Name of the pipeline GitHub repository") cmd.MarkFlagRequired("vaultAppRoleSecretTokenCredentialsId") cmd.MarkFlagRequired("vaultServerUrl") @@ -298,6 +307,58 @@ func vaultRotateSecretIdMetadata() config.StepData { Aliases: []config.Alias{}, Default: 0, }, + { + Name: "githubToken", + ResourceRef: []config.ResourceReference{ + { + Name: "githubVaultSecretName", + Type: "vaultSecret", + Default: "github", + }, + }, + Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{{Name: "access_token"}, {Name: "token"}}, + Default: os.Getenv("PIPER_githubToken"), + }, + { + Name: "githubApiUrl", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: `https://api.github.com`, + }, + { + Name: "owner", + ResourceRef: []config.ResourceReference{ + { + Name: "commonPipelineEnvironment", + Param: "github/owner", + }, + }, + Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_owner"), + }, + { + Name: "repository", + ResourceRef: []config.ResourceReference{ + { + Name: "commonPipelineEnvironment", + Param: "github/repository", + }, + }, + Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_repository"), + }, }, }, }, diff --git a/go.mod b/go.mod index d0245c9a2f..50e48dc45f 100644 --- a/go.mod +++ b/go.mod @@ -303,7 +303,7 @@ require ( go.opencensus.io v0.23.0 // indirect go.starlark.net v0.0.0-20200306205701-8dd3e2ee1dd5 // indirect go.uber.org/atomic v1.9.0 // indirect - golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d // indirect + golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d golang.org/x/net v0.7.0 // indirect golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4 golang.org/x/sys v0.5.0 // indirect diff --git a/pkg/github/secret.go b/pkg/github/secret.go new file mode 100644 index 0000000000..7a29813ea5 --- /dev/null +++ b/pkg/github/secret.go 
@@ -0,0 +1,39 @@ +package github + +import ( + "crypto/rand" + "encoding/base64" + + "github.com/google/go-github/v45/github" + "golang.org/x/crypto/nacl/box" + + "github.com/SAP/jenkins-library/pkg/log" +) + +// CreateEncryptedSecret creates an encrypted secret using a public key from a GitHub repository, which can be sent through the GitHub API +// https://github.com/google/go-github/blob/master/example/newreposecretwithxcrypto/main.go +func CreateEncryptedSecret(secretName, secretValue string, publicKey *github.PublicKey) (*github.EncryptedSecret, error) { + decodedPublicKey, err := base64.StdEncoding.DecodeString(publicKey.GetKey()) + if err != nil { + log.Entry().Warn("Could not decode public key from base64") + return nil, err + } + + var boxKey [32]byte + copy(boxKey[:], decodedPublicKey) + secretBytes := []byte(secretValue) + encryptedSecretBytes, err := box.SealAnonymous([]byte{}, secretBytes, &boxKey, rand.Reader) + if err != nil { + log.Entry().Warn("Could not encrypt secret using public key") + return nil, err + } + + encryptedSecretString := base64.StdEncoding.EncodeToString(encryptedSecretBytes) + + githubSecret := &github.EncryptedSecret{ + Name: secretName, + KeyID: publicKey.GetKeyID(), + EncryptedValue: encryptedSecretString, + } + return githubSecret, nil +} diff --git a/pkg/github/secret_test.go b/pkg/github/secret_test.go new file mode 100644 index 0000000000..66c64e45d1 --- /dev/null +++ b/pkg/github/secret_test.go @@ -0,0 +1,30 @@ +package github + +import ( + "encoding/base64" + "testing" + + "github.com/google/go-github/v45/github" + "github.com/stretchr/testify/assert" +) + +func TestRunGithubCreateEncryptedSecret(t *testing.T) { + t.Parallel() + + t.Run("Success", func(t *testing.T) { + mockKeyID := "1" + mockB64Key := base64.StdEncoding.EncodeToString([]byte("testPublicKey")) + mockPubKey := github.PublicKey{KeyID: &mockKeyID, Key: &mockB64Key} + + mockName := "testSecret" + mockValue := "testValue" + + // test + githubSecret, err := CreateEncryptedSecret(mockName, mockValue, &mockPubKey) + + // assert + assert.NoError(t, err) + assert.Equal(t, mockName, githubSecret.Name) + assert.Equal(t, mockKeyID, githubSecret.KeyID) + }) +} diff --git a/resources/metadata/vaultRotateSecretId.yaml b/resources/metadata/vaultRotateSecretId.yaml index aa24fb27de..4c8880b53f 100644 --- a/resources/metadata/vaultRotateSecretId.yaml +++ b/resources/metadata/vaultRotateSecretId.yaml @@ -16,6 +16,7 @@ spec: possibleValues: - jenkins - ado + - github - name: jenkinsUrl type: string description: "The jenkins url" @@ -68,7 +69,7 @@ spec: default: jenkins - name: vaultAppRoleSecretTokenCredentialsId type: string - description: The Jenkins credential ID or Azure DevOps variable name for the Vault AppRole Secret ID credential + description: The Jenkins credential ID, Azure DevOps variable name, or GitHub Actions secret name for the Vault AppRole Secret ID credential scope: - GENERAL - PARAMETERS @@ -139,3 +140,55 @@ spec: - STAGES - STEPS description: The Azure DevOps pipeline ID. 
Also called as definition ID + - name: githubToken + aliases: + - name: access_token + - name: token + type: string + scope: + - GENERAL + - PARAMETERS + - STAGES + - STEPS + description: "GitHub personal access token as per + https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line + with the scope 'repo'" + secret: true + mandatoryIf: + - name: secretStore + value: github + resourceRef: + - type: vaultSecret + default: github + name: githubVaultSecretName + - name: githubApiUrl + description: Set the GitHub API URL that corresponds to the pipeline repository + scope: + - GENERAL + - PARAMETERS + - STAGES + - STEPS + type: string + default: "https://api.github.com" + - name: owner + description: Owner of the pipeline GitHub repository + resourceRef: + - name: commonPipelineEnvironment + param: github/owner + scope: + - GENERAL + - PARAMETERS + - STAGES + - STEPS + type: string + - name: repository + description: Name of the pipeline GitHub repository + resourceRef: + - name: commonPipelineEnvironment + param: github/repository + scope: + - GENERAL + - PARAMETERS + - STAGES + - STEPS + type: string From 4f4e667797696bf687a8ec2fd01b017701efbca4 Mon Sep 17 00:00:00 2001 From: Maurice Breit Date: Tue, 18 Apr 2023 08:56:32 +0200 Subject: [PATCH 16/23] add ascAppUpload step (#4286) * add ascAppUpload step add step that uploads an app binary to Application Support Center (ASC) * re-run go generate * fix typo in CODEOWNERS * Update CODEOWNERS --- .github/CODEOWNERS | 2 + cmd/ascAppUpload.go | 106 +++++++++++ cmd/ascAppUpload_generated.go | 253 +++++++++++++++++++++++++++ cmd/ascAppUpload_generated_test.go | 17 ++ cmd/ascAppUpload_test.go | 185 ++++++++++++++++++++ cmd/integrationArtifactTransport.go | 10 +- cmd/metadata_generated.go | 1 + cmd/piper.go | 1 + pkg/asc/asc.go | 198 +++++++++++++++++++++ resources/metadata/ascAppUpload.yaml | 93 ++++++++++ 10 files changed, 861 insertions(+), 5 deletions(-) create mode 100644 cmd/ascAppUpload.go create mode 100644 cmd/ascAppUpload_generated.go create mode 100644 cmd/ascAppUpload_generated_test.go create mode 100644 cmd/ascAppUpload_test.go create mode 100644 pkg/asc/asc.go create mode 100644 resources/metadata/ascAppUpload.yaml diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index f4dcf9f843..76178c5890 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -60,6 +60,8 @@ AbapAddonAssemblyKit* @SAP/jenkins-library-abap abapEnvironment* @SAP/jenkins-library-abap AbapEnvironment* @SAP/jenkins-library-abap +ascAppUpload* @Mau04 @inf2381 +AscAppUpload* @Mau04 @inf2381 ##################### # Integration tests # diff --git a/cmd/ascAppUpload.go b/cmd/ascAppUpload.go new file mode 100644 index 0000000000..5c37659952 --- /dev/null +++ b/cmd/ascAppUpload.go @@ -0,0 +1,106 @@ +package cmd + +import ( + "github.com/SAP/jenkins-library/pkg/asc" + "github.com/SAP/jenkins-library/pkg/command" + piperHttp "github.com/SAP/jenkins-library/pkg/http" + "github.com/SAP/jenkins-library/pkg/log" + "github.com/SAP/jenkins-library/pkg/piperutils" + "github.com/SAP/jenkins-library/pkg/telemetry" + "github.com/pkg/errors" +) + +type ascAppUploadUtils interface { + command.ExecRunner +} + +type ascAppUploadUtilsBundle struct { + *command.Command + *piperutils.Files +} + +func newAscAppUploadUtils() ascAppUploadUtils { + utils := ascAppUploadUtilsBundle{ + Command: &command.Command{}, + Files: &piperutils.Files{}, + } + // Reroute command output to logging framework + utils.Stdout(log.Writer()) + 
utils.Stderr(log.Writer()) + return &utils +} + +func ascAppUpload(config ascAppUploadOptions, telemetryData *telemetry.CustomData) { + utils := newAscAppUploadUtils() + client := &piperHttp.Client{} + + ascClient, err := asc.NewSystemInstance(client, config.ServerURL, config.AppToken) + if err != nil { + log.Entry().WithError(err).Fatalf("Failed to create ASC client talking to URL %v", config.ServerURL) + } else { + err = runAscAppUpload(&config, telemetryData, utils, ascClient) + } + + if err != nil { + log.Entry().WithError(err).Fatal("step execution failed") + } +} + +func runAscAppUpload(config *ascAppUploadOptions, telemetryData *telemetry.CustomData, utils ascAppUploadUtils, ascClient asc.System) error { + + if len(config.JamfTargetSystem) == 0 { + return errors.Errorf("jamfTargetSystem must be set") + } + + log.Entry().Infof("Collect data to create new release in ASC") + + app, err := ascClient.GetAppById(config.AppID) + if err != nil { + log.SetErrorCategory(log.ErrorConfiguration) + return errors.Wrapf(err, "failed to get app information") + } + + log.Entry().Debugf("Found App with name %v", app.AppName) + + log.Entry().Infof("Create release for %v in ASC (AppID %v)", app.AppName, app.AppId) + + releaseResponse, err := ascClient.CreateRelease(app.AppId, config.ReleaseAppVersion, config.ReleaseDescription, config.ReleaseDate, config.ReleaseVisible) + + if err != nil { + log.SetErrorCategory(log.ErrorService) + return errors.Wrapf(err, "failed to create release") + } + + if releaseResponse.Status != "success" { + log.SetErrorCategory(log.ErrorService) + return errors.New(releaseResponse.Message) + } + + log.Entry().Infof("Collect data to upload app to ASC & Jamf") + + jamfAppInformationResponse, err := ascClient.GetJamfAppInfo(app.BundleId, config.JamfTargetSystem) + if err != nil { + log.SetErrorCategory(log.ErrorService) + return errors.Wrapf(err, "failed to get jamf app info") + } + + jamfAppId := jamfAppInformationResponse.MobileDeviceApplication.General.Id + + if jamfAppId == 0 { + return errors.Errorf("failed to get jamf app id") + } + + log.Entry().Debugf("Got Jamf info for app %v, jamfId: %v", app.AppName, jamfAppId) + + log.Entry().Infof("Upload ipa %v to ASC & Jamf", config.FilePath) + + err = ascClient.UploadIpa(config.FilePath, jamfAppId, config.JamfTargetSystem, app.BundleId, releaseResponse.Data) + if err != nil { + log.SetErrorCategory(log.ErrorService) + return errors.Wrapf(err, "failed to upload ipa") + } + + log.Entry().Infof("Successfully uploaded %v to ASC (AppId %v) & Jamf (Id %v)", config.FilePath, app.AppId, jamfAppId) + + return nil +} diff --git a/cmd/ascAppUpload_generated.go b/cmd/ascAppUpload_generated.go new file mode 100644 index 0000000000..0550ff8f16 --- /dev/null +++ b/cmd/ascAppUpload_generated.go @@ -0,0 +1,253 @@ +// Code generated by piper's step-generator. DO NOT EDIT. 
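+// (Produced by the step generator from resources/metadata/ascAppUpload.yaml, which is added
+// in the same commit; after changing that metadata, re-run go generate rather than editing
+// the generated code below by hand.)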
+ +package cmd + +import ( + "fmt" + "os" + "time" + + "github.com/SAP/jenkins-library/pkg/config" + "github.com/SAP/jenkins-library/pkg/log" + "github.com/SAP/jenkins-library/pkg/splunk" + "github.com/SAP/jenkins-library/pkg/telemetry" + "github.com/SAP/jenkins-library/pkg/validation" + "github.com/spf13/cobra" +) + +type ascAppUploadOptions struct { + ServerURL string `json:"serverUrl,omitempty"` + AppToken string `json:"appToken,omitempty"` + AppID string `json:"appId,omitempty"` + FilePath string `json:"filePath,omitempty"` + JamfTargetSystem string `json:"jamfTargetSystem,omitempty"` + ReleaseAppVersion string `json:"releaseAppVersion,omitempty"` + ReleaseDescription string `json:"releaseDescription,omitempty"` + ReleaseDate string `json:"releaseDate,omitempty"` + ReleaseVisible bool `json:"releaseVisible,omitempty"` +} + +// AscAppUploadCommand Upload an app to ASC +func AscAppUploadCommand() *cobra.Command { + const STEP_NAME = "ascAppUpload" + + metadata := ascAppUploadMetadata() + var stepConfig ascAppUploadOptions + var startTime time.Time + var logCollector *log.CollectorHook + var splunkClient *splunk.Splunk + telemetryClient := &telemetry.Telemetry{} + + var createAscAppUploadCmd = &cobra.Command{ + Use: STEP_NAME, + Short: "Upload an app to ASC", + Long: `With this step you can upload an app to ASC. +It creates a new release note in ASC and uploads the binary to ASC and therewith to Jamf. +For more information about ASC, check out [Application Support Center](https://github.com/SAP/application-support-center).`, + PreRunE: func(cmd *cobra.Command, _ []string) error { + startTime = time.Now() + log.SetStepName(STEP_NAME) + log.SetVerbose(GeneralConfig.Verbose) + + GeneralConfig.GitHubAccessTokens = ResolveAccessTokens(GeneralConfig.GitHubTokens) + + path, _ := os.Getwd() + fatalHook := &log.FatalHook{CorrelationID: GeneralConfig.CorrelationID, Path: path} + log.RegisterHook(fatalHook) + + err := PrepareConfig(cmd, &metadata, STEP_NAME, &stepConfig, config.OpenPiperFile) + if err != nil { + log.SetErrorCategory(log.ErrorConfiguration) + return err + } + log.RegisterSecret(stepConfig.AppToken) + + if len(GeneralConfig.HookConfig.SentryConfig.Dsn) > 0 { + sentryHook := log.NewSentryHook(GeneralConfig.HookConfig.SentryConfig.Dsn, GeneralConfig.CorrelationID) + log.RegisterHook(&sentryHook) + } + + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + splunkClient = &splunk.Splunk{} + logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} + log.RegisterHook(logCollector) + } + + if err = log.RegisterANSHookIfConfigured(GeneralConfig.CorrelationID); err != nil { + log.Entry().WithError(err).Warn("failed to set up SAP Alert Notification Service log hook") + } + + validation, err := validation.New(validation.WithJSONNamesForStructFields(), validation.WithPredefinedErrorMessages()) + if err != nil { + return err + } + if err = validation.ValidateStruct(stepConfig); err != nil { + log.SetErrorCategory(log.ErrorConfiguration) + return err + } + + return nil + }, + Run: func(_ *cobra.Command, _ []string) { + stepTelemetryData := telemetry.CustomData{} + stepTelemetryData.ErrorCode = "1" + handler := func() { + config.RemoveVaultSecretFiles() + stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds()) + stepTelemetryData.ErrorCategory = log.GetErrorCategory().String() + stepTelemetryData.PiperCommitHash = GitCommit + telemetryClient.SetData(&stepTelemetryData) + telemetryClient.Send() + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) 
> 0 { + splunkClient.Send(telemetryClient.GetData(), logCollector) + } + } + log.DeferExitHandler(handler) + defer handler() + telemetryClient.Initialize(GeneralConfig.NoTelemetry, STEP_NAME) + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + splunkClient.Initialize(GeneralConfig.CorrelationID, + GeneralConfig.HookConfig.SplunkConfig.Dsn, + GeneralConfig.HookConfig.SplunkConfig.Token, + GeneralConfig.HookConfig.SplunkConfig.Index, + GeneralConfig.HookConfig.SplunkConfig.SendLogs) + } + ascAppUpload(stepConfig, &stepTelemetryData) + stepTelemetryData.ErrorCode = "0" + log.Entry().Info("SUCCESS") + }, + } + + addAscAppUploadFlags(createAscAppUploadCmd, &stepConfig) + return createAscAppUploadCmd +} + +func addAscAppUploadFlags(cmd *cobra.Command, stepConfig *ascAppUploadOptions) { + cmd.Flags().StringVar(&stepConfig.ServerURL, "serverUrl", os.Getenv("PIPER_serverUrl"), "The URL to the ASC backend") + cmd.Flags().StringVar(&stepConfig.AppToken, "appToken", os.Getenv("PIPER_appToken"), "App token used to authenticate with the ASC backend") + cmd.Flags().StringVar(&stepConfig.AppID, "appId", os.Getenv("PIPER_appId"), "The app ID in ASC") + cmd.Flags().StringVar(&stepConfig.FilePath, "filePath", os.Getenv("PIPER_filePath"), "The path to the app binary") + cmd.Flags().StringVar(&stepConfig.JamfTargetSystem, "jamfTargetSystem", os.Getenv("PIPER_jamfTargetSystem"), "The jamf target system") + cmd.Flags().StringVar(&stepConfig.ReleaseAppVersion, "releaseAppVersion", `Pending Release`, "The new app version name to be created in ASC") + cmd.Flags().StringVar(&stepConfig.ReleaseDescription, "releaseDescription", `

TBD

`, "The new release description") + cmd.Flags().StringVar(&stepConfig.ReleaseDate, "releaseDate", os.Getenv("PIPER_releaseDate"), "The new release date (Format: MM/DD/YYYY) Default is the current date") + cmd.Flags().BoolVar(&stepConfig.ReleaseVisible, "releaseVisible", false, "The new release visible flag") + + cmd.MarkFlagRequired("serverUrl") + cmd.MarkFlagRequired("appId") + cmd.MarkFlagRequired("filePath") + cmd.MarkFlagRequired("jamfTargetSystem") +} + +// retrieve step metadata +func ascAppUploadMetadata() config.StepData { + var theMetaData = config.StepData{ + Metadata: config.StepMetadata{ + Name: "ascAppUpload", + Aliases: []config.Alias{}, + Description: "Upload an app to ASC", + }, + Spec: config.StepSpec{ + Inputs: config.StepInputs{ + Secrets: []config.StepSecrets{ + {Name: "ascAppTokenCredentialsId", Description: "Jenkins secret text credential ID containing the authentication token for the ASC app", Type: "jenkins"}, + }, + Parameters: []config.StepParameters{ + { + Name: "serverUrl", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: true, + Aliases: []config.Alias{{Name: "ascServerUrl"}}, + Default: os.Getenv("PIPER_serverUrl"), + }, + { + Name: "appToken", + ResourceRef: []config.ResourceReference{ + { + Name: "ascVaultSecretName", + Type: "vaultSecret", + Default: "asc", + }, + + { + Name: "ascAppTokenCredentialsId", + Type: "secret", + }, + }, + Scope: []string{"PARAMETERS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{{Name: "ascAppToken"}}, + Default: os.Getenv("PIPER_appToken"), + }, + { + Name: "appId", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: true, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_appId"), + }, + { + Name: "filePath", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: true, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_filePath"), + }, + { + Name: "jamfTargetSystem", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: true, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_jamfTargetSystem"), + }, + { + Name: "releaseAppVersion", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: `Pending Release`, + }, + { + Name: "releaseDescription", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: `

TBD

`, + }, + { + Name: "releaseDate", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_releaseDate"), + }, + { + Name: "releaseVisible", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "bool", + Mandatory: false, + Aliases: []config.Alias{}, + Default: false, + }, + }, + }, + }, + } + return theMetaData +} diff --git a/cmd/ascAppUpload_generated_test.go b/cmd/ascAppUpload_generated_test.go new file mode 100644 index 0000000000..aabb64b74f --- /dev/null +++ b/cmd/ascAppUpload_generated_test.go @@ -0,0 +1,17 @@ +package cmd + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestAscAppUploadCommand(t *testing.T) { + t.Parallel() + + testCmd := AscAppUploadCommand() + + // only high level testing performed - details are tested in step generation procedure + assert.Equal(t, "ascAppUpload", testCmd.Use, "command name incorrect") + +} diff --git a/cmd/ascAppUpload_test.go b/cmd/ascAppUpload_test.go new file mode 100644 index 0000000000..95ad8d6c13 --- /dev/null +++ b/cmd/ascAppUpload_test.go @@ -0,0 +1,185 @@ +package cmd + +import ( + "fmt" + "github.com/SAP/jenkins-library/pkg/asc" + "github.com/SAP/jenkins-library/pkg/mock" + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "testing" + "time" +) + +type ascAppUploadMockUtils struct { + *mock.ExecMockRunner + *mock.FilesMock +} + +func newAscAppUploadTestsUtils() ascAppUploadMockUtils { + utils := ascAppUploadMockUtils{ + ExecMockRunner: &mock.ExecMockRunner{}, + FilesMock: &mock.FilesMock{}, + } + return utils +} + +type ascSystemMock struct { + app asc.App + appError error + createReleaseResponse asc.CreateReleaseResponse + createReleaseResponseError error + jamfAppInfo asc.JamfAppInformationResponse + jamfAppInfoError error + uploadIpaError error +} + +func (sys *ascSystemMock) GetAppById(appId string) (asc.App, error) { + return sys.app, sys.appError +} + +func (sys *ascSystemMock) CreateRelease(ascAppId int, version string, description string, releaseDate string, visible bool) (asc.CreateReleaseResponse, error) { + return sys.createReleaseResponse, sys.createReleaseResponseError +} + +func (sys *ascSystemMock) GetJamfAppInfo(bundleId string, jamfTargetSystem string) (asc.JamfAppInformationResponse, error) { + return sys.jamfAppInfo, sys.jamfAppInfoError +} + +func (sys *ascSystemMock) UploadIpa(path string, jamfAppId int, jamfTargetSystem string, bundleId string, ascRelease asc.Release) error { + return sys.uploadIpaError +} + +func TestRunAscAppUpload(t *testing.T) { + t.Parallel() + + t.Run("succesfull upload", func(t *testing.T) { + t.Parallel() + // init + config := ascAppUploadOptions{ + FilePath: "./sample-app.ipa", + JamfTargetSystem: "test", + AppID: "1", + } + + utils := newAscAppUploadTestsUtils() + utils.AddFile("sample-app.ipa", []byte("dummy content")) + + ascClient := &ascSystemMock{ + app: asc.App{ + AppId: 1, + AppName: "Sample App", + BundleId: "sample.bundle.id", + JamfId: "1", + }, + createReleaseResponse: asc.CreateReleaseResponse{ + Status: "success", + Data: asc.Release{ReleaseID: 1, AppID: 1, Version: "version", Description: "description", ReleaseDate: time.Now(), Visible: true}, + }, + jamfAppInfo: asc.JamfAppInformationResponse{ + MobileDeviceApplication: asc.JamfMobileDeviceApplication{ + General: asc.JamfMobileDeviceApplicationGeneral{ + Id: 1, + }, + }, + }, + } + // test + err 
:= runAscAppUpload(&config, nil, utils, ascClient) + + // assert + assert.NoError(t, err) + }) + + t.Run("error during release creation", func(t *testing.T) { + t.Parallel() + // init + config := ascAppUploadOptions{ + FilePath: "./sample-app.ipa", + JamfTargetSystem: "test", + AppID: "1", + } + + utils := newAscAppUploadTestsUtils() + + errorMessage := "Error while creating release" + + ascClient := &ascSystemMock{ + app: asc.App{ + AppId: 1, + AppName: "Sample App", + BundleId: "sample.bundle.id", + JamfId: "1", + }, + createReleaseResponse: asc.CreateReleaseResponse{Status: "failure", Message: errorMessage}, + } + // test + err := runAscAppUpload(&config, nil, utils, ascClient) + + // assert + assert.EqualError(t, err, errorMessage) + }) + + t.Run("error while fetching jamf app info", func(t *testing.T) { + t.Parallel() + // init + config := ascAppUploadOptions{ + FilePath: "./sample-app.ipa", + JamfTargetSystem: "test", + AppID: "1", + } + + utils := newAscAppUploadTestsUtils() + + errorMessage := "Error while fetching jamf app info" + + ascClient := &ascSystemMock{ + app: asc.App{ + AppId: 1, + AppName: "Sample App", + BundleId: "sample.bundle.id", + JamfId: "1", + }, + createReleaseResponse: asc.CreateReleaseResponse{Status: "success", Data: asc.Release{ReleaseID: 1}}, + jamfAppInfoError: errors.New(errorMessage), + } + // test + err := runAscAppUpload(&config, nil, utils, ascClient) + + // assert + assert.EqualError(t, err, fmt.Sprintf("failed to get jamf app info: %s", errorMessage)) + }) + + t.Run("error if jamf app id is 0", func(t *testing.T) { + t.Parallel() + // init + config := ascAppUploadOptions{ + FilePath: "./sample-app.ipa", + JamfTargetSystem: "test", + AppID: "1", + } + + utils := newAscAppUploadTestsUtils() + + ascClient := &ascSystemMock{ + app: asc.App{ + AppId: 1, + AppName: "Sample App", + BundleId: "sample.bundle.id", + JamfId: "1", + }, + createReleaseResponse: asc.CreateReleaseResponse{Status: "success", Data: asc.Release{ReleaseID: 1}}, + jamfAppInfo: asc.JamfAppInformationResponse{ + MobileDeviceApplication: asc.JamfMobileDeviceApplication{ + General: asc.JamfMobileDeviceApplicationGeneral{ + Id: 0, + }, + }, + }, + } + // test + err := runAscAppUpload(&config, nil, utils, ascClient) + + // assert + assert.EqualError(t, err, fmt.Sprintf("failed to get jamf app id")) + }) +} diff --git a/cmd/integrationArtifactTransport.go b/cmd/integrationArtifactTransport.go index cb37bc3f75..fa0435aa68 100644 --- a/cmd/integrationArtifactTransport.go +++ b/cmd/integrationArtifactTransport.go @@ -34,7 +34,7 @@ func runIntegrationArtifactTransport(config *integrationArtifactTransportOptions return CreateIntegrationArtifactTransportRequest(config, apimData) } -//CreateIntegrationArtifactTransportRequest - Create a transport request for Integration Package +// CreateIntegrationArtifactTransportRequest - Create a transport request for Integration Package func CreateIntegrationArtifactTransportRequest(config *integrationArtifactTransportOptions, apistruct apim.Bundle) error { httpMethod := http.MethodPost httpClient := apistruct.Client @@ -90,7 +90,7 @@ func CreateIntegrationArtifactTransportRequest(config *integrationArtifactTransp return errors.Errorf("integration flow deployment failed, response Status code: %v", createTransportRequestResp.StatusCode) } -//pollTransportStatus - Poll the integration package transport processing, return status or error details +// pollTransportStatus - Poll the integration package transport processing, return status or error details func 
pollTransportStatus(processId string, remainingRetries int, config *integrationArtifactTransportOptions, httpClient piperhttp.Sender, apiHost string) error { if remainingRetries <= 0 { @@ -126,7 +126,7 @@ func pollTransportStatus(processId string, remainingRetries int, config *integra return nil } -//GetJSONPayload -return http payload as byte array +// GetJSONPayload -return http payload as byte array func GetCPITransportReqPayload(config *integrationArtifactTransportOptions) (*bytes.Buffer, error) { jsonObj := gabs.New() jsonObj.Set(rand.Intn(5000), "id") @@ -153,7 +153,7 @@ func GetCPITransportReqPayload(config *integrationArtifactTransportOptions) (*by return bytes.NewBuffer(jsonBody), nil } -//getIntegrationTransportProcessingStatus - Get integration package transport request processing Status +// getIntegrationTransportProcessingStatus - Get integration package transport request processing Status func getIntegrationTransportProcessingStatus(config *integrationArtifactTransportOptions, httpClient piperhttp.Sender, apiHost string, processId string) (string, error) { httpMethod := "GET" header := make(http.Header) @@ -192,7 +192,7 @@ func getIntegrationTransportProcessingStatus(config *integrationArtifactTranspor return "", errors.Errorf("failed to get transport request processing status, response Status code: %v", transportProcStatusResp.StatusCode) } -//getTransportError - Get integration package transport failures error details +// getTransportError - Get integration package transport failures error details func getIntegrationTransportError(config *integrationArtifactTransportOptions, httpClient piperhttp.Sender, apiHost string, processId string) (string, error) { httpMethod := "GET" header := make(http.Header) diff --git a/cmd/metadata_generated.go b/cmd/metadata_generated.go index 56f8078e6d..d71d5b79c4 100644 --- a/cmd/metadata_generated.go +++ b/cmd/metadata_generated.go @@ -35,6 +35,7 @@ func GetAllStepMetadata() map[string]config.StepData { "apiProxyList": apiProxyListMetadata(), "apiProxyUpload": apiProxyUploadMetadata(), "artifactPrepareVersion": artifactPrepareVersionMetadata(), + "ascAppUpload": ascAppUploadMetadata(), "awsS3Upload": awsS3UploadMetadata(), "azureBlobUpload": azureBlobUploadMetadata(), "batsExecuteTests": batsExecuteTestsMetadata(), diff --git a/cmd/piper.go b/cmd/piper.go index 556980ff87..23d2ff6389 100644 --- a/cmd/piper.go +++ b/cmd/piper.go @@ -195,6 +195,7 @@ func Execute() { rootCmd.AddCommand(TmsUploadCommand()) rootCmd.AddCommand(TmsExportCommand()) rootCmd.AddCommand(IntegrationArtifactTransportCommand()) + rootCmd.AddCommand(AscAppUploadCommand()) addRootFlags(rootCmd) diff --git a/pkg/asc/asc.go b/pkg/asc/asc.go new file mode 100644 index 0000000000..b68d35dcee --- /dev/null +++ b/pkg/asc/asc.go @@ -0,0 +1,198 @@ +package asc + +import ( + "bytes" + "encoding/json" + "fmt" + piperHttp "github.com/SAP/jenkins-library/pkg/http" + "github.com/SAP/jenkins-library/pkg/log" + "github.com/pkg/errors" + "github.com/sirupsen/logrus" + "io" + "io/ioutil" + "net/http" + url2 "net/url" + "strconv" + "strings" + "time" +) + +type App struct { + AppId int `json:"app_id"` + AppName string `json:"app_name"` + BundleId string `json:"bundle_id"` + JamfId string `json:"jamf_id"` +} + +type JamfAppInformationResponse struct { + MobileDeviceApplication JamfMobileDeviceApplication `json:"mobile_device_application"` +} + +type JamfMobileDeviceApplication struct { + General JamfMobileDeviceApplicationGeneral `json:"general"` +} + +type JamfMobileDeviceApplicationGeneral 
struct { + Id int `json:"id"` +} + +type CreateReleaseResponse struct { + Status string `json:"status"` + Message string `json:"message"` + LastID int `json:"lastID"` + Data Release `json:"data"` +} + +type Release struct { + ReleaseID int `json:"release_id"` + AppID int `json:"app_id"` + Version string `json:"version"` + Description string `json:"description"` + ReleaseDate time.Time `json:"release_date"` + SortOrder any `json:"sort_order"` + Visible bool `json:"visible"` + Created time.Time `json:"created"` + FileMetadata any `json:"file_metadata"` +} + +// SystemInstance is the client communicating with the ASC backend +type SystemInstance struct { + serverURL string + token string + client *piperHttp.Client + logger *logrus.Entry +} + +type System interface { + GetAppById(appId string) (App, error) + CreateRelease(ascAppId int, version string, description string, releaseDate string, visible bool) (CreateReleaseResponse, error) + GetJamfAppInfo(bundleId string, jamfTargetSystem string) (JamfAppInformationResponse, error) + UploadIpa(path string, jamfAppId int, jamfTargetSystem string, bundleId string, ascRelease Release) error +} + +// NewSystemInstance returns a new ASC client for communicating with the backend +func NewSystemInstance(client *piperHttp.Client, serverURL, token string) (*SystemInstance, error) { + loggerInstance := log.Entry().WithField("package", "SAP/jenkins-library/pkg/asc") + + if len(serverURL) == 0 { + return nil, errors.New("serverUrl is not set but required") + } + + if len(token) == 0 { + return nil, errors.New("AppToken is not set but required") + } + + sys := &SystemInstance{ + serverURL: strings.TrimSuffix(serverURL, "/"), + token: token, + client: client, + logger: loggerInstance, + } + + log.RegisterSecret(token) + + options := piperHttp.ClientOptions{ + Token: fmt.Sprintf("Bearer %s", sys.token), + TransportTimeout: time.Second * 15, + } + sys.client.SetOptions(options) + + return sys, nil +} + +func sendRequest(sys *SystemInstance, method, url string, body io.Reader, header http.Header) ([]byte, error) { + var requestBody io.Reader + if body != nil { + closer := ioutil.NopCloser(body) + bodyBytes, _ := ioutil.ReadAll(closer) + requestBody = bytes.NewBuffer(bodyBytes) + defer closer.Close() + } + response, err := sys.client.SendRequest(method, fmt.Sprintf("%v/%v", sys.serverURL, url), requestBody, header, nil) + if err != nil && (response == nil) { + sys.logger.Errorf("HTTP request failed with error: %s", err) + return nil, err + } + + data, _ := ioutil.ReadAll(response.Body) + sys.logger.Debugf("Valid response body: %v", string(data)) + defer response.Body.Close() + return data, nil +} + +// GetAppById returns the app addressed by appId from the ASC backend +func (sys *SystemInstance) GetAppById(appId string) (App, error) { + sys.logger.Debugf("Getting ASC App with ID %v...", appId) + var app App + + data, err := sendRequest(sys, http.MethodGet, fmt.Sprintf("api/v1/apps/%v", appId), nil, nil) + if err != nil { + return app, errors.Wrapf(err, "fetching app %v failed", appId) + } + + json.Unmarshal(data, &app) + return app, nil +} + +// CreateRelease creates a release in ASC +func (sys *SystemInstance) CreateRelease(ascAppId int, version string, description string, releaseDate string, visible bool) (CreateReleaseResponse, error) { + + var createReleaseResponse CreateReleaseResponse + + if len(releaseDate) == 0 { + currentTime := time.Now() + releaseDate = currentTime.Format("01/02/2006") + } + + jsonData := map[string]string{ + "version": version, + 
"description": description, + "release_date": releaseDate, + "visible": strconv.FormatBool(visible), + } + + jsonValue, err := json.Marshal(jsonData) + if err != nil { + return createReleaseResponse, errors.Wrap(err, "error marshalling release payload") + } + + header := http.Header{} + header.Set("Content-Type", "application/json") + + response, err := sendRequest(sys, http.MethodPost, fmt.Sprintf("api/v1/apps/%v/releases", ascAppId), bytes.NewBuffer(jsonValue), header) + if err != nil { + return createReleaseResponse, errors.Wrapf(err, "creating release") + } + + json.Unmarshal(response, &createReleaseResponse) + return createReleaseResponse, nil +} + +// GetJamfAppInfo fetches information about the app from Jamf +func (sys *SystemInstance) GetJamfAppInfo(bundleId string, jamfTargetSystem string) (JamfAppInformationResponse, error) { + + sys.logger.Debugf("Getting Jamf App Info by ID %v from jamf %v system...", bundleId, jamfTargetSystem) + var jamfAppInformationResponse JamfAppInformationResponse + + data, err := sendRequest(sys, http.MethodPost, fmt.Sprintf("api/v1/jamf/%v/info?system=%v", bundleId, url2.QueryEscape(jamfTargetSystem)), nil, nil) + if err != nil { + return jamfAppInformationResponse, errors.Wrapf(err, "fetching jamf %v app info for %v failed", jamfTargetSystem, bundleId) + } + + json.Unmarshal(data, &jamfAppInformationResponse) + return jamfAppInformationResponse, nil + +} + +// UploadIpa uploads the ipa to ASC and therewith to Jamf +func (sys *SystemInstance) UploadIpa(path string, jamfAppId int, jamfTargetSystem string, bundleId string, ascRelease Release) error { + + url := fmt.Sprintf("%v/api/v1/jamf/%v/ipa?app_id=%v&version=%v&system=%v&release_id=%v&bundle_id=%v", sys.serverURL, jamfAppId, ascRelease.AppID, url2.QueryEscape(ascRelease.Version), url2.QueryEscape(jamfTargetSystem), ascRelease.ReleaseID, url2.QueryEscape(bundleId)) + _, err := sys.client.UploadFile(url, path, "file", nil, nil, "form") + + if err != nil { + return errors.Wrap(err, "failed to upload ipa to asc") + } + + return nil +} diff --git a/resources/metadata/ascAppUpload.yaml b/resources/metadata/ascAppUpload.yaml new file mode 100644 index 0000000000..00f5ce51b6 --- /dev/null +++ b/resources/metadata/ascAppUpload.yaml @@ -0,0 +1,93 @@ +metadata: + name: ascAppUpload + description: Upload an app to ASC + longDescription: | + With this step you can upload an app to ASC. + It creates a new release note in ASC and uploads the binary to ASC and therewith to Jamf. + For more information about ASC, check out [Application Support Center](https://github.com/SAP/application-support-center). 
+spec: + inputs: + secrets: + - name: ascAppTokenCredentialsId + description: "Jenkins secret text credential ID containing the authentication token for the ASC app" + type: jenkins + params: + - name: serverUrl + type: string + description: "The URL to the ASC backend" + aliases: + - name: ascServerUrl + scope: + - PARAMETERS + - STAGES + - STEPS + mandatory: true + - name: appToken + type: string + description: "App token used to authenticate with the ASC backend" + scope: + - PARAMETERS + secret: true + resourceRef: + - type: vaultSecret + name: ascVaultSecretName + default: asc + - name: ascAppTokenCredentialsId + type: secret + aliases: + - name: ascAppToken + - name: appId + type: string + description: "The app ID in ASC" + scope: + - PARAMETERS + - STAGES + - STEPS + mandatory: true + - name: filePath + type: string + description: "The path to the app binary" + scope: + - PARAMETERS + - STAGES + - STEPS + mandatory: true + - name: jamfTargetSystem + type: string + description: "The jamf target system" + scope: + - PARAMETERS + - STAGES + - STEPS + mandatory: true + - name: releaseAppVersion + type: string + description: "The new app version name to be created in ASC" + scope: + - PARAMETERS + - STAGES + - STEPS + default: "Pending Release" + - name: releaseDescription + type: string + description: "The new release description" + scope: + - PARAMETERS + - STAGES + - STEPS + default: "

TBD

" + - name: releaseDate + type: string + description: "The new release date (Format: MM/DD/YYYY) Default is the current date" + scope: + - PARAMETERS + - STAGES + - STEPS + - name: releaseVisible + type: bool + description: "The new release visible flag" + scope: + - PARAMETERS + - STAGES + - STEPS + default: false From 5d8e89b08ab5ce78dc00645593b93abaf6deaf12 Mon Sep 17 00:00:00 2001 From: Pavel Busko Date: Tue, 18 Apr 2023 09:10:38 +0200 Subject: [PATCH 17/23] feat(cnbBuild): use SHA256 hashed values for redacted telemetry properties (#4328) * feat(cnbBuild): use SHA256 hashed values for redacted telemetry properties * update unit tests --- cmd/cnbBuild_test.go | 5 ++--- pkg/cnbutils/privacy/privacy.go | 8 +++++++- pkg/cnbutils/privacy/privacy_test.go | 13 +++++++++---- 3 files changed, 18 insertions(+), 8 deletions(-) diff --git a/cmd/cnbBuild_test.go b/cmd/cnbBuild_test.go index cff1195784..e24de5ea47 100644 --- a/cmd/cnbBuild_test.go +++ b/cmd/cnbBuild_test.go @@ -518,8 +518,7 @@ uri = "some-buildpack"`)) assert.Contains(t, customData.Data[0].Buildpacks.FromConfig, "paketobuildpacks/java") assert.NotContains(t, customData.Data[0].Buildpacks.FromProjectDescriptor, "paketobuildpacks/java") - assert.Contains(t, customData.Data[0].Buildpacks.FromProjectDescriptor, "") - assert.NotContains(t, customData.Data[0].Buildpacks.Overall, "") + assert.Contains(t, customData.Data[0].Buildpacks.FromProjectDescriptor, "bcc73ab1f0a0d3fb0d1bf2b6df5510a25ccd14a761dbc0f5044ea24ead30452b") assert.Contains(t, customData.Data[0].Buildpacks.Overall, "paketobuildpacks/java") assert.True(t, customData.Data[0].ProjectDescriptor.Used) @@ -639,7 +638,7 @@ uri = "some-buildpack" assert.Equal(t, "11", customData.Data[0].BuildEnv.KeyValues["BP_NODE_VERSION"]) assert.NotContains(t, customData.Data[0].BuildEnv.KeyValues, "PROJECT_KEY") - assert.Contains(t, customData.Data[0].Buildpacks.Overall, "") + assert.Contains(t, customData.Data[0].Buildpacks.Overall, "bcc73ab1f0a0d3fb0d1bf2b6df5510a25ccd14a761dbc0f5044ea24ead30452b") }) t.Run("success case (multiple images configured)", func(t *testing.T) { diff --git a/pkg/cnbutils/privacy/privacy.go b/pkg/cnbutils/privacy/privacy.go index 98f4093896..d6ae656d50 100644 --- a/pkg/cnbutils/privacy/privacy.go +++ b/pkg/cnbutils/privacy/privacy.go @@ -1,6 +1,8 @@ package privacy import ( + "crypto/sha256" + "fmt" "strings" containerName "github.com/google/go-containerregistry/pkg/name" @@ -37,6 +39,8 @@ func FilterBuilder(builder string) string { // FilterBuildpacks filters a list of buildpacks to redact Personally Identifiable Information (PII) like the hostname of a personal registry func FilterBuildpacks(buildpacks []string) []string { result := make([]string, 0, len(buildpacks)) + hash := sha256.New() + for _, buildpack := range buildpacks { ref, err := containerName.ParseReference(strings.ToLower(buildpack)) if err != nil { @@ -58,7 +62,9 @@ func FilterBuildpacks(buildpacks []string) []string { if allowed { result = append(result, buildpack) } else { - result = append(result, "") + hash.Write([]byte(buildpack)) + result = append(result, fmt.Sprintf("%x", hash.Sum(nil))) + hash.Reset() } } return result diff --git a/pkg/cnbutils/privacy/privacy_test.go b/pkg/cnbutils/privacy/privacy_test.go index 65107cf1cb..07d1370729 100644 --- a/pkg/cnbutils/privacy/privacy_test.go +++ b/pkg/cnbutils/privacy/privacy_test.go @@ -57,6 +57,7 @@ func TestCnbPrivacy_FilterBuildpacks(t *testing.T) { t.Run("filters others", func(t *testing.T) { images := []string{ "test/nodejs:v1", + 
"test/nodejs:v1", // SHA should be the same for multiple occurences "my-mirror.de/paketobuildpacks/nodejs:v1", "gcr.io/my-project/paketo-buildpacks/nodejs:v1", } @@ -64,9 +65,13 @@ func TestCnbPrivacy_FilterBuildpacks(t *testing.T) { filtered := privacy.FilterBuildpacks(images) require.Len(t, filtered, len(images)) - for _, image := range filtered { - assert.Equal(t, "", image) - } + + assert.ElementsMatch(t, filtered, []string{ + "6ea013d746199ccc0e48e0b4984a6d9357105b82f936ecf18d15786805ac892f", + "6ea013d746199ccc0e48e0b4984a6d9357105b82f936ecf18d15786805ac892f", + "66131ef922cf26b1500e54a74827f051b43857bcf8d0596593c182548f7d4bd6", + "4fd8f0a950aacd7e428c79fce6f51bb1fbf0ab15caf4aca7accc18609acd79b1", + }) }) t.Run("fails gracefully on parse error", func(t *testing.T) { @@ -133,7 +138,7 @@ func TestCnbPrivacy_FilterBuilder(t *testing.T) { filteredBuilder := privacy.FilterBuilder(builder) - assert.Equal(t, "", filteredBuilder) + assert.Equal(t, "70278d9360533fa4978e5c50aa79bc35a8c0167a353e00521202feeaa09a305b", filteredBuilder) }) } From 52944953eb8b2d0923ca7bd7745be8d126efe7d1 Mon Sep 17 00:00:00 2001 From: Maurice Breit Date: Tue, 18 Apr 2023 16:50:15 +0200 Subject: [PATCH 18/23] docs: add ascAppUpload (#4330) --- documentation/docs/steps/ascAppUpload.md | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 documentation/docs/steps/ascAppUpload.md diff --git a/documentation/docs/steps/ascAppUpload.md b/documentation/docs/steps/ascAppUpload.md new file mode 100644 index 0000000000..3cfd714b93 --- /dev/null +++ b/documentation/docs/steps/ascAppUpload.md @@ -0,0 +1,20 @@ +# ${docGenStepName} + +## ${docGenDescription} + +## Prerequisites + +* **ASC Instance** - have an ASC instance set up +* **ASC App** - have an app in ASC + +## ${docGenParameters} + +## ${docGenConfiguration} + +## ${docJenkinsPluginDependencies} + +## Exceptions + +none + +## Examples From 01cfb07d15f159b0c8467d0bafd6db2643fc82fb Mon Sep 17 00:00:00 2001 From: Ashly Mathew Date: Mon, 24 Apr 2023 09:09:31 +0200 Subject: [PATCH 19/23] feat(gradle): Support for more than one task/flags (#4329) * feat(gradle) support task list * Change parameter name to buildFlags to align with other piper steps' --- cmd/gradleExecuteBuild.go | 2 ++ cmd/gradleExecuteBuild_generated.go | 15 +++++++-- cmd/gradleExecuteBuild_test.go | 39 ++++++++++++++++++++++ pkg/gradle/gradle.go | 10 ++++-- resources/metadata/gradleExecuteBuild.yaml | 23 +++++++++++-- 5 files changed, 83 insertions(+), 6 deletions(-) diff --git a/cmd/gradleExecuteBuild.go b/cmd/gradleExecuteBuild.go index 3daf77bb01..6535e4d5b1 100644 --- a/cmd/gradleExecuteBuild.go +++ b/cmd/gradleExecuteBuild.go @@ -177,9 +177,11 @@ func runGradleExecuteBuild(config *gradleExecuteBuildOptions, telemetryData *tel } // gradle build + // if user provides BuildFlags, it is respected over a single Task gradleOptions := &gradle.ExecuteOptions{ BuildGradlePath: config.Path, Task: config.Task, + BuildFlags: config.BuildFlags, UseWrapper: config.UseWrapper, } if _, err := gradle.Execute(gradleOptions, utils); err != nil { diff --git a/cmd/gradleExecuteBuild_generated.go b/cmd/gradleExecuteBuild_generated.go index dc90111206..fbe22fd65e 100644 --- a/cmd/gradleExecuteBuild_generated.go +++ b/cmd/gradleExecuteBuild_generated.go @@ -36,6 +36,7 @@ type gradleExecuteBuildOptions struct { ApplyPublishingForAllProjects bool `json:"applyPublishingForAllProjects,omitempty"` ExcludeCreateBOMForProjects []string `json:"excludeCreateBOMForProjects,omitempty"` ExcludePublishingForProjects 
[]string `json:"excludePublishingForProjects,omitempty"` + BuildFlags []string `json:"buildFlags,omitempty"` } type gradleExecuteBuildReports struct { @@ -118,7 +119,7 @@ func GradleExecuteBuildCommand() *cobra.Command { var createGradleExecuteBuildCmd = &cobra.Command{ Use: STEP_NAME, Short: "This step runs a gradle build command with parameters provided to the step.", - Long: `This step runs a gradle build command with parameters provided to the step.`, + Long: `This step runs a gradle build command with parameters provided to the step.Supports execution of gradle tasks with or without wrapper.Gradle tasks and flags can be specified via 'task' or 'buildFlags' parameter. If both are not specified 'build' task will run by default.`, PreRunE: func(cmd *cobra.Command, _ []string) error { startTime = time.Now() log.SetStepName(STEP_NAME) @@ -202,7 +203,7 @@ func GradleExecuteBuildCommand() *cobra.Command { func addGradleExecuteBuildFlags(cmd *cobra.Command, stepConfig *gradleExecuteBuildOptions) { cmd.Flags().StringVar(&stepConfig.Path, "path", os.Getenv("PIPER_path"), "Path to the folder with build.gradle (or build.gradle.kts) file which should be executed.") - cmd.Flags().StringVar(&stepConfig.Task, "task", `build`, "Gradle task that should be executed.") + cmd.Flags().StringVar(&stepConfig.Task, "task", `build`, "A single gradle task that should be executed. If you prefer more than one, use 'buildFlags' parameter. If 'buildFlags' parameter is specified, this parameter will be ignored.") cmd.Flags().BoolVar(&stepConfig.Publish, "publish", false, "Configures gradle to publish the artifact to a repository.") cmd.Flags().StringVar(&stepConfig.RepositoryURL, "repositoryUrl", os.Getenv("PIPER_repositoryUrl"), "Url to the repository to which the project artifacts should be published.") cmd.Flags().StringVar(&stepConfig.RepositoryPassword, "repositoryPassword", os.Getenv("PIPER_repositoryPassword"), "Password for the repository to which the project artifacts should be published.") @@ -215,6 +216,7 @@ func addGradleExecuteBuildFlags(cmd *cobra.Command, stepConfig *gradleExecuteBui cmd.Flags().BoolVar(&stepConfig.ApplyPublishingForAllProjects, "applyPublishingForAllProjects", false, "If set to false publishing logic will be applied in 'rootProject' directive, otherwise 'allprojects' will be directive used") cmd.Flags().StringSliceVar(&stepConfig.ExcludeCreateBOMForProjects, "excludeCreateBOMForProjects", []string{}, "Defines which projects/subprojects will be ignored during bom creation. Only if applyCreateBOMForAllProjects is set to true") cmd.Flags().StringSliceVar(&stepConfig.ExcludePublishingForProjects, "excludePublishingForProjects", []string{}, "Defines which projects/subprojects will be ignored during publishing. Only if applyCreateBOMForAllProjects is set to true") + cmd.Flags().StringSliceVar(&stepConfig.BuildFlags, "buildFlags", []string{}, "Defines a list of tasks and/or arguments to be provided for gradle in the respective order to be executed. 
This list takes precedence if specified over 'task' parameter") } @@ -385,6 +387,15 @@ func gradleExecuteBuildMetadata() config.StepData { Aliases: []config.Alias{}, Default: []string{}, }, + { + Name: "buildFlags", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "[]string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: []string{}, + }, }, }, Containers: []config.Container{ diff --git a/cmd/gradleExecuteBuild_test.go b/cmd/gradleExecuteBuild_test.go index 3b53699f2a..957c430416 100644 --- a/cmd/gradleExecuteBuild_test.go +++ b/cmd/gradleExecuteBuild_test.go @@ -75,6 +75,25 @@ func TestRunGradleExecuteBuild(t *testing.T) { assert.Equal(t, mock.ExecCall{Exec: "gradle", Params: []string{"build", "-p", "path/to"}}, utils.Calls[0]) }) + t.Run("success case - build with flags", func(t *testing.T) { + utils := gradleExecuteBuildMockUtils{ + ExecMockRunner: &mock.ExecMockRunner{}, + FilesMock: &mock.FilesMock{}, + } + utils.FilesMock.AddFile("path/to/build.gradle", []byte{}) + options := &gradleExecuteBuildOptions{ + Path: "path/to", + Task: "build", + BuildFlags: []string{"clean", "build", "-x", "test"}, + UseWrapper: false, + } + + err := runGradleExecuteBuild(options, nil, utils, pipelineEnv) + assert.NoError(t, err) + assert.Equal(t, 1, len(utils.Calls)) + assert.Equal(t, mock.ExecCall{Exec: "gradle", Params: []string{"clean", "build", "-x", "test", "-p", "path/to"}}, utils.Calls[0]) + }) + t.Run("success case - bom creation", func(t *testing.T) { utils := gradleExecuteBuildMockUtils{ ExecMockRunner: &mock.ExecMockRunner{}, @@ -168,6 +187,26 @@ func TestRunGradleExecuteBuild(t *testing.T) { assert.Contains(t, err.Error(), "failed to build") }) + t.Run("failed case - build with flags", func(t *testing.T) { + utils := gradleExecuteBuildMockUtils{ + ExecMockRunner: &mock.ExecMockRunner{ + ShouldFailOnCommand: map[string]error{"gradle clean build -x test -p path/to": errors.New("failed to build with flags")}, + }, + FilesMock: &mock.FilesMock{}, + } + utils.FilesMock.AddFile("path/to/build.gradle", []byte{}) + options := &gradleExecuteBuildOptions{ + Path: "path/to", + Task: "build", + BuildFlags: []string{"clean", "build", "-x", "test"}, + UseWrapper: false, + } + + err := runGradleExecuteBuild(options, nil, utils, pipelineEnv) + assert.Error(t, err) + assert.Contains(t, err.Error(), "failed to build with flags") + }) + t.Run("failed case - bom creation", func(t *testing.T) { utils := gradleExecuteBuildMockUtils{ ExecMockRunner: &mock.ExecMockRunner{ diff --git a/pkg/gradle/gradle.go b/pkg/gradle/gradle.go index 3cf253b2dd..50ff3f98a2 100644 --- a/pkg/gradle/gradle.go +++ b/pkg/gradle/gradle.go @@ -35,6 +35,7 @@ type Utils interface { type ExecuteOptions struct { BuildGradlePath string `json:"path,omitempty"` Task string `json:"task,omitempty"` + BuildFlags []string `json:"buildFlags,omitempty"` InitScriptContent string `json:"initScriptContent,omitempty"` UseWrapper bool `json:"useWrapper,omitempty"` ProjectProperties map[string]string `json:"projectProperties,omitempty"` @@ -100,8 +101,13 @@ func Execute(options *ExecuteOptions, utils Utils) (string, error) { func getParametersFromOptions(options *ExecuteOptions) []string { var parameters []string - // default value for task is 'build', so no necessary to checking for empty parameter - parameters = append(parameters, options.Task) + if len(options.BuildFlags) > 0 { + // respect the list of tasks/flags user wants to execute + parameters = append(parameters, 
options.BuildFlags...) + } else { + // default value for task is 'build', so no necessary to checking for empty parameter + parameters = append(parameters, options.Task) + } // resolve path for build.gradle execution if options.BuildGradlePath != "" { diff --git a/resources/metadata/gradleExecuteBuild.yaml b/resources/metadata/gradleExecuteBuild.yaml index 3c957c609e..248fea9ba7 100644 --- a/resources/metadata/gradleExecuteBuild.yaml +++ b/resources/metadata/gradleExecuteBuild.yaml @@ -1,7 +1,7 @@ metadata: name: gradleExecuteBuild description: This step runs a gradle build command with parameters provided to the step. - longDescription: This step runs a gradle build command with parameters provided to the step. + longDescription: This step runs a gradle build command with parameters provided to the step.Supports execution of gradle tasks with or without wrapper.Gradle tasks and flags can be specified via 'task' or 'buildFlags' parameter. If both are not specified 'build' task will run by default. spec: inputs: params: @@ -18,7 +18,7 @@ spec: mandatory: false - name: task type: string - description: Gradle task that should be executed. + description: A single gradle task that should be executed. If you prefer more than one, use 'buildFlags' parameter. If 'buildFlags' parameter is specified, this parameter will be ignored. scope: - PARAMETERS - STAGES @@ -137,6 +137,25 @@ spec: - STAGES - STEPS type: "[]string" + - name: buildFlags + type: "[]string" + description: Defines a list of tasks and/or arguments to be provided for gradle in the respective order to be executed. This list takes precedence if specified over 'task' parameter + longDescription: | + To run command `gradle clean build -x test` , it can be achieved as follows + + ``` + steps: + gradleExecuteBuild: + buildFlags: + - clean + - build + - -x + - test + ``` + scope: + - PARAMETERS + - STAGES + - STEPS outputs: resources: - name: reports From 54d543a2c36b16760713f070d8e0ce5e662001e8 Mon Sep 17 00:00:00 2001 From: kamil7108 <43809604+kamil7108@users.noreply.github.com> Date: Mon, 24 Apr 2023 10:26:50 +0200 Subject: [PATCH 20/23] A retry has been added to the curl that downloads the piper (#4315) Co-authored-by: Oliver Feldmann --- src/com/sap/piper/PiperGoUtils.groovy | 2 +- .../com/sap/piper/PiperGoUtilsTest.groovy | 24 +++++++++---------- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/src/com/sap/piper/PiperGoUtils.groovy b/src/com/sap/piper/PiperGoUtils.groovy index e10a2bf569..e027afbe5a 100644 --- a/src/com/sap/piper/PiperGoUtils.groovy +++ b/src/com/sap/piper/PiperGoUtils.groovy @@ -84,7 +84,7 @@ class PiperGoUtils implements Serializable { private boolean downloadGoBinary(url) { try { - def httpStatus = steps.sh(returnStdout: true, script: "curl --insecure --silent --location --write-out '%{http_code}' --output ${piperExecutable} '${url}'") + def httpStatus = steps.sh(returnStdout: true, script: "curl --insecure --silent --retry 5 --retry-max-time 240 --location --write-out '%{http_code}' --output ${piperExecutable} '${url}'") if (httpStatus == '200') { steps.sh(script: "chmod +x ${piperExecutable}") diff --git a/test/groovy/com/sap/piper/PiperGoUtilsTest.groovy b/test/groovy/com/sap/piper/PiperGoUtilsTest.groovy index c2ac81365f..461c91dfac 100644 --- a/test/groovy/com/sap/piper/PiperGoUtilsTest.groovy +++ b/test/groovy/com/sap/piper/PiperGoUtilsTest.groovy @@ -63,11 +63,11 @@ class PiperGoUtilsTest extends BasePiperTest { }) shellCallRule.setReturnValue('[ -x ./piper ]', 1) - 
shellCallRule.setReturnValue('curl --insecure --silent --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/latest/download/piper_master\'', '200') + shellCallRule.setReturnValue('curl --insecure --silent --retry 5 --retry-max-time 240 --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/latest/download/piper_master\'', '200') piperGoUtils.unstashPiperBin() assertThat(shellCallRule.shell.size(), is(4)) - assertThat(shellCallRule.shell[1].toString(), is('curl --insecure --silent --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/latest/download/piper_master\'')) + assertThat(shellCallRule.shell[1].toString(), is('curl --insecure --silent --retry 5 --retry-max-time 240 --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/latest/download/piper_master\'')) assertThat(shellCallRule.shell[2].toString(), is('chmod +x piper')) assertThat(shellCallRule.shell[3].toString(), is('./piper version')) } @@ -84,11 +84,11 @@ class PiperGoUtilsTest extends BasePiperTest { }) shellCallRule.setReturnValue('[ -x ./piper ]', 1) - shellCallRule.setReturnValue('curl --insecure --silent --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/download/testTag/piper\'', '200') + shellCallRule.setReturnValue('curl --insecure --silent --retry 5 --retry-max-time 240 --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/download/testTag/piper\'', '200') piperGoUtils.unstashPiperBin() assertThat(shellCallRule.shell.size(), is(4)) - assertThat(shellCallRule.shell[1].toString(), is('curl --insecure --silent --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/download/testTag/piper\'')) + assertThat(shellCallRule.shell[1].toString(), is('curl --insecure --silent --retry 5 --retry-max-time 240 --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/download/testTag/piper\'')) assertThat(shellCallRule.shell[2].toString(), is('chmod +x piper')) assertThat(shellCallRule.shell[3].toString(), is('./piper version')) } @@ -101,8 +101,8 @@ class PiperGoUtilsTest extends BasePiperTest { shellCallRule.setReturnValue('[ -x ./piper ]', 1) shellCallRule.setReturnValue('./piper version', "1.2.3") - shellCallRule.setReturnValue('curl --insecure --silent --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/download/notAvailable/piper\'', '404') - shellCallRule.setReturnValue('curl --insecure --silent --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/latest/download/piper_master\'', '200') + shellCallRule.setReturnValue('curl --insecure --silent --retry 5 --retry-max-time 240 --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/download/notAvailable/piper\'', '404') + shellCallRule.setReturnValue('curl --insecure --silent --retry 5 --retry-max-time 240 --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/latest/download/piper_master\'', '200') // this mocks utils.unstash - mimic stash not existing helper.registerAllowedMethod("unstash", [String.class], { stashFileName -> @@ -112,8 +112,8 @@ class PiperGoUtilsTest extends 
BasePiperTest { piperGoUtils.unstashPiperBin() assertThat(shellCallRule.shell.size(), is(5)) assertThat(shellCallRule.shell[0].toString(), is('[ -x ./piper ]')) - assertThat(shellCallRule.shell[1].toString(), is('curl --insecure --silent --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/download/notAvailable/piper\'')) - assertThat(shellCallRule.shell[2].toString(), is('curl --insecure --silent --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/latest/download/piper_master\'')) + assertThat(shellCallRule.shell[1].toString(), is('curl --insecure --silent --retry 5 --retry-max-time 240 --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/download/notAvailable/piper\'')) + assertThat(shellCallRule.shell[2].toString(), is('curl --insecure --silent --retry 5 --retry-max-time 240 --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/latest/download/piper_master\'')) assertThat(shellCallRule.shell[3].toString(), is('chmod +x piper')) assertThat(shellCallRule.shell[4].toString(), is ('./piper version')) } @@ -124,8 +124,8 @@ class PiperGoUtilsTest extends BasePiperTest { piperGoUtils.metaClass.getLibrariesInfo = {-> return [[name: 'piper-lib-os', version: 'notAvailable']]} shellCallRule.setReturnValue('[ -x ./piper ]', 1) - shellCallRule.setReturnValue('curl --insecure --silent --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/download/notAvailable/piper\'', '404') - shellCallRule.setReturnValue('curl --insecure --silent --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/latest/download/piper_master\'', '500') + shellCallRule.setReturnValue('curl --insecure --silent --retry 5 --retry-max-time 240 --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/download/notAvailable/piper\'', '404') + shellCallRule.setReturnValue('curl --insecure --silent --retry 5 --retry-max-time 240 --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/latest/download/piper_master\'', '500') helper.registerAllowedMethod("unstash", [String.class], { stashFileName -> return [] @@ -141,8 +141,8 @@ class PiperGoUtilsTest extends BasePiperTest { piperGoUtils.metaClass.getLibrariesInfo = {-> return [[name: 'piper-lib-os', version: 'notAvailable']]} shellCallRule.setReturnValue('[ -x ./piper ]', 1) - shellCallRule.setReturnValue('curl --insecure --silent --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/download/notAvailable/piper\'', '404') - shellCallRule.setReturnValue('curl --insecure --silent --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/latest/download/piper_master\'', '500') + shellCallRule.setReturnValue('curl --insecure --silent --retry 5 --retry-max-time 240 --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/download/notAvailable/piper\'', '404') + shellCallRule.setReturnValue('curl --insecure --silent --retry 5 --retry-max-time 240 --location --write-out \'%{http_code}\' --output piper \'https://github.com/SAP/jenkins-library/releases/latest/download/piper_master\'', '500') helper.registerAllowedMethod("unstash", [String.class], { 
stashFileName -> return [] From 17c9f5ca65ac6abd20bbc8ad0b0eb142b8e31622 Mon Sep 17 00:00:00 2001 From: Anil Keshav Date: Mon, 24 Apr 2023 14:58:24 +0200 Subject: [PATCH 21/23] feat (kuberenetesDeploy) allow adding a timeout to the helm test commands (#4310) * adding a timeout for helm test * extending test cases * Upadate the helmTestWaitSeconds parameter * Add timeout parameter for helm test command * Update tests --------- Co-authored-by: Vyacheslav Starostin --- cmd/kubernetesDeploy.go | 8 ++++++++ cmd/kubernetesDeploy_generated.go | 11 +++++++++++ cmd/kubernetesDeploy_test.go | 6 ++++++ resources/metadata/kubernetesDeploy.yaml | 8 ++++++++ 4 files changed, 33 insertions(+) diff --git a/cmd/kubernetesDeploy.go b/cmd/kubernetesDeploy.go index 2544834d78..c1659117a5 100644 --- a/cmd/kubernetesDeploy.go +++ b/cmd/kubernetesDeploy.go @@ -226,6 +226,14 @@ func runHelmDeploy(config kubernetesDeployOptions, utils kubernetes.DeployUtils, "--namespace", config.Namespace, } + if config.DeployTool == "helm" { + testParams = append(testParams, "--timeout", strconv.Itoa(config.HelmTestWaitSeconds)) + } + + if config.DeployTool == "helm3" { + testParams = append(testParams, "--timeout", fmt.Sprintf("%vs", config.HelmTestWaitSeconds)) + } + if config.ShowTestLogs { testParams = append( testParams, diff --git a/cmd/kubernetesDeploy_generated.go b/cmd/kubernetesDeploy_generated.go index cf4165cccc..0a3a27acc2 100644 --- a/cmd/kubernetesDeploy_generated.go +++ b/cmd/kubernetesDeploy_generated.go @@ -31,6 +31,7 @@ type kubernetesDeployOptions struct { DeployTool string `json:"deployTool,omitempty" validate:"possible-values=kubectl helm helm3"` ForceUpdates bool `json:"forceUpdates,omitempty"` HelmDeployWaitSeconds int `json:"helmDeployWaitSeconds,omitempty"` + HelmTestWaitSeconds int `json:"helmTestWaitSeconds,omitempty"` HelmValues []string `json:"helmValues,omitempty"` ValuesMapping map[string]interface{} `json:"valuesMapping,omitempty"` GithubToken string `json:"githubToken,omitempty"` @@ -185,6 +186,7 @@ func addKubernetesDeployFlags(cmd *cobra.Command, stepConfig *kubernetesDeployOp cmd.Flags().StringVar(&stepConfig.DeployTool, "deployTool", `kubectl`, "Defines the tool which should be used for deployment.") cmd.Flags().BoolVar(&stepConfig.ForceUpdates, "forceUpdates", true, "Adds `--force` flag to a helm resource update command or to a kubectl replace command") cmd.Flags().IntVar(&stepConfig.HelmDeployWaitSeconds, "helmDeployWaitSeconds", 300, "Number of seconds before helm deploy returns.") + cmd.Flags().IntVar(&stepConfig.HelmTestWaitSeconds, "helmTestWaitSeconds", 300, "Number of seconds to wait for any individual Kubernetes operation (like Jobs for hooks). 
See https://helm.sh/docs/helm/helm_test/#options for further details") cmd.Flags().StringSliceVar(&stepConfig.HelmValues, "helmValues", []string{}, "List of helm values as YAML file reference or URL (as per helm parameter description for `-f` / `--values`)") cmd.Flags().StringVar(&stepConfig.GithubToken, "githubToken", os.Getenv("PIPER_githubToken"), "GitHub personal access token as per https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line") @@ -416,6 +418,15 @@ func kubernetesDeployMetadata() config.StepData { Aliases: []config.Alias{}, Default: 300, }, + { + Name: "helmTestWaitSeconds", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "int", + Mandatory: false, + Aliases: []config.Alias{}, + Default: 300, + }, { Name: "helmValues", ResourceRef: []config.ResourceReference{}, diff --git a/cmd/kubernetesDeploy_test.go b/cmd/kubernetesDeploy_test.go index 0fd3f2f12f..f8796192ed 100644 --- a/cmd/kubernetesDeploy_test.go +++ b/cmd/kubernetesDeploy_test.go @@ -402,6 +402,7 @@ func TestRunKubernetesDeploy(t *testing.T) { Namespace: "deploymentNamespace", DockerConfigJSON: ".pipeline/docker/config.json", RunHelmTests: true, + HelmTestWaitSeconds: 400, } dockerConfigJSON := `{"kind": "Secret","data":{".dockerconfigjson": "ThisIsOurBase64EncodedSecret=="}}` @@ -459,6 +460,8 @@ func TestRunKubernetesDeploy(t *testing.T) { "deploymentName", "--namespace", "deploymentNamespace", + "--timeout", + "400s", }, mockUtils.Calls[2].Params, "Wrong test parameters") }) @@ -481,6 +484,7 @@ func TestRunKubernetesDeploy(t *testing.T) { DockerConfigJSON: ".pipeline/docker/config.json", RunHelmTests: true, ShowTestLogs: true, + HelmTestWaitSeconds: 400, } dockerConfigJSON := `{"kind": "Secret","data":{".dockerconfigjson": "ThisIsOurBase64EncodedSecret=="}}` @@ -538,6 +542,8 @@ func TestRunKubernetesDeploy(t *testing.T) { "deploymentName", "--namespace", "deploymentNamespace", + "--timeout", + "400s", "--logs", }, mockUtils.Calls[2].Params, "Wrong test parameters") }) diff --git a/resources/metadata/kubernetesDeploy.yaml b/resources/metadata/kubernetesDeploy.yaml index 8bc1ea5336..ab0024cf21 100644 --- a/resources/metadata/kubernetesDeploy.yaml +++ b/resources/metadata/kubernetesDeploy.yaml @@ -279,6 +279,14 @@ spec: - STAGES - STEPS default: 300 + - name: helmTestWaitSeconds + type: int + description: Number of seconds to wait for any individual Kubernetes operation (like Jobs for hooks). See https://helm.sh/docs/helm/helm_test/#options for further details + scope: + - PARAMETERS + - STAGES + - STEPS + default: 300 - name: helmValues type: "[]string" description: List of helm values as YAML file reference or URL (as per helm parameter description for `-f` / `--values`) From 58e9344eb08b3c6f979849939d0a57979e026f27 Mon Sep 17 00:00:00 2001 From: Ashly Mathew Date: Thu, 27 Apr 2023 09:09:52 +0200 Subject: [PATCH 22/23] Add description to long description for gradle step (#4336) --- resources/metadata/gradleExecuteBuild.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/resources/metadata/gradleExecuteBuild.yaml b/resources/metadata/gradleExecuteBuild.yaml index 248fea9ba7..e44c92f46e 100644 --- a/resources/metadata/gradleExecuteBuild.yaml +++ b/resources/metadata/gradleExecuteBuild.yaml @@ -141,6 +141,7 @@ spec: type: "[]string" description: Defines a list of tasks and/or arguments to be provided for gradle in the respective order to be executed. 
This list takes precedence if specified over 'task' parameter longDescription: | + Defines a list of tasks and/or arguments to be provided for gradle in the respective order to be executed. This list takes precedence if specified over 'task' parameter To run command `gradle clean build -x test` , it can be achieved as follows ``` From 6dad12436707be202cc5de95808bfed58b18f8aa Mon Sep 17 00:00:00 2001 From: sumeet patil Date: Fri, 28 Apr 2023 15:47:05 +0200 Subject: [PATCH 23/23] feat(codeqlExecuteScan): CodeQL compliance report and check (#4335) * CodeQL compliance report and check * fix test cases --------- Co-authored-by: Daria Kuznetsova --- cmd/codeqlExecuteScan.go | 307 +++++++++++++--------- cmd/codeqlExecuteScan_generated.go | 59 ++++- cmd/codeqlExecuteScan_test.go | 275 +++++++++---------- pkg/codeql/codeql.go | 65 +++++ pkg/codeql/codeql_test.go | 47 ++++ pkg/codeql/reporting.go | 44 ++++ resources/metadata/codeqlExecuteScan.yaml | 25 ++ 7 files changed, 547 insertions(+), 275 deletions(-) create mode 100644 pkg/codeql/codeql.go create mode 100644 pkg/codeql/codeql_test.go create mode 100644 pkg/codeql/reporting.go diff --git a/cmd/codeqlExecuteScan.go b/cmd/codeqlExecuteScan.go index 206d3c656a..312414db0e 100644 --- a/cmd/codeqlExecuteScan.go +++ b/cmd/codeqlExecuteScan.go @@ -3,9 +3,11 @@ package cmd import ( "fmt" "os" + "path/filepath" "regexp" "strings" + "github.com/SAP/jenkins-library/pkg/codeql" "github.com/SAP/jenkins-library/pkg/command" "github.com/SAP/jenkins-library/pkg/log" "github.com/SAP/jenkins-library/pkg/orchestrator" @@ -26,6 +28,7 @@ type RepoInfo struct { repo string commitId string ref string + owner string } type codeqlExecuteScanUtilsBundle struct { @@ -48,7 +51,9 @@ func codeqlExecuteScan(config codeqlExecuteScanOptions, telemetryData *telemetry utils := newCodeqlExecuteScanUtils() - err := runCodeqlExecuteScan(&config, telemetryData, utils) + reports, err := runCodeqlExecuteScan(&config, telemetryData, utils) + piperutils.PersistReportsAndLinks("codeqlExecuteScan", "./", utils, reports, nil) + if err != nil { log.Entry().WithError(err).Fatal("Codeql scan failed") } @@ -96,82 +101,99 @@ func getGitRepoInfo(repoUri string, repoInfo *RepoInfo) error { if len(matches) > 0 { match := matches[0] repoInfo.serverUrl = "https://" + match[3] - repoInfo.repo = strings.TrimSuffix(match[4], ".git") + repoData := strings.Split(strings.TrimSuffix(match[4], ".git"), "/") + if len(repoData) != 2 { + return fmt.Errorf("Invalid repository %s", repoUri) + } + + repoInfo.owner = repoData[0] + repoInfo.repo = repoData[1] return nil } return fmt.Errorf("Invalid repository %s", repoUri) } -func uploadResults(config *codeqlExecuteScanOptions, utils codeqlExecuteScanUtils) error { - if config.UploadResults { - if len(config.GithubToken) == 0 { - return errors.New("failed running upload-results as github token was not specified") - } +func initGitInfo(config *codeqlExecuteScanOptions) RepoInfo { + var repoInfo RepoInfo + err := getGitRepoInfo(config.Repository, &repoInfo) + if err != nil { + log.Entry().Error(err) + } + repoInfo.ref = config.AnalyzedRef + repoInfo.commitId = config.CommitID - if config.CommitID == "NA" { - return errors.New("failed running upload-results as gitCommitId is not available") + provider, err := orchestrator.NewOrchestratorSpecificConfigProvider() + if err != nil { + log.Entry().Warn("No orchestrator found. 
We assume piper is running locally.") + } else { + if repoInfo.ref == "" { + repoInfo.ref = provider.GetReference() } - var repoInfo RepoInfo - err := getGitRepoInfo(config.Repository, &repoInfo) - if err != nil { - log.Entry().Error(err) + if repoInfo.commitId == "" || repoInfo.commitId == "NA" { + repoInfo.commitId = provider.GetCommit() } - repoInfo.ref = config.AnalyzedRef - repoInfo.commitId = config.CommitID - provider, err := orchestrator.NewOrchestratorSpecificConfigProvider() - if err != nil { - log.Entry().Error(err) - } else { - if repoInfo.ref == "" { - repoInfo.ref = provider.GetReference() + if repoInfo.serverUrl == "" { + err = getGitRepoInfo(provider.GetRepoURL(), &repoInfo) + if err != nil { + log.Entry().Error(err) } + } + } - if repoInfo.commitId == "" { - repoInfo.commitId = provider.GetCommit() - } + return repoInfo +} - if repoInfo.serverUrl == "" { - err = getGitRepoInfo(provider.GetRepoURL(), &repoInfo) - if err != nil { - log.Entry().Error(err) - } - } - } +func getToken(config *codeqlExecuteScanOptions) (bool, string) { + if len(config.GithubToken) > 0 { + return true, config.GithubToken + } - cmd := []string{"github", "upload-results", "--sarif=" + fmt.Sprintf("%vtarget/codeqlReport.sarif", config.ModulePath), "-a=" + config.GithubToken} + envVal, isEnvGithubToken := os.LookupEnv("GITHUB_TOKEN") + if isEnvGithubToken { + return true, envVal + } - if repoInfo.commitId != "" { - cmd = append(cmd, "--commit="+repoInfo.commitId) - } + return false, "" +} - if repoInfo.serverUrl != "" { - cmd = append(cmd, "--github-url="+repoInfo.serverUrl) - } +func uploadResults(config *codeqlExecuteScanOptions, repoInfo RepoInfo, token string, utils codeqlExecuteScanUtils) error { + cmd := []string{"github", "upload-results", "--sarif=" + filepath.Join(config.ModulePath, "target", "codeqlReport.sarif")} - if repoInfo.repo != "" { - cmd = append(cmd, "--repository="+repoInfo.repo) - } + if config.GithubToken != "" { + cmd = append(cmd, "-a="+token) + } - if repoInfo.ref != "" { - cmd = append(cmd, "--ref="+repoInfo.ref) - } + if repoInfo.commitId != "" { + cmd = append(cmd, "--commit="+repoInfo.commitId) + } - //if no git pramas are passed(commitId, reference, serverUrl, repository), then codeql tries to auto populate it based on git information of the checkout repository. - //It also depends on the orchestrator. Some orchestrator keep git information and some not. - err = execute(utils, cmd, GeneralConfig.Verbose) - if err != nil { - log.Entry().Error("failed to upload sarif results") - return err - } + if repoInfo.serverUrl != "" { + cmd = append(cmd, "--github-url="+repoInfo.serverUrl) + } + + if repoInfo.repo != "" { + cmd = append(cmd, "--repository="+(repoInfo.owner+"/"+repoInfo.repo)) + } + + if repoInfo.ref != "" { + cmd = append(cmd, "--ref="+repoInfo.ref) + } + + //if no git pramas are passed(commitId, reference, serverUrl, repository), then codeql tries to auto populate it based on git information of the checkout repository. + //It also depends on the orchestrator. Some orchestrator keep git information and some not. 
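	// For illustration only, with placeholder values, and assuming the execute helper below invokes the codeql CLI:
	// once all repository details are resolved, the slice assembled above corresponds to a call roughly like
	//   codeql github upload-results --sarif=<modulePath>/target/codeqlReport.sarif -a=<token> --commit=<sha> --github-url=https://github.example.com --repository=myOrg/myRepo --ref=refs/heads/main
	// where <token>, <sha>, the server URL and myOrg/myRepo are placeholders.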
+ err := execute(utils, cmd, GeneralConfig.Verbose) + if err != nil { + log.Entry().Error("failed to upload sarif results") + return err } return nil } -func runCodeqlExecuteScan(config *codeqlExecuteScanOptions, telemetryData *telemetry.CustomData, utils codeqlExecuteScanUtils) error { +func runCodeqlExecuteScan(config *codeqlExecuteScanOptions, telemetryData *telemetry.CustomData, utils codeqlExecuteScanUtils) ([]piperutils.Path, error) { codeqlVersion, err := os.ReadFile("/etc/image-version") if err != nil { log.Entry().Infof("CodeQL image version: unknown") @@ -186,9 +208,9 @@ func runCodeqlExecuteScan(config *codeqlExecuteScanOptions, telemetryData *telem if len(language) == 0 && len(config.Language) == 0 { if config.BuildTool == "custom" { - return fmt.Errorf("as the buildTool is custom. please atleast specify the language parameter") + return reports, fmt.Errorf("as the buildTool is custom. please specify the language parameter") } else { - return fmt.Errorf("the step could not recognize the specified buildTool %s. please specify valid buildtool", config.BuildTool) + return reports, fmt.Errorf("the step could not recognize the specified buildTool %s. please specify valid buildtool", config.BuildTool) } } if len(language) > 0 { @@ -207,130 +229,153 @@ func runCodeqlExecuteScan(config *codeqlExecuteScanOptions, telemetryData *telem err = execute(utils, cmd, GeneralConfig.Verbose) if err != nil { log.Entry().Error("failed running command codeql database create") - return err + return reports, err } - err = os.MkdirAll(fmt.Sprintf("%vtarget", config.ModulePath), os.ModePerm) + err = os.MkdirAll(filepath.Join(config.ModulePath, "target"), os.ModePerm) if err != nil { - return fmt.Errorf("failed to create directory: %w", err) + return reports, fmt.Errorf("failed to create directory: %w", err) } cmd = nil - cmd = append(cmd, "database", "analyze", "--format=sarif-latest", fmt.Sprintf("--output=%vtarget/codeqlReport.sarif", config.ModulePath), config.Database) + cmd = append(cmd, "database", "analyze", "--format=sarif-latest", fmt.Sprintf("--output=%v", filepath.Join(config.ModulePath, "target", "codeqlReport.sarif")), config.Database) cmd = append(cmd, getRamAndThreadsFromConfig(config)...) cmd = codeqlQuery(cmd, config.QuerySuite) err = execute(utils, cmd, GeneralConfig.Verbose) if err != nil { log.Entry().Error("failed running command codeql database analyze for sarif generation") - return err + return reports, err } - reports = append(reports, piperutils.Path{Target: fmt.Sprintf("%vtarget/codeqlReport.sarif", config.ModulePath)}) + reports = append(reports, piperutils.Path{Target: filepath.Join(config.ModulePath, "target", "codeqlReport.sarif")}) cmd = nil - cmd = append(cmd, "database", "analyze", "--format=csv", fmt.Sprintf("--output=%vtarget/codeqlReport.csv", config.ModulePath), config.Database) + cmd = append(cmd, "database", "analyze", "--format=csv", fmt.Sprintf("--output=%v", filepath.Join(config.ModulePath, "target", "codeqlReport.csv")), config.Database) cmd = append(cmd, getRamAndThreadsFromConfig(config)...) 
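	// For orientation, with hypothetical values for database, query suite and resources, and assuming that
	// codeqlQuery appends the configured query suite and that execute runs the codeql CLI, the two
	// "database analyze" calls built in this function roughly translate to:
	//   codeql database analyze --format=sarif-latest --output=<modulePath>/target/codeqlReport.sarif <database> --threads=4 --ram=4000 <querySuite>
	//   codeql database analyze --format=csv --output=<modulePath>/target/codeqlReport.csv <database> --threads=4 --ram=4000 <querySuite>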
cmd = codeqlQuery(cmd, config.QuerySuite) err = execute(utils, cmd, GeneralConfig.Verbose) if err != nil { log.Entry().Error("failed running command codeql database analyze for csv generation") - return err + return reports, err } - reports = append(reports, piperutils.Path{Target: fmt.Sprintf("%vtarget/codeqlReport.csv", config.ModulePath)}) - err = uploadResults(config, utils) - if err != nil { - log.Entry().Error("failed to upload results") - return err + reports = append(reports, piperutils.Path{Target: filepath.Join(config.ModulePath, "target", "codeqlReport.csv")}) + + repoInfo := initGitInfo(config) + repoUrl := fmt.Sprintf("%s/%s/%s", repoInfo.serverUrl, repoInfo.owner, repoInfo.repo) + repoReference, err := buildRepoReference(repoUrl, repoInfo.ref) + repoCodeqlScanUrl := fmt.Sprintf("%s/security/code-scanning?query=is:open+ref:%s", repoUrl, repoInfo.ref) + + if !config.UploadResults { + log.Entry().Warn("The sarif results will not be uploaded to the repository and compliance report will not be generated as uploadResults is set to false.") + } else { + hasToken, token := getToken(config) + if !hasToken { + return reports, errors.New("failed running upload-results as githubToken was not specified") + } + + err = uploadResults(config, repoInfo, token, utils) + if err != nil { + + return reports, err + } + + codeqlScanAuditInstance := codeql.NewCodeqlScanAuditInstance(config.GithubAPIURL, repoInfo.owner, repoInfo.repo, token, []string{}) + scanResults, err := codeqlScanAuditInstance.GetVulnerabilities(repoInfo.ref) + if err != nil { + return reports, errors.Wrap(err, "failed to get scan results") + } + + unaudited := (scanResults.Total - scanResults.Audited) + if unaudited > config.VulnerabilityThresholdTotal { + msg := fmt.Sprintf("Your repository %v with ref %v is not compliant. Total unaudited issues are %v which is greater than the VulnerabilityThresholdTotal count %v", repoUrl, repoInfo.ref, unaudited, config.VulnerabilityThresholdTotal) + if config.CheckForCompliance { + + return reports, errors.Errorf(msg) + } + + log.Entry().Warning(msg) + } + + codeqlAudit := codeql.CodeqlAudit{ToolName: "codeql", RepositoryUrl: repoUrl, CodeScanningLink: repoCodeqlScanUrl, RepositoryReferenceUrl: repoReference, ScanResults: scanResults} + paths, err := codeql.WriteJSONReport(codeqlAudit, config.ModulePath) + if err != nil { + return reports, errors.Wrap(err, "failed to write json compliance report") + } + + reports = append(reports, paths...) 
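	// Worked example with hypothetical numbers: if GetVulnerabilities reports Total=12 alerts and
	// Audited=9 of them are no longer open, then unaudited=3. With vulnerabilityThresholdTotal=0 the
	// repository counts as non-compliant; the step fails only when checkForCompliance is true and
	// otherwise just logs the warning. The JSON compliance report written above ends up in
	// <modulePath>/codeql/piper_codeql_report.json.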
 	}
 
-	// create toolrecord file
-	toolRecordFileName, err := createToolRecordCodeql(utils, "./", *config)
+	toolRecordFileName, err := createAndPersistToolRecord(utils, repoInfo, repoReference, repoUrl, repoCodeqlScanUrl)
 	if err != nil {
-		// do not fail until the framework is well established
 		log.Entry().Warning("TR_CODEQL: Failed to create toolrecord file ...", err)
 	} else {
 		reports = append(reports, piperutils.Path{Target: toolRecordFileName})
 	}
 
-	piperutils.PersistReportsAndLinks("codeqlExecuteScan", "./", utils, reports, nil)
-
-	return nil
+	return reports, nil
 }
 
-func createToolRecordCodeql(utils codeqlExecuteScanUtils, workspace string, config codeqlExecuteScanOptions) (string, error) {
-	repoURL := strings.TrimSuffix(config.Repository, ".git")
-	toolInstance, orgName, repoName, err := parseRepositoryURL(repoURL)
+func createAndPersistToolRecord(utils codeqlExecuteScanUtils, repoInfo RepoInfo, repoReference string, repoUrl string, repoCodeqlScanUrl string) (string, error) {
+	toolRecord, err := createToolRecordCodeql(utils, repoInfo, repoUrl, repoReference, repoCodeqlScanUrl)
 	if err != nil {
 		return "", err
 	}
-	record := toolrecord.New(utils, workspace, "codeql", toolInstance)
-	record.DisplayName = fmt.Sprintf("%s %s - %s %s", orgName, repoName, config.AnalyzedRef, config.CommitID)
-	record.DisplayURL = fmt.Sprintf("%s/security/code-scanning?query=is:open+ref:%s", repoURL, config.AnalyzedRef)
-	// Repository
-	err = record.AddKeyData("repository",
-		fmt.Sprintf("%s/%s", orgName, repoName),
-		fmt.Sprintf("%s %s", orgName, repoName),
-		config.Repository)
+
+	toolRecordFileName, err := persistToolRecord(toolRecord)
 	if err != nil {
 		return "", err
 	}
-	// Repository Reference
-	repoReference, err := buildRepoReference(repoURL, config.AnalyzedRef)
+
+	return toolRecordFileName, nil
+}
+
+func createToolRecordCodeql(utils codeqlExecuteScanUtils, repoInfo RepoInfo, repoUrl string, repoReference string, repoCodeqlScanUrl string) (*toolrecord.Toolrecord, error) {
+	record := toolrecord.New(utils, "./", "codeql", repoInfo.serverUrl)
+
+	if repoInfo.serverUrl == "" {
+		return record, errors.New("Repository not set")
+	}
+
+	if repoInfo.commitId == "" || repoInfo.commitId == "NA" {
+		return record, errors.New("CommitId not set")
+	}
+
+	if repoInfo.ref == "" {
+		return record, errors.New("Analyzed Reference not set")
+	}
+
+	record.DisplayName = fmt.Sprintf("%s %s - %s %s", repoInfo.owner, repoInfo.repo, repoInfo.ref, repoInfo.commitId)
+	record.DisplayURL = fmt.Sprintf("%s/security/code-scanning?query=is:open+ref:%s", repoUrl, repoInfo.ref)
+
+	err := record.AddKeyData("repository",
+		fmt.Sprintf("%s/%s", repoInfo.owner, repoInfo.repo),
+		fmt.Sprintf("%s %s", repoInfo.owner, repoInfo.repo),
+		repoUrl)
 	if err != nil {
-		log.Entry().WithError(err).Warn("Failed to build repository reference")
+		return record, err
 	}
+
 	err = record.AddKeyData("repositoryReference",
-		config.AnalyzedRef,
-		fmt.Sprintf("%s - %s", repoName, config.AnalyzedRef),
+		repoInfo.ref,
+		fmt.Sprintf("%s - %s", repoInfo.repo, repoInfo.ref),
 		repoReference)
 	if err != nil {
-		return "", err
+		return record, err
 	}
-	// Scan Results
+
 	err = record.AddKeyData("scanResult",
-		fmt.Sprintf("%s/%s", config.AnalyzedRef, config.CommitID),
-		fmt.Sprintf("%s %s - %s %s", orgName, repoName, config.AnalyzedRef, config.CommitID),
-		fmt.Sprintf("%s/security/code-scanning?query=is:open+ref:%s", repoURL, config.AnalyzedRef))
+		fmt.Sprintf("%s/%s", repoInfo.ref, repoInfo.commitId),
+		fmt.Sprintf("%s %s - %s %s", repoInfo.owner, repoInfo.repo,
repoInfo.ref, repoInfo.commitId), + fmt.Sprintf("%s/security/code-scanning?query=is:open+ref:%s", repoUrl, repoInfo.ref)) if err != nil { - return "", err - } - err = record.Persist() - if err != nil { - return "", err + return record, err } - return record.GetFileName(), nil -} -func parseRepositoryURL(repository string) (toolInstance, orgName, repoName string, err error) { - if repository == "" { - err = errors.New("Repository param is not set") - return - } - fullRepo := strings.TrimSuffix(repository, ".git") - // regexp for toolInstance - re := regexp.MustCompile(`^[a-zA-Z0-9]+://[a-zA-Z0-9-_.]+/`) - matchedHost := re.FindAllString(fullRepo, -1) - if len(matchedHost) == 0 { - err = errors.New("Unable to parse tool instance from repository url") - return - } - orgRepoNames := strings.Split(strings.TrimPrefix(fullRepo, matchedHost[0]), "/") - if len(orgRepoNames) < 2 { - err = errors.New("Unable to parse organization and repo names from repository url") - return - } - - toolInstance = strings.Trim(matchedHost[0], "/") - orgName = orgRepoNames[0] - repoName = orgRepoNames[1] - return + return record, nil } func buildRepoReference(repository, analyzedRef string) (string, error) { - if repository == "" || analyzedRef == "" { - return "", errors.New("Repository or analyzedRef param is not set") - } ref := strings.Split(analyzedRef, "/") if len(ref) < 3 { return "", errors.New(fmt.Sprintf("Wrong analyzedRef format: %s", analyzedRef)) @@ -344,6 +389,14 @@ func buildRepoReference(repository, analyzedRef string) (string, error) { return fmt.Sprintf("%s/tree/%s", repository, ref[2]), nil } +func persistToolRecord(toolRecord *toolrecord.Toolrecord) (string, error) { + err := toolRecord.Persist() + if err != nil { + return "", err + } + return toolRecord.GetFileName(), nil +} + func getRamAndThreadsFromConfig(config *codeqlExecuteScanOptions) []string { params := make([]string, 0, 2) if len(config.Threads) > 0 { diff --git a/cmd/codeqlExecuteScan_generated.go b/cmd/codeqlExecuteScan_generated.go index 90238708fd..8c551a0885 100644 --- a/cmd/codeqlExecuteScan_generated.go +++ b/cmd/codeqlExecuteScan_generated.go @@ -20,19 +20,22 @@ import ( ) type codeqlExecuteScanOptions struct { - GithubToken string `json:"githubToken,omitempty"` - BuildTool string `json:"buildTool,omitempty" validate:"possible-values=custom maven golang npm pip yarn"` - BuildCommand string `json:"buildCommand,omitempty"` - Language string `json:"language,omitempty"` - ModulePath string `json:"modulePath,omitempty"` - Database string `json:"database,omitempty"` - QuerySuite string `json:"querySuite,omitempty"` - UploadResults bool `json:"uploadResults,omitempty"` - Threads string `json:"threads,omitempty"` - Ram string `json:"ram,omitempty"` - AnalyzedRef string `json:"analyzedRef,omitempty"` - Repository string `json:"repository,omitempty"` - CommitID string `json:"commitId,omitempty"` + GithubToken string `json:"githubToken,omitempty"` + GithubAPIURL string `json:"githubApiUrl,omitempty"` + BuildTool string `json:"buildTool,omitempty" validate:"possible-values=custom maven golang npm pip yarn"` + BuildCommand string `json:"buildCommand,omitempty"` + Language string `json:"language,omitempty"` + ModulePath string `json:"modulePath,omitempty"` + Database string `json:"database,omitempty"` + QuerySuite string `json:"querySuite,omitempty"` + UploadResults bool `json:"uploadResults,omitempty"` + Threads string `json:"threads,omitempty"` + Ram string `json:"ram,omitempty"` + AnalyzedRef string `json:"analyzedRef,omitempty"` + 
Repository string `json:"repository,omitempty"` + CommitID string `json:"commitId,omitempty"` + VulnerabilityThresholdTotal int `json:"vulnerabilityThresholdTotal,omitempty"` + CheckForCompliance bool `json:"checkForCompliance,omitempty"` } type codeqlExecuteScanReports struct { @@ -173,6 +176,7 @@ and Java plus Maven.`, func addCodeqlExecuteScanFlags(cmd *cobra.Command, stepConfig *codeqlExecuteScanOptions) { cmd.Flags().StringVar(&stepConfig.GithubToken, "githubToken", os.Getenv("PIPER_githubToken"), "GitHub personal access token in plain text. NEVER set this parameter in a file commited to a source code repository. This parameter is intended to be used from the command line or set securely via the environment variable listed below. In most pipeline use-cases, you should instead either store the token in Vault (where it can be automatically retrieved by the step from one of the paths listed below) or store it as a Jenkins secret and configure the secret's id via the `githubTokenCredentialsId` parameter.") + cmd.Flags().StringVar(&stepConfig.GithubAPIURL, "githubApiUrl", `https://api.github.com`, "Set the GitHub API URL.") cmd.Flags().StringVar(&stepConfig.BuildTool, "buildTool", `maven`, "Defines the build tool which is used for building the project.") cmd.Flags().StringVar(&stepConfig.BuildCommand, "buildCommand", os.Getenv("PIPER_buildCommand"), "Command to build the project") cmd.Flags().StringVar(&stepConfig.Language, "language", os.Getenv("PIPER_language"), "The programming language used to analyze.") @@ -185,6 +189,8 @@ func addCodeqlExecuteScanFlags(cmd *cobra.Command, stepConfig *codeqlExecuteScan cmd.Flags().StringVar(&stepConfig.AnalyzedRef, "analyzedRef", os.Getenv("PIPER_analyzedRef"), "Name of the ref that was analyzed.") cmd.Flags().StringVar(&stepConfig.Repository, "repository", os.Getenv("PIPER_repository"), "URL of the GitHub instance") cmd.Flags().StringVar(&stepConfig.CommitID, "commitId", os.Getenv("PIPER_commitId"), "SHA of commit that was analyzed.") + cmd.Flags().IntVar(&stepConfig.VulnerabilityThresholdTotal, "vulnerabilityThresholdTotal", 0, "Threashold for maximum number of allowed vulnerabilities.") + cmd.Flags().BoolVar(&stepConfig.CheckForCompliance, "checkForCompliance", false, "If set to true, the piper step checks for compliance based on vulnerability threadholds. 
Example - If total vulnerabilites are 10 and vulnerabilityThresholdTotal is set as 0, then the steps throws an compliance error.") cmd.MarkFlagRequired("buildTool") } @@ -228,6 +234,15 @@ func codeqlExecuteScanMetadata() config.StepData { Aliases: []config.Alias{{Name: "access_token"}}, Default: os.Getenv("PIPER_githubToken"), }, + { + Name: "githubApiUrl", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: `https://api.github.com`, + }, { Name: "buildTool", ResourceRef: []config.ResourceReference{}, @@ -351,6 +366,24 @@ func codeqlExecuteScanMetadata() config.StepData { Aliases: []config.Alias{}, Default: os.Getenv("PIPER_commitId"), }, + { + Name: "vulnerabilityThresholdTotal", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "int", + Mandatory: false, + Aliases: []config.Alias{}, + Default: 0, + }, + { + Name: "checkForCompliance", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "bool", + Mandatory: false, + Aliases: []config.Alias{}, + Default: false, + }, }, }, Containers: []config.Container{ diff --git a/cmd/codeqlExecuteScan_test.go b/cmd/codeqlExecuteScan_test.go index 4e81ab0ca2..6cc03013ad 100644 --- a/cmd/codeqlExecuteScan_test.go +++ b/cmd/codeqlExecuteScan_test.go @@ -1,9 +1,11 @@ package cmd import ( + "fmt" "testing" "github.com/SAP/jenkins-library/pkg/mock" + "github.com/SAP/jenkins-library/pkg/orchestrator" "github.com/stretchr/testify/assert" ) @@ -24,42 +26,50 @@ func TestRunCodeqlExecuteScan(t *testing.T) { t.Run("Valid CodeqlExecuteScan", func(t *testing.T) { config := codeqlExecuteScanOptions{BuildTool: "maven", ModulePath: "./"} - assert.Equal(t, nil, runCodeqlExecuteScan(&config, nil, newCodeqlExecuteScanTestsUtils())) + _, err := runCodeqlExecuteScan(&config, nil, newCodeqlExecuteScanTestsUtils()) + assert.NoError(t, err) }) t.Run("No auth token passed on upload results", func(t *testing.T) { config := codeqlExecuteScanOptions{BuildTool: "maven", UploadResults: true, ModulePath: "./"} - assert.Error(t, runCodeqlExecuteScan(&config, nil, newCodeqlExecuteScanTestsUtils())) + _, err := runCodeqlExecuteScan(&config, nil, newCodeqlExecuteScanTestsUtils()) + assert.Error(t, err) }) t.Run("GitCommitID is NA on upload results", func(t *testing.T) { config := codeqlExecuteScanOptions{BuildTool: "maven", UploadResults: true, ModulePath: "./", CommitID: "NA"} - assert.Error(t, runCodeqlExecuteScan(&config, nil, newCodeqlExecuteScanTestsUtils())) + _, err := runCodeqlExecuteScan(&config, nil, newCodeqlExecuteScanTestsUtils()) + assert.Error(t, err) }) - t.Run("Upload results with token", func(t *testing.T) { + t.Run("Upload results fails as repository not specified", func(t *testing.T) { config := codeqlExecuteScanOptions{BuildTool: "maven", ModulePath: "./", UploadResults: true, GithubToken: "test"} - assert.Equal(t, nil, runCodeqlExecuteScan(&config, nil, newCodeqlExecuteScanTestsUtils())) + _, err := runCodeqlExecuteScan(&config, nil, newCodeqlExecuteScanTestsUtils()) + assert.Error(t, err) }) t.Run("Custom buildtool", func(t *testing.T) { - config := codeqlExecuteScanOptions{BuildTool: "custom", Language: "javascript", ModulePath: "./", GithubToken: "test"} - assert.Equal(t, nil, runCodeqlExecuteScan(&config, nil, newCodeqlExecuteScanTestsUtils())) + config := codeqlExecuteScanOptions{BuildTool: "custom", Language: "javascript", 
ModulePath: "./"} + _, err := runCodeqlExecuteScan(&config, nil, newCodeqlExecuteScanTestsUtils()) + assert.NoError(t, err) }) t.Run("Custom buildtool but no language specified", func(t *testing.T) { config := codeqlExecuteScanOptions{BuildTool: "custom", ModulePath: "./", GithubToken: "test"} - assert.Error(t, runCodeqlExecuteScan(&config, nil, newCodeqlExecuteScanTestsUtils())) + _, err := runCodeqlExecuteScan(&config, nil, newCodeqlExecuteScanTestsUtils()) + assert.Error(t, err) }) t.Run("Invalid buildtool and no language specified", func(t *testing.T) { config := codeqlExecuteScanOptions{BuildTool: "test", ModulePath: "./", GithubToken: "test"} - assert.Error(t, runCodeqlExecuteScan(&config, nil, newCodeqlExecuteScanTestsUtils())) + _, err := runCodeqlExecuteScan(&config, nil, newCodeqlExecuteScanTestsUtils()) + assert.Error(t, err) }) t.Run("Invalid buildtool but language specified", func(t *testing.T) { config := codeqlExecuteScanOptions{BuildTool: "test", Language: "javascript", ModulePath: "./", GithubToken: "test"} - assert.Equal(t, nil, runCodeqlExecuteScan(&config, nil, newCodeqlExecuteScanTestsUtils())) + _, err := runCodeqlExecuteScan(&config, nil, newCodeqlExecuteScanTestsUtils()) + assert.NoError(t, err) }) } @@ -69,7 +79,8 @@ func TestGetGitRepoInfo(t *testing.T) { err := getGitRepoInfo("https://github.hello.test/Testing/fortify.git", &repoInfo) assert.NoError(t, err) assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) - assert.Equal(t, "Testing/fortify", repoInfo.repo) + assert.Equal(t, "fortify", repoInfo.repo) + assert.Equal(t, "Testing", repoInfo.owner) }) t.Run("Valid URL2", func(t *testing.T) { @@ -77,14 +88,16 @@ func TestGetGitRepoInfo(t *testing.T) { err := getGitRepoInfo("https://github.hello.test/Testing/fortify", &repoInfo) assert.NoError(t, err) assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) - assert.Equal(t, "Testing/fortify", repoInfo.repo) + assert.Equal(t, "fortify", repoInfo.repo) + assert.Equal(t, "Testing", repoInfo.owner) }) t.Run("Valid URL1 with dots", func(t *testing.T) { var repoInfo RepoInfo err := getGitRepoInfo("https://github.hello.test/Testing/com.sap.fortify.git", &repoInfo) assert.NoError(t, err) assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) - assert.Equal(t, "Testing/com.sap.fortify", repoInfo.repo) + assert.Equal(t, "com.sap.fortify", repoInfo.repo) + assert.Equal(t, "Testing", repoInfo.owner) }) t.Run("Valid URL2 with dots", func(t *testing.T) { @@ -92,14 +105,16 @@ func TestGetGitRepoInfo(t *testing.T) { err := getGitRepoInfo("https://github.hello.test/Testing/com.sap.fortify", &repoInfo) assert.NoError(t, err) assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) - assert.Equal(t, "Testing/com.sap.fortify", repoInfo.repo) + assert.Equal(t, "com.sap.fortify", repoInfo.repo) + assert.Equal(t, "Testing", repoInfo.owner) }) t.Run("Valid URL1 with username and token", func(t *testing.T) { var repoInfo RepoInfo err := getGitRepoInfo("https://username:token@github.hello.test/Testing/fortify.git", &repoInfo) assert.NoError(t, err) assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) - assert.Equal(t, "Testing/fortify", repoInfo.repo) + assert.Equal(t, "fortify", repoInfo.repo) + assert.Equal(t, "Testing", repoInfo.owner) }) t.Run("Valid URL2 with username and token", func(t *testing.T) { @@ -107,7 +122,8 @@ func TestGetGitRepoInfo(t *testing.T) { err := getGitRepoInfo("https://username:token@github.hello.test/Testing/fortify", &repoInfo) assert.NoError(t, err) 
assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) - assert.Equal(t, "Testing/fortify", repoInfo.repo) + assert.Equal(t, "fortify", repoInfo.repo) + assert.Equal(t, "Testing", repoInfo.owner) }) t.Run("Invalid URL as no org/owner passed", func(t *testing.T) { @@ -121,58 +137,78 @@ func TestGetGitRepoInfo(t *testing.T) { }) } -func TestParseRepositoryURL(t *testing.T) { - t.Run("Valid repository", func(t *testing.T) { - repository := "https://github.hello.test/Testing/fortify.git" - toolInstance, orgName, repoName, err := parseRepositoryURL(repository) - assert.NoError(t, err) - assert.Equal(t, "https://github.hello.test", toolInstance) - assert.Equal(t, "Testing", orgName) - assert.Equal(t, "fortify", repoName) +func TestInitGitInfo(t *testing.T) { + t.Run("Valid URL1", func(t *testing.T) { + config := codeqlExecuteScanOptions{Repository: "https://github.hello.test/Testing/codeql.git", AnalyzedRef: "refs/head/branch", CommitID: "abcd1234"} + repoInfo := initGitInfo(&config) + assert.Equal(t, "abcd1234", repoInfo.commitId) + assert.Equal(t, "Testing", repoInfo.owner) + assert.Equal(t, "codeql", repoInfo.repo) + assert.Equal(t, "refs/head/branch", repoInfo.ref) + assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) }) - t.Run("valid repository 2", func(t *testing.T) { - repository := "https://github.hello.test/Testing/fortify" - toolInstance, orgName, repoName, err := parseRepositoryURL(repository) - assert.NoError(t, err) - assert.Equal(t, "https://github.hello.test", toolInstance) - assert.Equal(t, "Testing", orgName) - assert.Equal(t, "fortify", repoName) + + t.Run("Valid URL2", func(t *testing.T) { + config := codeqlExecuteScanOptions{Repository: "https://github.hello.test/Testing/codeql", AnalyzedRef: "refs/head/branch", CommitID: "abcd1234"} + repoInfo := initGitInfo(&config) + assert.Equal(t, "abcd1234", repoInfo.commitId) + assert.Equal(t, "Testing", repoInfo.owner) + assert.Equal(t, "codeql", repoInfo.repo) + assert.Equal(t, "refs/head/branch", repoInfo.ref) + assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) }) - t.Run("Invalid repository without repo name", func(t *testing.T) { - repository := "https://github.hello.test/Testing" - toolInstance, orgName, repoName, err := parseRepositoryURL(repository) - assert.Error(t, err) - assert.ErrorContains(t, err, "Unable to parse organization and repo names") - assert.Equal(t, "", toolInstance) - assert.Equal(t, "", orgName) - assert.Equal(t, "", repoName) - }) - t.Run("Invalid repository without organization name", func(t *testing.T) { - repository := "https://github.hello.test/fortify" - toolInstance, orgName, repoName, err := parseRepositoryURL(repository) - assert.Error(t, err) - assert.ErrorContains(t, err, "Unable to parse organization and repo names") - assert.Equal(t, "", toolInstance) - assert.Equal(t, "", orgName) - assert.Equal(t, "", repoName) - }) - t.Run("Invalid repository without tool instance", func(t *testing.T) { - repository := "/Testing/fortify" - toolInstance, orgName, repoName, err := parseRepositoryURL(repository) - assert.Error(t, err) - assert.ErrorContains(t, err, "Unable to parse tool instance") - assert.Equal(t, "", toolInstance) - assert.Equal(t, "", orgName) - assert.Equal(t, "", repoName) - }) - t.Run("Empty repository", func(t *testing.T) { - repository := "" - toolInstance, orgName, repoName, err := parseRepositoryURL(repository) - assert.Error(t, err) - assert.ErrorContains(t, err, "Repository param is not set") - assert.Equal(t, "", toolInstance) - assert.Equal(t, 
"", orgName) - assert.Equal(t, "", repoName) + + t.Run("Valid url with dots URL1", func(t *testing.T) { + config := codeqlExecuteScanOptions{Repository: "https://github.hello.test/Testing/com.sap.codeql.git", AnalyzedRef: "refs/head/branch", CommitID: "abcd1234"} + repoInfo := initGitInfo(&config) + assert.Equal(t, "abcd1234", repoInfo.commitId) + assert.Equal(t, "Testing", repoInfo.owner) + assert.Equal(t, "com.sap.codeql", repoInfo.repo) + assert.Equal(t, "refs/head/branch", repoInfo.ref) + assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) + }) + + t.Run("Valid url with dots URL2", func(t *testing.T) { + config := codeqlExecuteScanOptions{Repository: "https://github.hello.test/Testing/com.sap.codeql", AnalyzedRef: "refs/head/branch", CommitID: "abcd1234"} + repoInfo := initGitInfo(&config) + assert.Equal(t, "abcd1234", repoInfo.commitId) + assert.Equal(t, "Testing", repoInfo.owner) + assert.Equal(t, "com.sap.codeql", repoInfo.repo) + assert.Equal(t, "refs/head/branch", repoInfo.ref) + assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) + }) + + t.Run("Valid url with username and token URL1", func(t *testing.T) { + config := codeqlExecuteScanOptions{Repository: "https://username:token@github.hello.test/Testing/codeql.git", AnalyzedRef: "refs/head/branch", CommitID: "abcd1234"} + repoInfo := initGitInfo(&config) + assert.Equal(t, "abcd1234", repoInfo.commitId) + assert.Equal(t, "Testing", repoInfo.owner) + assert.Equal(t, "codeql", repoInfo.repo) + assert.Equal(t, "refs/head/branch", repoInfo.ref) + assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) + }) + + t.Run("Valid url with username and token URL2", func(t *testing.T) { + config := codeqlExecuteScanOptions{Repository: "https://username:token@github.hello.test/Testing/codeql", AnalyzedRef: "refs/head/branch", CommitID: "abcd1234"} + repoInfo := initGitInfo(&config) + assert.Equal(t, "abcd1234", repoInfo.commitId) + assert.Equal(t, "Testing", repoInfo.owner) + assert.Equal(t, "codeql", repoInfo.repo) + assert.Equal(t, "refs/head/branch", repoInfo.ref) + assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) + }) + + t.Run("Invalid URL with no org/reponame", func(t *testing.T) { + config := codeqlExecuteScanOptions{Repository: "https://github.hello.test", AnalyzedRef: "refs/head/branch", CommitID: "abcd1234"} + repoInfo := initGitInfo(&config) + _, err := orchestrator.NewOrchestratorSpecificConfigProvider() + assert.Equal(t, "abcd1234", repoInfo.commitId) + assert.Equal(t, "refs/head/branch", repoInfo.ref) + if err != nil { + assert.Equal(t, "", repoInfo.owner) + assert.Equal(t, "", repoInfo.repo) + assert.Equal(t, "", repoInfo.serverUrl) + } }) } @@ -207,87 +243,56 @@ func TestBuildRepoReference(t *testing.T) { assert.ErrorContains(t, err, "Wrong analyzedRef format") assert.Equal(t, "", ref) }) - t.Run("Empty repository", func(t *testing.T) { - repository := "" - analyzedRef := "refs/pull/merge" - ref, err := buildRepoReference(repository, analyzedRef) - assert.Error(t, err) - assert.ErrorContains(t, err, "Repository or analyzedRef param is not set") - assert.Equal(t, "", ref) - }) - t.Run("Empty analyzedRef", func(t *testing.T) { - repository := "https://github.hello.test/Testing/fortify" - analyzedRef := "" - ref, err := buildRepoReference(repository, analyzedRef) - assert.Error(t, err) - assert.ErrorContains(t, err, "Repository or analyzedRef param is not set") - assert.Equal(t, "", ref) - }) } +func getRepoReferences(repoInfo RepoInfo) (string, string, string) { + repoUrl := 
fmt.Sprintf("%s/%s/%s", repoInfo.serverUrl, repoInfo.owner, repoInfo.repo) + repoReference, _ := buildRepoReference(repoUrl, repoInfo.ref) + repoCodeqlScanUrl := fmt.Sprintf("%s/security/code-scanning?query=is:open+ref:%s", repoUrl, repoInfo.ref) + return repoUrl, repoReference, repoCodeqlScanUrl +} func TestCreateToolRecordCodeql(t *testing.T) { t.Run("Valid toolrun file", func(t *testing.T) { - config := codeqlExecuteScanOptions{ - Repository: "https://github.hello.test/Testing/fortify.git", - AnalyzedRef: "refs/head/branch", - CommitID: "test", - } - fileName, err := createToolRecordCodeql(newCodeqlExecuteScanTestsUtils(), "test", config) + repoInfo := RepoInfo{serverUrl: "https://github.hello.test", commitId: "test", ref: "refs/head/branch", owner: "Testing", repo: "fortify"} + repoUrl, repoReference, repoCodeqlScanUrl := getRepoReferences(repoInfo) + toolRecord, err := createToolRecordCodeql(newCodeqlExecuteScanTestsUtils(), repoInfo, repoUrl, repoReference, repoCodeqlScanUrl) assert.NoError(t, err) - assert.Contains(t, fileName, "toolrun_codeql") + assert.Equal(t, toolRecord.ToolName, "codeql") + assert.Equal(t, toolRecord.ToolInstance, "https://github.hello.test") + assert.Equal(t, toolRecord.DisplayName, "Testing fortify - refs/head/branch test") + assert.Equal(t, toolRecord.DisplayURL, "https://github.hello.test/Testing/fortify/security/code-scanning?query=is:open+ref:refs/head/branch") }) t.Run("Empty repository URL", func(t *testing.T) { - config := codeqlExecuteScanOptions{ - Repository: "", - AnalyzedRef: "refs/head/branch", - CommitID: "test", - } - fileName, err := createToolRecordCodeql(newCodeqlExecuteScanTestsUtils(), "", config) - assert.Error(t, err) - assert.ErrorContains(t, err, "Repository param is not set") - assert.Empty(t, fileName) - }) - t.Run("Invalid repository URL", func(t *testing.T) { - config := codeqlExecuteScanOptions{ - Repository: "https://github.hello.test/Testing", - AnalyzedRef: "refs/head/branch", - CommitID: "test", - } - fileName, err := createToolRecordCodeql(newCodeqlExecuteScanTestsUtils(), "test", config) - assert.Error(t, err) - assert.Regexp(t, "^Unable to parse [a-z ]+ from repository url$", err.Error()) - assert.Empty(t, fileName) - }) - t.Run("Empty workspace", func(t *testing.T) { - config := codeqlExecuteScanOptions{ - Repository: "https://github.hello.test/Testing/fortify.git", - AnalyzedRef: "refs/head/branch", - CommitID: "test", - } - fileName, err := createToolRecordCodeql(newCodeqlExecuteScanTestsUtils(), "", config) + repoInfo := RepoInfo{serverUrl: "", commitId: "test", ref: "refs/head/branch", owner: "Testing", repo: "fortify"} + repoUrl, repoReference, repoCodeqlScanUrl := getRepoReferences(repoInfo) + _, err := createToolRecordCodeql(newCodeqlExecuteScanTestsUtils(), repoInfo, repoUrl, repoReference, repoCodeqlScanUrl) + assert.Error(t, err) - assert.ErrorContains(t, err, "TR_PERSIST: empty workspace") - assert.Empty(t, fileName) + assert.ErrorContains(t, err, "Repository not set") }) + t.Run("Empty analyzedRef", func(t *testing.T) { - config := codeqlExecuteScanOptions{ - Repository: "https://github.hello.test/Testing/fortify.git", - AnalyzedRef: "", - CommitID: "test", - } - fileName, err := createToolRecordCodeql(newCodeqlExecuteScanTestsUtils(), "test", config) + repoInfo := RepoInfo{serverUrl: "https://github.hello.test", commitId: "test", ref: "", owner: "Testing", repo: "fortify"} + repoUrl, repoReference, repoCodeqlScanUrl := getRepoReferences(repoInfo) + _, err := 
createToolRecordCodeql(newCodeqlExecuteScanTestsUtils(), repoInfo, repoUrl, repoReference, repoCodeqlScanUrl) + assert.Error(t, err) - assert.ErrorContains(t, err, "TR_ADD_KEY: empty keyvalue") - assert.Empty(t, fileName, "toolrun_codeql") + assert.ErrorContains(t, err, "Analyzed Reference not set") + }) + + t.Run("Empty CommitId", func(t *testing.T) { + repoInfo := RepoInfo{serverUrl: "https://github.hello.test", commitId: "", ref: "refs/head/branch", owner: "Testing", repo: "fortify"} + repoUrl, repoReference, repoCodeqlScanUrl := getRepoReferences(repoInfo) + _, err := createToolRecordCodeql(newCodeqlExecuteScanTestsUtils(), repoInfo, repoUrl, repoReference, repoCodeqlScanUrl) + + assert.Error(t, err) + assert.ErrorContains(t, err, "CommitId not set") }) t.Run("Invalid analyzedRef", func(t *testing.T) { - config := codeqlExecuteScanOptions{ - Repository: "https://github.hello.test/Testing/fortify.git", - AnalyzedRef: "refs/head", - CommitID: "test", - } - fileName, err := createToolRecordCodeql(newCodeqlExecuteScanTestsUtils(), "test", config) - assert.NoError(t, err) - assert.Contains(t, fileName, "toolrun_codeql") + repoInfo := RepoInfo{serverUrl: "https://github.hello.test", commitId: "", ref: "refs/branch", owner: "Testing", repo: "fortify"} + repoUrl, repoReference, repoCodeqlScanUrl := getRepoReferences(repoInfo) + _, err := createToolRecordCodeql(newCodeqlExecuteScanTestsUtils(), repoInfo, repoUrl, repoReference, repoCodeqlScanUrl) + + assert.Error(t, err) }) } diff --git a/pkg/codeql/codeql.go b/pkg/codeql/codeql.go new file mode 100644 index 0000000000..7e7f82c99c --- /dev/null +++ b/pkg/codeql/codeql.go @@ -0,0 +1,65 @@ +package codeql + +import ( + "context" + + sapgithub "github.com/SAP/jenkins-library/pkg/github" + "github.com/google/go-github/v45/github" +) + +type CodeqlScanAudit interface { + GetVulnerabilities(analyzedRef string, state string) error +} + +type githubCodeqlScanningService interface { + ListAlertsForRepo(ctx context.Context, owner, repo string, opts *github.AlertListOptions) ([]*github.Alert, *github.Response, error) +} + +const auditStateOpen = "open" + +func NewCodeqlScanAuditInstance(apiURL, owner, repository, token string, trustedCerts []string) CodeqlScanAuditInstance { + return CodeqlScanAuditInstance{apiURL: apiURL, owner: owner, repository: repository, token: token, trustedCerts: trustedCerts} +} + +type CodeqlScanAuditInstance struct { + apiURL string + owner string + repository string + token string + trustedCerts []string + alertListoptions github.AlertListOptions +} + +func (codeqlScanAudit *CodeqlScanAuditInstance) GetVulnerabilities(analyzedRef string) (CodeqlScanning, error) { + ctx, client, err := sapgithub.NewClient(codeqlScanAudit.token, codeqlScanAudit.apiURL, "", codeqlScanAudit.trustedCerts) + if err != nil { + return CodeqlScanning{}, err + } + + return getVulnerabilitiesFromClient(ctx, client.CodeScanning, analyzedRef, codeqlScanAudit) +} + +func getVulnerabilitiesFromClient(ctx context.Context, codeScanning githubCodeqlScanningService, analyzedRef string, codeqlScanAudit *CodeqlScanAuditInstance) (CodeqlScanning, error) { + alertOptions := github.AlertListOptions{ + State: "", + Ref: analyzedRef, + ListOptions: github.ListOptions{}, + } + + alerts, _, err := codeScanning.ListAlertsForRepo(ctx, codeqlScanAudit.owner, codeqlScanAudit.repository, &alertOptions) + if err != nil { + return CodeqlScanning{}, err + } + + openStateCount := 0 + for _, alert := range alerts { + if *alert.State == auditStateOpen { + openStateCount = 
openStateCount + 1 + } + } + + codeqlScanning := CodeqlScanning{} + codeqlScanning.Total = len(alerts) + codeqlScanning.Audited = (codeqlScanning.Total - openStateCount) + return codeqlScanning, nil +} diff --git a/pkg/codeql/codeql_test.go b/pkg/codeql/codeql_test.go new file mode 100644 index 0000000000..c0f64aee64 --- /dev/null +++ b/pkg/codeql/codeql_test.go @@ -0,0 +1,47 @@ +package codeql + +import ( + "context" + "errors" + "testing" + + "github.com/google/go-github/v45/github" + "github.com/stretchr/testify/assert" +) + +type githubCodeqlScanningMock struct { +} + +func (g *githubCodeqlScanningMock) ListAlertsForRepo(ctx context.Context, owner, repo string, opts *github.AlertListOptions) ([]*github.Alert, *github.Response, error) { + openState := "open" + closedState := "closed" + alerts := []*github.Alert{{State: &openState}, {State: &openState}, {State: &closedState}} + return alerts, nil, nil +} + +type githubCodeqlScanningErrorMock struct { +} + +func (g *githubCodeqlScanningErrorMock) ListAlertsForRepo(ctx context.Context, owner, repo string, opts *github.AlertListOptions) ([]*github.Alert, *github.Response, error) { + return []*github.Alert{}, nil, errors.New("Some error") +} + +func TestGetVulnerabilitiesFromClient(t *testing.T) { + ctx := context.Background() + t.Parallel() + t.Run("Success", func(t *testing.T) { + ghCodeqlScanningMock := githubCodeqlScanningMock{} + codeqlScanAuditInstance := NewCodeqlScanAuditInstance("", "", "", "", []string{}) + codeScanning, err := getVulnerabilitiesFromClient(ctx, &ghCodeqlScanningMock, "ref", &codeqlScanAuditInstance) + assert.NoError(t, err) + assert.Equal(t, 3, codeScanning.Total) + assert.Equal(t, 1, codeScanning.Audited) + }) + + t.Run("Error", func(t *testing.T) { + ghCodeqlScanningErrorMock := githubCodeqlScanningErrorMock{} + codeqlScanAuditInstance := NewCodeqlScanAuditInstance("", "", "", "", []string{}) + _, err := getVulnerabilitiesFromClient(ctx, &ghCodeqlScanningErrorMock, "ref", &codeqlScanAuditInstance) + assert.Error(t, err) + }) +} diff --git a/pkg/codeql/reporting.go b/pkg/codeql/reporting.go new file mode 100644 index 0000000000..e2ac9fa1f0 --- /dev/null +++ b/pkg/codeql/reporting.go @@ -0,0 +1,44 @@ +package codeql + +import ( + "encoding/json" + "path/filepath" + + "github.com/SAP/jenkins-library/pkg/log" + "github.com/SAP/jenkins-library/pkg/piperutils" + "github.com/pkg/errors" +) + +type CodeqlAudit struct { + ToolName string `json:"toolName"` + RepositoryUrl string `json:"repositoryUrl"` + RepositoryReferenceUrl string `json:"repositoryReferenceUrl"` //URL of PR or Branch where scan was performed + CodeScanningLink string `json:"codeScanningLink"` + ScanResults CodeqlScanning `json:"scanResults"` +} + +type CodeqlScanning struct { + Total int `json:"total"` + Audited int `json:"audited"` +} + +func WriteJSONReport(jsonReport CodeqlAudit, modulePath string) ([]piperutils.Path, error) { + utils := piperutils.Files{} + reportPaths := []piperutils.Path{} + + reportsDirectory := filepath.Join(modulePath, "codeql") + jsonComplianceReportPath := filepath.Join(reportsDirectory, "piper_codeql_report.json") + if err := utils.MkdirAll(reportsDirectory, 0777); err != nil { + return reportPaths, errors.Wrapf(err, "failed to create report directory") + } + + file, _ := json.Marshal(jsonReport) + if err := utils.FileWrite(jsonComplianceReportPath, file, 0666); err != nil { + log.SetErrorCategory(log.ErrorConfiguration) + return reportPaths, errors.Wrapf(err, "failed to write codeql json compliance report") + } + + 
reportPaths = append(reportPaths, piperutils.Path{Name: "Codeql JSON Compliance Report", Target: jsonComplianceReportPath})
+
+	return reportPaths, nil
+}
diff --git a/resources/metadata/codeqlExecuteScan.yaml b/resources/metadata/codeqlExecuteScan.yaml
index 65d5dce95d..42e9280cd9 100644
--- a/resources/metadata/codeqlExecuteScan.yaml
+++ b/resources/metadata/codeqlExecuteScan.yaml
@@ -39,6 +39,15 @@ spec:
           - type: vaultSecret
             default: github
            name: githubVaultSecretName
+      - name: githubApiUrl
+        description: "Set the GitHub API URL."
+        scope:
+          - GENERAL
+          - PARAMETERS
+          - STAGES
+          - STEPS
+        type: string
+        default: "https://api.github.com"
       - name: buildTool
         type: string
         description: Defines the build tool which is used for building the project.
@@ -142,6 +151,22 @@ spec:
           - name: commonPipelineEnvironment
             param: git/remoteCommitId
         type: string
+      - name: vulnerabilityThresholdTotal
+        description: "Threshold for maximum number of allowed vulnerabilities."
+        type: int
+        default: 0
+        scope:
+          - PARAMETERS
+          - STAGES
+          - STEPS
+      - name: checkForCompliance
+        description: "If set to true, the piper step checks for compliance based on vulnerability thresholds. Example - If total vulnerabilities are 10 and vulnerabilityThresholdTotal is set as 0, then the step throws a compliance error."
+        type: bool
+        default: false
+        scope:
+          - PARAMETERS
+          - STAGES
+          - STEPS
   containers:
     - image: ""
   outputs:
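For readers following the compliance flow added in patch 23/23, the sketch below shows one way the pkg/codeql helpers introduced above can be combined. It is a minimal, self-contained illustration only: the API URL, owner/repository, token, ref, threshold value and report URLs are placeholders, error handling is shortened, and the surrounding Piper step wiring is omitted.

    package main

    import (
    	"fmt"
    	"log"

    	"github.com/SAP/jenkins-library/pkg/codeql"
    )

    func main() {
    	// Placeholder values - replace with the real API URL, repository coordinates and token.
    	instance := codeql.NewCodeqlScanAuditInstance("https://api.github.com", "myOrg", "myRepo", "ghp_exampleToken", []string{})

    	// Fetch the code-scanning alerts for the analyzed ref; Total counts all alerts,
    	// Audited counts those that are no longer open.
    	scanResults, err := instance.GetVulnerabilities("refs/heads/main")
    	if err != nil {
    		log.Fatal(err)
    	}

    	// Same compliance rule as in runCodeqlExecuteScan: unaudited findings above the threshold
    	// make the repository non-compliant.
    	unaudited := scanResults.Total - scanResults.Audited
    	threshold := 0 // corresponds to vulnerabilityThresholdTotal
    	if unaudited > threshold {
    		fmt.Printf("not compliant: %d unaudited findings exceed the threshold of %d\n", unaudited, threshold)
    	}

    	// Write the JSON compliance report the same way the step does;
    	// with modulePath "./" it lands in ./codeql/piper_codeql_report.json.
    	audit := codeql.CodeqlAudit{
    		ToolName:               "codeql",
    		RepositoryUrl:          "https://github.example.com/myOrg/myRepo",
    		RepositoryReferenceUrl: "https://github.example.com/myOrg/myRepo/tree/main",
    		CodeScanningLink:       "https://github.example.com/myOrg/myRepo/security/code-scanning?query=is:open+ref:refs/heads/main",
    		ScanResults:            scanResults,
    	}
    	if _, err := codeql.WriteJSONReport(audit, "./"); err != nil {
    		log.Fatal(err)
    	}
    }

Inside the step itself these inputs are not hard-coded: they come from githubApiUrl, the resolved RepoInfo, vulnerabilityThresholdTotal and checkForCompliance.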