diff --git a/.github/workflows/build-adr.yml b/.github/workflows/build-adr.yml index f23a3d03e6..6e6fdcc71e 100644 --- a/.github/workflows/build-adr.yml +++ b/.github/workflows/build-adr.yml @@ -15,7 +15,7 @@ jobs: # required by Log4brains to work correctly (needs the whole Git history) fetch-depth: 0 - name: Setup Node - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: node-version: "14" - name: Install Log4brains And Build ADRs diff --git a/.github/workflows/consumer-tests-pr.yml b/.github/workflows/consumer-tests-pr.yml index 2e4140421b..f1aaa3fd1b 100644 --- a/.github/workflows/consumer-tests-pr.yml +++ b/.github/workflows/consumer-tests-pr.yml @@ -44,7 +44,7 @@ jobs: with: repository: ${{ steps.repository.outputs.repository }} ref: ${{ steps.branch_name.outputs.branch_name }} - - uses: actions/setup-java@v3 + - uses: actions/setup-java@v4 with: java-version: 11 distribution: zulu diff --git a/.github/workflows/consumer-tests.yml b/.github/workflows/consumer-tests.yml index defc95a4e8..ca4cdb0471 100644 --- a/.github/workflows/consumer-tests.yml +++ b/.github/workflows/consumer-tests.yml @@ -14,7 +14,7 @@ jobs: steps: - uses: styfle/cancel-workflow-action@0.11.0 - uses: actions/checkout@v4 - - uses: actions/setup-java@v3 + - uses: actions/setup-java@v4 with: java-version: 11 distribution: zulu diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index 5f1da37dfa..818fa6cdb4 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -16,7 +16,7 @@ jobs: - uses: actions/checkout@v4 - - uses: actions/setup-java@v3 + - uses: actions/setup-java@v4 with: java-version: 8 distribution: zulu diff --git a/.github/workflows/release-go.yml b/.github/workflows/release-go.yml index 7fc27d24ae..1f800f2c11 100644 --- a/.github/workflows/release-go.yml +++ b/.github/workflows/release-go.yml @@ -14,7 +14,7 @@ jobs: - uses: styfle/cancel-workflow-action@0.11.0 - uses: actions/checkout@v4 # Workaround for https://github.com/SAP/jenkins-library/issues/1723, build only works with jdk8 currently - - uses: actions/setup-java@v3 + - uses: actions/setup-java@v4 with: java-version: 8 distribution: zulu @@ -23,11 +23,9 @@ jobs: curl --insecure --silent --location --write-out '%{http_code}' --output ./piper_master https://github.com/SAP/jenkins-library/releases/latest/download/piper_master curl --insecure --silent --location --write-out '%{http_code}' --output ./piper_master-darwin.x86_64 https://github.com/SAP/jenkins-library/releases/latest/download/piper_master-darwin.x86_64 curl --insecure --silent --location --write-out '%{http_code}' --output ./piper_master-darwin.arm64 https://github.com/SAP/jenkins-library/releases/latest/download/piper_master-darwin.arm64 - curl --insecure --silent --location --write-out '%{http_code}' --output ./piper_master-win.x86_64.exe https://github.com/SAP/jenkins-library/releases/latest/download/piper_master-win.x86_64.exe cp ./piper_master ./piper cp ./piper_master-darwin.x86_64 ./piper-darwin.x86_64 cp ./piper_master-darwin.arm64 ./piper-darwin.arm64 - cp ./piper_master-win.x86_64.exe ./piper-win.x86_64.exe npm install semver --quiet echo "PIPER_version=v$(node_modules/.bin/semver -i minor $(curl --silent "https://api.github.com/repos/$GITHUB_REPOSITORY/releases/latest" | jq -r .tag_name))" >> $GITHUB_ENV - uses: SAP/project-piper-action@master @@ -35,7 +33,7 @@ jobs: with: piper-version: master command: githubPublishRelease - flags: --token ${{ secrets.GITHUB_TOKEN }} --assetPathList 
./piper_master --assetPathList ./piper --assetPathList ./piper_master-darwin.x86_64 --assetPathList ./piper-darwin.x86_64 --assetPathList ./piper_master-darwin.arm64 --assetPathList ./piper-darwin.arm64 --assetPathList ./piper_master-win.x86_64.exe --assetPathList ./piper-win.x86_64.exe + flags: --token ${{ secrets.GITHUB_TOKEN }} --assetPathList ./piper_master --assetPathList ./piper --assetPathList ./piper_master-darwin.x86_64 --assetPathList ./piper-darwin.x86_64 --assetPathList ./piper_master-darwin.arm64 --assetPathList ./piper-darwin.arm64 - name: Build and publish jar for consumption in unit tests run: mvn package - uses: SAP/project-piper-action@master diff --git a/.github/workflows/upload-go-master.yml b/.github/workflows/upload-go-master.yml index 92ebb4ca25..058101a3ea 100644 --- a/.github/workflows/upload-go-master.yml +++ b/.github/workflows/upload-go-master.yml @@ -61,19 +61,3 @@ jobs: piper-version: master command: githubPublishRelease flags: --token ${{ secrets.GITHUB_TOKEN }} --version latest --assetPath ./piper_master-darwin.arm64 - - env: - CGO_ENABLED: 0 - GOOS: windows - GOARCH: amd64 - run: | - # See https://golang.org/cmd/link/ for info on -w (omit the DWARF symbol table) and -s (omit the symbol table and debug information) - # We use those flags to get a smaller compiled binary for faster downloads. - go build -ldflags "-w -s -X github.com/SAP/jenkins-library/cmd.GitCommit=${GITHUB_SHA} \ - -X github.com/SAP/jenkins-library/pkg/log.LibraryRepository=${GITHUB_REPOSITORY} \ - -X github.com/SAP/jenkins-library/pkg/telemetry.LibraryRepository=https://github.com/${GITHUB_REPOSITORY}.git" \ - -o piper_master-win.x86_64.exe . - - uses: SAP/project-piper-action@master - with: - piper-version: master - command: githubPublishRelease - flags: --token ${{ secrets.GITHUB_TOKEN }} --version latest --assetPath ./piper_master-win.x86_64.exe diff --git a/.github/workflows/verify-groovy.yml b/.github/workflows/verify-groovy.yml index 64e4df6e49..6132fd591c 100644 --- a/.github/workflows/verify-groovy.yml +++ b/.github/workflows/verify-groovy.yml @@ -16,7 +16,7 @@ jobs: - uses: actions/checkout@v4 - - uses: actions/setup-java@v3 + - uses: actions/setup-java@v4 with: java-version: 8 distribution: zulu diff --git a/cmd/abapAddonAssemblyKitCheckCVs_generated.go b/cmd/abapAddonAssemblyKitCheckCVs_generated.go index 63fcba0d9a..55e7b8e475 100644 --- a/cmd/abapAddonAssemblyKitCheckCVs_generated.go +++ b/cmd/abapAddonAssemblyKitCheckCVs_generated.go @@ -96,7 +96,7 @@ For Terminology refer to the [Scenario Description](https://www.project-piper.io log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/abapAddonAssemblyKitCheckPV_generated.go b/cmd/abapAddonAssemblyKitCheckPV_generated.go index e8b93b13a3..3d3cbd2464 100644 --- a/cmd/abapAddonAssemblyKitCheckPV_generated.go +++ b/cmd/abapAddonAssemblyKitCheckPV_generated.go @@ -96,7 +96,7 @@ For Terminology refer to the [Scenario Description](https://www.project-piper.io log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = 
&log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/abapAddonAssemblyKitCreateTargetVector_generated.go b/cmd/abapAddonAssemblyKitCreateTargetVector_generated.go index f0cb8b058f..b5afb1bc98 100644 --- a/cmd/abapAddonAssemblyKitCreateTargetVector_generated.go +++ b/cmd/abapAddonAssemblyKitCreateTargetVector_generated.go @@ -96,7 +96,7 @@ For Terminology refer to the [Scenario Description](https://www.project-piper.io log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/abapAddonAssemblyKitPublishTargetVector_generated.go b/cmd/abapAddonAssemblyKitPublishTargetVector_generated.go index 6ea810b3b3..8f9bc5afbb 100644 --- a/cmd/abapAddonAssemblyKitPublishTargetVector_generated.go +++ b/cmd/abapAddonAssemblyKitPublishTargetVector_generated.go @@ -67,7 +67,7 @@ For Terminology refer to the [Scenario Description](https://www.project-piper.io log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/abapAddonAssemblyKitRegisterPackages_generated.go b/cmd/abapAddonAssemblyKitRegisterPackages_generated.go index a376088e6d..4700ca5909 100644 --- a/cmd/abapAddonAssemblyKitRegisterPackages_generated.go +++ b/cmd/abapAddonAssemblyKitRegisterPackages_generated.go @@ -97,7 +97,7 @@ For Terminology refer to the [Scenario Description](https://www.project-piper.io log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/abapAddonAssemblyKitReleasePackages_generated.go b/cmd/abapAddonAssemblyKitReleasePackages_generated.go index 224f68c6c2..7b12dd8ab0 100644 --- a/cmd/abapAddonAssemblyKitReleasePackages_generated.go +++ b/cmd/abapAddonAssemblyKitReleasePackages_generated.go @@ -95,7 +95,7 @@ For Terminology refer to the [Scenario Description](https://www.project-piper.io log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/abapAddonAssemblyKitReserveNextPackages_generated.go b/cmd/abapAddonAssemblyKitReserveNextPackages_generated.go index 92136637c7..214441ebf5 100644 --- a/cmd/abapAddonAssemblyKitReserveNextPackages_generated.go +++ b/cmd/abapAddonAssemblyKitReserveNextPackages_generated.go @@ -101,7 +101,7 @@ For Terminology refer to the [Scenario Description](https://www.project-piper.io log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if 
len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/abapEnvironmentAssembleConfirm_generated.go b/cmd/abapEnvironmentAssembleConfirm_generated.go index 48beb91a39..f916a94a42 100644 --- a/cmd/abapEnvironmentAssembleConfirm_generated.go +++ b/cmd/abapEnvironmentAssembleConfirm_generated.go @@ -99,7 +99,7 @@ func AbapEnvironmentAssembleConfirmCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/abapEnvironmentAssemblePackages.go b/cmd/abapEnvironmentAssemblePackages.go index 2a5064f97f..4ee80e2045 100644 --- a/cmd/abapEnvironmentAssemblePackages.go +++ b/cmd/abapEnvironmentAssemblePackages.go @@ -154,6 +154,10 @@ func (br *buildWithRepository) start() error { ValueID: "CVERS", Value: br.repo.Name + "." + br.repo.Version + "." + br.repo.SpLevel, }, + { + ValueID: "SEMANTIC_VERSION", + Value: br.repo.VersionYAML, + }, { ValueID: "PACKAGE_TYPE", Value: br.repo.PackageType, @@ -180,11 +184,6 @@ func (br *buildWithRepository) start() error { Value: br.repo.PredecessorCommitID}) } if br.repo.CommitID != "" { - // old value to be used until 2302 [can be deleted earliest with 2311] - valuesInput.Values = append(valuesInput.Values, - abapbuild.Value{ValueID: "ACTUAL_DELIVERY_COMMIT", - Value: br.repo.CommitID}) - // new value used as of 2302 valuesInput.Values = append(valuesInput.Values, abapbuild.Value{ValueID: "CURRENT_DELIVERY_COMMIT", Value: br.repo.CommitID}) diff --git a/cmd/abapEnvironmentAssemblePackages_generated.go b/cmd/abapEnvironmentAssemblePackages_generated.go index ee5edda5d3..3923159cec 100644 --- a/cmd/abapEnvironmentAssemblePackages_generated.go +++ b/cmd/abapEnvironmentAssemblePackages_generated.go @@ -101,7 +101,7 @@ Platform ABAP Environment system and saves the corresponding [SAR archive](https log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/abapEnvironmentBuild_generated.go b/cmd/abapEnvironmentBuild_generated.go index f2938b36ae..dbbb8ad84e 100644 --- a/cmd/abapEnvironmentBuild_generated.go +++ b/cmd/abapEnvironmentBuild_generated.go @@ -114,7 +114,7 @@ func AbapEnvironmentBuildCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/abapEnvironmentCheckoutBranch.go b/cmd/abapEnvironmentCheckoutBranch.go index 8fcb26ead3..a1c7bd9642 100644 --- a/cmd/abapEnvironmentCheckoutBranch.go +++ b/cmd/abapEnvironmentCheckoutBranch.go @@ -1,10 +1,7 @@ package cmd 
import ( - "encoding/json" "fmt" - "io" - "net/http/cookiejar" "reflect" "time" @@ -28,49 +25,39 @@ func abapEnvironmentCheckoutBranch(options abapEnvironmentCheckoutBranchOptions, Exec: &c, } - client := piperhttp.Client{} + apiManager := abaputils.SoftwareComponentApiManager{ + Client: &piperhttp.Client{}, + PollIntervall: 5 * time.Second, + } // error situations should stop execution through log.Entry().Fatal() call which leads to an os.Exit(1) in the end - err := runAbapEnvironmentCheckoutBranch(&options, &autils, &client) + err := runAbapEnvironmentCheckoutBranch(&options, &autils, &apiManager) if err != nil { log.Entry().WithError(err).Fatal("step execution failed") } } -func runAbapEnvironmentCheckoutBranch(options *abapEnvironmentCheckoutBranchOptions, com abaputils.Communication, client piperhttp.Sender) (err error) { +func runAbapEnvironmentCheckoutBranch(options *abapEnvironmentCheckoutBranchOptions, com abaputils.Communication, apiManager abaputils.SoftwareComponentApiManagerInterface) (err error) { // Mapping for options subOptions := convertCheckoutConfig(options) // Determine the host, user and password, either via the input parameters or via a cloud foundry service key - connectionDetails, errorGetInfo := com.GetAbapCommunicationArrangementInfo(subOptions, "/sap/opu/odata/sap/MANAGE_GIT_REPOSITORY/") + connectionDetails, errorGetInfo := com.GetAbapCommunicationArrangementInfo(subOptions, "") if errorGetInfo != nil { log.Entry().WithError(errorGetInfo).Fatal("Parameters for the ABAP Connection not available") } - // Configuring the HTTP Client and CookieJar - cookieJar, errorCookieJar := cookiejar.New(nil) - if errorCookieJar != nil { - return errors.Wrap(errorCookieJar, "Could not create a Cookie Jar") - } - clientOptions := piperhttp.ClientOptions{ - MaxRequestDuration: 180 * time.Second, - CookieJar: cookieJar, - Username: connectionDetails.User, - Password: connectionDetails.Password, - } - client.SetOptions(clientOptions) - pollIntervall := com.GetPollIntervall() - repositories := []abaputils.Repository{} err = checkCheckoutBranchRepositoryConfiguration(*options) - - if err == nil { - repositories, err = abaputils.GetRepositories(&abaputils.RepositoriesConfig{BranchName: options.BranchName, RepositoryName: options.RepositoryName, Repositories: options.Repositories}, true) + if err != nil { + return errors.Wrap(err, "Configuration is not consistent") } - if err == nil { - err = checkoutBranches(repositories, connectionDetails, client, pollIntervall) + repositories, err = abaputils.GetRepositories(&abaputils.RepositoriesConfig{BranchName: options.BranchName, RepositoryName: options.RepositoryName, Repositories: options.Repositories}, true) + if err != nil { + return errors.Wrap(err, "Could not read repositories") } + err = checkoutBranches(repositories, connectionDetails, apiManager) if err != nil { return fmt.Errorf("Something failed during the checkout: %w", err) } @@ -79,10 +66,10 @@ func runAbapEnvironmentCheckoutBranch(options *abapEnvironmentCheckoutBranchOpti return nil } -func checkoutBranches(repositories []abaputils.Repository, checkoutConnectionDetails abaputils.ConnectionDetailsHTTP, client piperhttp.Sender, pollIntervall time.Duration) (err error) { +func checkoutBranches(repositories []abaputils.Repository, checkoutConnectionDetails abaputils.ConnectionDetailsHTTP, apiManager abaputils.SoftwareComponentApiManagerInterface) (err error) { log.Entry().Infof("Start switching %v branches", len(repositories)) for _, repo := range repositories { - err = 
handleCheckout(repo, checkoutConnectionDetails, client, pollIntervall) + err = handleCheckout(repo, checkoutConnectionDetails, apiManager) if err != nil { break } @@ -90,67 +77,9 @@ func checkoutBranches(repositories []abaputils.Repository, checkoutConnectionDet return err } -func triggerCheckout(repositoryName string, branchName string, checkoutConnectionDetails abaputils.ConnectionDetailsHTTP, client piperhttp.Sender) (abaputils.ConnectionDetailsHTTP, error) { - uriConnectionDetails := checkoutConnectionDetails - uriConnectionDetails.URL = "" - checkoutConnectionDetails.XCsrfToken = "fetch" - - if repositoryName == "" || branchName == "" { - return uriConnectionDetails, fmt.Errorf("Failed to trigger checkout: %w", errors.New("Repository and/or Branch Configuration is empty. Please make sure that you have specified the correct values")) - } - - // Loging into the ABAP System - getting the x-csrf-token and cookies - resp, err := abaputils.GetHTTPResponse("HEAD", checkoutConnectionDetails, nil, client) - if err != nil { - err = abaputils.HandleHTTPError(resp, err, "Authentication on the ABAP system failed", checkoutConnectionDetails) - return uriConnectionDetails, err - } - defer resp.Body.Close() - - log.Entry().WithField("StatusCode", resp.Status).WithField("ABAP Endpoint", checkoutConnectionDetails.URL).Debug("Authentication on the ABAP system was successful") - uriConnectionDetails.XCsrfToken = resp.Header.Get("X-Csrf-Token") - checkoutConnectionDetails.XCsrfToken = uriConnectionDetails.XCsrfToken - - // the request looks like: POST/sap/opu/odata/sap/MANAGE_GIT_REPOSITORY/checkout_branch?branch_name='newBranch'&sc_name=/DMO/GIT_REPOSITORY' - checkoutConnectionDetails.URL = checkoutConnectionDetails.URL + `/checkout_branch?branch_name='` + branchName + `'&sc_name='` + repositoryName + `'` - jsonBody := []byte(``) - - // no JSON body needed - resp, err = abaputils.GetHTTPResponse("POST", checkoutConnectionDetails, jsonBody, client) - if err != nil { - err = abaputils.HandleHTTPError(resp, err, "Could not trigger checkout of branch "+branchName, uriConnectionDetails) - return uriConnectionDetails, err - } - defer resp.Body.Close() - log.Entry().WithField("StatusCode", resp.StatusCode).WithField("repositoryName", repositoryName).WithField("branchName", branchName).Debug("Triggered checkout of branch") - - // Parse Response - var body abaputils.PullEntity - var abapResp map[string]*json.RawMessage - bodyText, errRead := io.ReadAll(resp.Body) - if errRead != nil { - return uriConnectionDetails, err - } - if err := json.Unmarshal(bodyText, &abapResp); err != nil { - return uriConnectionDetails, err - } - if err := json.Unmarshal(*abapResp["d"], &body); err != nil { - return uriConnectionDetails, err - } - - if reflect.DeepEqual(abaputils.PullEntity{}, body) { - log.Entry().WithField("StatusCode", resp.Status).WithField("branchName", branchName).Error("Could not switch to specified branch") - err := errors.New("Request to ABAP System failed") - return uriConnectionDetails, err - } - - uriConnectionDetails.URL = body.Metadata.URI - return uriConnectionDetails, nil -} - func checkCheckoutBranchRepositoryConfiguration(options abapEnvironmentCheckoutBranchOptions) error { if options.Repositories == "" && options.RepositoryName == "" && options.BranchName == "" { - return fmt.Errorf("Checking configuration failed: %w", errors.New("You have not specified any repository or branch configuration to be checked out in the ABAP Environment System. 
Please make sure that you specified the repositories with their branches that should be checked out either in a dedicated file or via the parameters 'repositoryName' and 'branchName'. For more information please read the User documentation")) + return errors.New("You have not specified any repository or branch configuration to be checked out in the ABAP Environment System. Please make sure that you specified the repositories with their branches that should be checked out either in a dedicated file or via the parameters 'repositoryName' and 'branchName'. For more information please read the user documentation") } if options.Repositories != "" && options.RepositoryName != "" && options.BranchName != "" { log.Entry().Info("It seems like you have specified repositories directly via the configuration parameters 'repositoryName' and 'branchName' as well as in the dedicated repositories configuration file. Please note that in this case both configurations will be handled and checked out.") @@ -166,20 +95,26 @@ func checkCheckoutBranchRepositoryConfiguration(options abapEnvironmentCheckoutB return nil } -func handleCheckout(repo abaputils.Repository, checkoutConnectionDetails abaputils.ConnectionDetailsHTTP, client piperhttp.Sender, pollIntervall time.Duration) (err error) { +func handleCheckout(repo abaputils.Repository, checkoutConnectionDetails abaputils.ConnectionDetailsHTTP, apiManager abaputils.SoftwareComponentApiManagerInterface) (err error) { + if reflect.DeepEqual(abaputils.Repository{}, repo) { return fmt.Errorf("Failed to read repository configuration: %w", errors.New("Error in configuration, most likely you have entered empty or wrong configuration values. Please make sure that you have correctly specified the branches in the repositories to be checked out")) } startCheckoutLogs(repo.Branch, repo.Name) - uriConnectionDetails, err := triggerCheckout(repo.Name, repo.Branch, checkoutConnectionDetails, client) + api, errGetAPI := apiManager.GetAPI(checkoutConnectionDetails, repo) + if errGetAPI != nil { + return errors.Wrap(errGetAPI, "Could not initialize the connection to the system") + } + + err = api.CheckoutBranch() if err != nil { return fmt.Errorf("Failed to trigger Checkout: %w", errors.New("Checkout of "+repo.Branch+" for software component "+repo.Name+" failed on the ABAP System")) } // Polling the status of the repository import on the ABAP Environment system - status, err := abaputils.PollEntity(repo.Name, uriConnectionDetails, client, pollIntervall) - if err != nil { + status, errorPollEntity := abaputils.PollEntity(api, apiManager.GetPollIntervall()) + if errorPollEntity != nil { return fmt.Errorf("Failed to poll Checkout: %w", errors.New("Status of checkout action on repository"+repo.Name+" failed on the ABAP System")) } const abapStatusCheckoutFail = "E" diff --git a/cmd/abapEnvironmentCheckoutBranch_generated.go b/cmd/abapEnvironmentCheckoutBranch_generated.go index ea0ded5db5..2b3230660b 100644 --- a/cmd/abapEnvironmentCheckoutBranch_generated.go +++ b/cmd/abapEnvironmentCheckoutBranch_generated.go @@ -73,7 +73,7 @@ Please provide either of the following options: log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/abapEnvironmentCheckoutBranch_test.go 
b/cmd/abapEnvironmentCheckoutBranch_test.go index 91bbe3cf62..87bf02fe43 100644 --- a/cmd/abapEnvironmentCheckoutBranch_test.go +++ b/cmd/abapEnvironmentCheckoutBranch_test.go @@ -7,9 +7,9 @@ import ( "encoding/json" "os" "testing" + "time" "github.com/SAP/jenkins-library/pkg/abaputils" - "github.com/pkg/errors" "github.com/stretchr/testify/assert" ) @@ -67,11 +67,12 @@ func TestCheckoutBranchStep(t *testing.T) { StatusCode: 200, } - err := runAbapEnvironmentCheckoutBranch(&config, &autils, client) + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err := runAbapEnvironmentCheckoutBranch(&config, &autils, apiManager) assert.NoError(t, err, "Did not expect error") }) t.Run("Run Step Failure - empty config", func(t *testing.T) { - expectedErrorMessage := "Something failed during the checkout: Checking configuration failed: You have not specified any repository or branch configuration to be checked out in the ABAP Environment System. Please make sure that you specified the repositories with their branches that should be checked out either in a dedicated file or via the parameters 'repositoryName' and 'branchName'. For more information please read the User documentation" + expectedErrorMessage := "Configuration is not consistent: You have not specified any repository or branch configuration to be checked out in the ABAP Environment System. Please make sure that you specified the repositories with their branches that should be checked out either in a dedicated file or via the parameters 'repositoryName' and 'branchName'. For more information please read the user documentation" var autils = abaputils.AUtilsMock{} defer autils.Cleanup() @@ -85,7 +86,6 @@ func TestCheckoutBranchStep(t *testing.T) { logResultError := `{"d": { "sc_name": "/DMO/SWC", "status": "S", "to_Log_Overview": { "results": [ { "log_index": 1, "log_name": "Main Import", "type_of_found_issues": "Error", "timestamp": "/Date(1644332299000+0000)/", "to_Log_Protocol": { "results": [ { "log_index": 1, "index_no": "1", "log_name": "", "type": "Info", "descr": "Main import", "timestamp": null, "criticality": 0 } ] } } ] } } }` client := &abaputils.ClientMock{ BodyList: []string{ - `{"d" : [] }`, `{"d" : ` + executionLogStringCheckout + `}`, logResultError, `{"d" : { "status" : "E" } }`, @@ -96,7 +96,8 @@ func TestCheckoutBranchStep(t *testing.T) { StatusCode: 200, } - err := runAbapEnvironmentCheckoutBranch(&config, &autils, client) + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err := runAbapEnvironmentCheckoutBranch(&config, &autils, apiManager) assert.EqualError(t, err, expectedErrorMessage) }) t.Run("Run Step Failure - wrong status", func(t *testing.T) { @@ -124,7 +125,6 @@ func TestCheckoutBranchStep(t *testing.T) { logResultError := `{"d": { "sc_name": "/DMO/SWC", "status": "S", "to_Log_Overview": { "results": [ { "log_index": 1, "log_name": "Main Import", "type_of_found_issues": "Error", "timestamp": "/Date(1644332299000+0000)/", "to_Log_Protocol": { "results": [ { "log_index": 1, "index_no": "1", "log_name": "", "type": "Info", "descr": "Main import", "timestamp": null, "criticality": 0 } ] } } ] } } }` client := &abaputils.ClientMock{ BodyList: []string{ - `{"d" : [] }`, `{"d" : ` + executionLogStringCheckout + `}`, logResultError, `{"d" : { "status" : "E" } }`, @@ -135,7 +135,8 @@ func TestCheckoutBranchStep(t *testing.T) { StatusCode: 200, } - err := 
runAbapEnvironmentCheckoutBranch(&config, &autils, client) + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err := runAbapEnvironmentCheckoutBranch(&config, &autils, apiManager) assert.EqualError(t, err, expectedErrorMessage) }) t.Run("Success case: checkout Branches from file config", func(t *testing.T) { @@ -183,11 +184,12 @@ repositories: Password: "testPassword", Repositories: "repositoriesTest.yml", } - err = runAbapEnvironmentCheckoutBranch(&config, &autils, client) + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err = runAbapEnvironmentCheckoutBranch(&config, &autils, apiManager) assert.NoError(t, err) }) t.Run("Failure case: checkout Branches from empty file config", func(t *testing.T) { - expectedErrorMessage := "Something failed during the checkout: Error in config file repositoriesTest.yml, AddonDescriptor doesn't contain any repositories" + expectedErrorMessage := "Could not read repositories: Error in config file repositoriesTest.yml, AddonDescriptor doesn't contain any repositories" var autils = abaputils.AUtilsMock{} defer autils.Cleanup() @@ -226,11 +228,12 @@ repositories: Password: "testPassword", Repositories: "repositoriesTest.yml", } - err = runAbapEnvironmentCheckoutBranch(&config, &autils, client) + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err = runAbapEnvironmentCheckoutBranch(&config, &autils, apiManager) assert.EqualError(t, err, expectedErrorMessage) }) t.Run("Failure case: checkout Branches from wrong file config", func(t *testing.T) { - expectedErrorMessage := "Something failed during the checkout: Could not unmarshal repositoriesTest.yml" + expectedErrorMessage := "Could not read repositories: Could not unmarshal repositoriesTest.yml" var autils = abaputils.AUtilsMock{} defer autils.Cleanup() @@ -274,88 +277,12 @@ repositories: Password: "testPassword", Repositories: "repositoriesTest.yml", } - err = runAbapEnvironmentCheckoutBranch(&config, &autils, client) + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err = runAbapEnvironmentCheckoutBranch(&config, &autils, apiManager) assert.EqualError(t, err, expectedErrorMessage) }) } -func TestTriggerCheckout(t *testing.T) { - t.Run("Test trigger checkout: success case", func(t *testing.T) { - - // given - receivedURI := "example.com/Branches" - uriExpected := receivedURI - tokenExpected := "myToken" - - client := &abaputils.ClientMock{ - Body: `{"d" : { "__metadata" : { "uri" : "` + receivedURI + `" } } }`, - Token: tokenExpected, - StatusCode: 200, - } - config := abapEnvironmentCheckoutBranchOptions{ - CfAPIEndpoint: "https://api.endpoint.com", - CfOrg: "testOrg", - CfSpace: "testSpace", - CfServiceInstance: "testInstance", - CfServiceKeyName: "testServiceKey", - Username: "testUser", - Password: "testPassword", - RepositoryName: "testRepo1", - BranchName: "feature-unit-test", - } - con := abaputils.ConnectionDetailsHTTP{ - User: "MY_USER", - Password: "MY_PW", - URL: "https://api.endpoint.com/Branches", - } - // when - entityConnection, err := triggerCheckout(config.RepositoryName, config.BranchName, con, client) - - // then - assert.NoError(t, err) - assert.Equal(t, uriExpected, entityConnection.URL) - assert.Equal(t, tokenExpected, entityConnection.XCsrfToken) - }) - - t.Run("Test trigger checkout: ABAP 
Error case", func(t *testing.T) { - - // given - errorMessage := "ABAP Error Message" - errorCode := "ERROR/001" - HTTPErrorMessage := "HTTP Error Message" - combinedErrorMessage := "HTTP Error Message: ERROR/001 - ABAP Error Message" - - client := &abaputils.ClientMock{ - Body: `{"error" : { "code" : "` + errorCode + `", "message" : { "lang" : "en", "value" : "` + errorMessage + `" } } }`, - Token: "myToken", - StatusCode: 400, - Error: errors.New(HTTPErrorMessage), - } - config := abapEnvironmentCheckoutBranchOptions{ - CfAPIEndpoint: "https://api.endpoint.com", - CfOrg: "testOrg", - CfSpace: "testSpace", - CfServiceInstance: "testInstance", - CfServiceKeyName: "testServiceKey", - Username: "testUser", - Password: "testPassword", - RepositoryName: "testRepo1", - BranchName: "feature-unit-test", - } - con := abaputils.ConnectionDetailsHTTP{ - User: "MY_USER", - Password: "MY_PW", - URL: "https://api.endpoint.com/Branches", - } - - // when - _, err := triggerCheckout(config.RepositoryName, config.BranchName, con, client) - - // then - assert.Equal(t, combinedErrorMessage, err.Error(), "Different error message expected") - }) -} - func TestCheckoutConfigChecker(t *testing.T) { t.Run("Success case: check config", func(t *testing.T) { config := abapEnvironmentCheckoutBranchOptions{ @@ -374,7 +301,7 @@ func TestCheckoutConfigChecker(t *testing.T) { assert.NoError(t, err) }) t.Run("Failure case: check empty config", func(t *testing.T) { - expectedErrorMessage := "Checking configuration failed: You have not specified any repository or branch configuration to be checked out in the ABAP Environment System. Please make sure that you specified the repositories with their branches that should be checked out either in a dedicated file or via the parameters 'repositoryName' and 'branchName'. For more information please read the User documentation" + expectedErrorMessage := "You have not specified any repository or branch configuration to be checked out in the ABAP Environment System. Please make sure that you specified the repositories with their branches that should be checked out either in a dedicated file or via the parameters 'repositoryName' and 'branchName'. 
For more information please read the user documentation" config := abapEnvironmentCheckoutBranchOptions{} err := checkCheckoutBranchRepositoryConfiguration(config) diff --git a/cmd/abapEnvironmentCloneGitRepo.go b/cmd/abapEnvironmentCloneGitRepo.go index 7b6db9875d..9776312b2e 100644 --- a/cmd/abapEnvironmentCloneGitRepo.go +++ b/cmd/abapEnvironmentCloneGitRepo.go @@ -1,12 +1,6 @@ package cmd import ( - "encoding/json" - "fmt" - "io" - "net/http" - "net/http/cookiejar" - "reflect" "time" "github.com/SAP/jenkins-library/pkg/abaputils" @@ -28,81 +22,138 @@ func abapEnvironmentCloneGitRepo(config abapEnvironmentCloneGitRepoOptions, _ *t Exec: &c, } - client := piperhttp.Client{} + apiManager := abaputils.SoftwareComponentApiManager{ + Client: &piperhttp.Client{}, + PollIntervall: 5 * time.Second, + } // error situations should stop execution through log.Entry().Fatal() call which leads to an os.Exit(1) in the end - err := runAbapEnvironmentCloneGitRepo(&config, &autils, &client) + err := runAbapEnvironmentCloneGitRepo(&config, &autils, &apiManager) if err != nil { log.Entry().WithError(err).Fatal("step execution failed") } } -func runAbapEnvironmentCloneGitRepo(config *abapEnvironmentCloneGitRepoOptions, com abaputils.Communication, client piperhttp.Sender) error { +func runAbapEnvironmentCloneGitRepo(config *abapEnvironmentCloneGitRepoOptions, com abaputils.Communication, apiManager abaputils.SoftwareComponentApiManagerInterface) error { // Mapping for options subOptions := convertCloneConfig(config) + errConfig := checkConfiguration(config) + if errConfig != nil { + return errors.Wrap(errConfig, "The provided configuration is not allowed") + } + + repositories, errGetRepos := abaputils.GetRepositories(&abaputils.RepositoriesConfig{BranchName: config.BranchName, RepositoryName: config.RepositoryName, Repositories: config.Repositories}, false) + if errGetRepos != nil { + return errors.Wrap(errGetRepos, "Could not read repositories") + } + // Determine the host, user and password, either via the input parameters or via a cloud foundry service key connectionDetails, errorGetInfo := com.GetAbapCommunicationArrangementInfo(subOptions, "") if errorGetInfo != nil { return errors.Wrap(errorGetInfo, "Parameters for the ABAP Connection not available") } - // Configuring the HTTP Client and CookieJar - cookieJar, errorCookieJar := cookiejar.New(nil) - if errorCookieJar != nil { - return errors.Wrap(errorCookieJar, "Could not create a Cookie Jar") - } - - client.SetOptions(piperhttp.ClientOptions{ - MaxRequestDuration: 180 * time.Second, - CookieJar: cookieJar, - Username: connectionDetails.User, - Password: connectionDetails.Password, - }) + log.Entry().Infof("Start cloning %v repositories", len(repositories)) + for _, repo := range repositories { - errConfig := checkConfiguration(config) - if errConfig != nil { - return errors.Wrap(errConfig, "The provided configuration is not allowed") + cloneError := cloneSingleRepo(apiManager, connectionDetails, repo, config, com) + if cloneError != nil { + return cloneError + } } + abaputils.AddDefaultDashedLine(1) + log.Entry().Info("All repositories were cloned successfully") + return nil +} - repositories, errGetRepos := abaputils.GetRepositories(&abaputils.RepositoriesConfig{BranchName: config.BranchName, RepositoryName: config.RepositoryName, Repositories: config.Repositories}, true) - if errGetRepos != nil { - return fmt.Errorf("Something failed during the clone: %w", errGetRepos) +func cloneSingleRepo(apiManager 
abaputils.SoftwareComponentApiManagerInterface, connectionDetails abaputils.ConnectionDetailsHTTP, repo abaputils.Repository, config *abapEnvironmentCloneGitRepoOptions, com abaputils.Communication) error { + + // New API instance for each request + // Triggering the Clone of the repository into the ABAP Environment system + // Polling the status of the repository import on the ABAP Environment system + // If the repository had been cloned already, as checkout/pull has been done - polling the status is not necessary anymore + api, errGetAPI := apiManager.GetAPI(connectionDetails, repo) + if errGetAPI != nil { + return errors.Wrap(errGetAPI, "Could not initialize the connection to the system") } - log.Entry().Infof("Start cloning %v repositories", len(repositories)) - for _, repo := range repositories { + logString := repo.GetCloneLogString() + errorString := "Clone of " + logString + " failed on the ABAP system" - logString := repo.GetCloneLogString() - errorString := "Clone of " + logString + " failed on the ABAP system" + abaputils.AddDefaultDashedLine(1) + log.Entry().Info("Start cloning " + logString) + abaputils.AddDefaultDashedLine(1) - abaputils.AddDefaultDashedLine() - log.Entry().Info("Start cloning " + logString) - abaputils.AddDefaultDashedLine() + alreadyCloned, activeBranch, errCheckCloned := api.GetRepository() + if errCheckCloned != nil { + return errors.Wrapf(errCheckCloned, errorString) + } - // Triggering the Clone of the repository into the ABAP Environment system - uriConnectionDetails, errorTriggerClone, didCheckoutPullInstead := triggerClone(repo, connectionDetails, client) - if errorTriggerClone != nil { - return errors.Wrapf(errorTriggerClone, errorString) + if !alreadyCloned { + errClone := api.Clone() + if errClone != nil { + return errors.Wrapf(errClone, errorString) } - if !didCheckoutPullInstead { - // Polling the status of the repository import on the ABAP Environment system - // If the repository had been cloned already, as checkout/pull has been done - polling the status is not necessary anymore - status, errorPollEntity := abaputils.PollEntity(repo.Name, uriConnectionDetails, client, com.GetPollIntervall()) - if errorPollEntity != nil { - return errors.Wrapf(errorPollEntity, errorString) - } - if status == "E" { - return errors.New("Clone of " + logString + " failed on the ABAP System") + status, errorPollEntity := abaputils.PollEntity(api, apiManager.GetPollIntervall()) + if errorPollEntity != nil { + return errors.Wrapf(errorPollEntity, errorString) + } + if status == "E" { + return errors.New("Clone of " + logString + " failed on the ABAP System") + } + log.Entry().Info("The " + logString + " was cloned successfully") + } else { + abaputils.AddDefaultDashedLine(2) + log.Entry().Infof("%s", "The repository / software component has already been cloned on the ABAP Environment system ") + log.Entry().Infof("%s", "If required, a `checkout branch`, and a `pull` will be performed instead") + abaputils.AddDefaultDashedLine(2) + var returnedError error + if repo.Branch != "" && !(activeBranch == repo.Branch) { + returnedError = runAbapEnvironmentCheckoutBranch(getCheckoutOptions(config, repo), com, apiManager) + abaputils.AddDefaultDashedLine(2) + if returnedError != nil { + return returnedError } - log.Entry().Info("The " + logString + " was cloned successfully") } + returnedError = runAbapEnvironmentPullGitRepo(getPullOptions(config, repo), com, apiManager) + return returnedError } - abaputils.AddDefaultDashedLine() - log.Entry().Info("All repositories were 
cloned successfully") return nil } +func getCheckoutOptions(config *abapEnvironmentCloneGitRepoOptions, repo abaputils.Repository) *abapEnvironmentCheckoutBranchOptions { + checkoutOptions := abapEnvironmentCheckoutBranchOptions{ + Username: config.Username, + Password: config.Password, + Host: config.Host, + RepositoryName: repo.Name, + BranchName: repo.Branch, + CfAPIEndpoint: config.CfAPIEndpoint, + CfOrg: config.CfOrg, + CfServiceInstance: config.CfServiceInstance, + CfServiceKeyName: config.CfServiceKeyName, + CfSpace: config.CfSpace, + } + return &checkoutOptions +} + +func getPullOptions(config *abapEnvironmentCloneGitRepoOptions, repo abaputils.Repository) *abapEnvironmentPullGitRepoOptions { + pullOptions := abapEnvironmentPullGitRepoOptions{ + Username: config.Username, + Password: config.Password, + Host: config.Host, + RepositoryName: repo.Name, + CommitID: repo.CommitID, + CfAPIEndpoint: config.CfAPIEndpoint, + CfOrg: config.CfOrg, + CfServiceInstance: config.CfServiceInstance, + CfServiceKeyName: config.CfServiceKeyName, + CfSpace: config.CfSpace, + } + return &pullOptions +} + func checkConfiguration(config *abapEnvironmentCloneGitRepoOptions) error { if config.Repositories != "" && config.RepositoryName != "" { return errors.New("It is not allowed to configure the parameters `repositories`and `repositoryName` at the same time") @@ -113,125 +164,14 @@ func checkConfiguration(config *abapEnvironmentCloneGitRepoOptions) error { return nil } -func triggerClone(repo abaputils.Repository, cloneConnectionDetails abaputils.ConnectionDetailsHTTP, client piperhttp.Sender) (abaputils.ConnectionDetailsHTTP, error, bool) { +func triggerClone(repo abaputils.Repository, api abaputils.SoftwareComponentApiInterface) (error, bool) { - uriConnectionDetails := cloneConnectionDetails - cloneConnectionDetails.XCsrfToken = "fetch" - - cloneConnectionDetails.URL = cloneConnectionDetails.URL + "/sap/opu/odata/sap/MANAGE_GIT_REPOSITORY/Clones" - - // Loging into the ABAP System - getting the x-csrf-token and cookies - resp, err := abaputils.GetHTTPResponse("HEAD", cloneConnectionDetails, nil, client) - if err != nil { - err = abaputils.HandleHTTPError(resp, err, "Authentication on the ABAP system failed", cloneConnectionDetails) - return uriConnectionDetails, err, false - } - defer resp.Body.Close() - - log.Entry().WithField("StatusCode", resp.Status).WithField("ABAP Endpoint", cloneConnectionDetails.URL).Debug("Authentication on the ABAP system successful") - uriConnectionDetails.XCsrfToken = resp.Header.Get("X-Csrf-Token") - cloneConnectionDetails.XCsrfToken = uriConnectionDetails.XCsrfToken - - // Trigger the Clone of a Repository - if repo.Name == "" { - return uriConnectionDetails, errors.New("An empty string was passed for the parameter 'repositoryName'"), false - } - - jsonBody := []byte(repo.GetCloneRequestBody()) - resp, err = abaputils.GetHTTPResponse("POST", cloneConnectionDetails, jsonBody, client) - if err != nil { - err, alreadyCloned := handleCloneError(resp, err, cloneConnectionDetails, client, repo) - return uriConnectionDetails, err, alreadyCloned - } - defer resp.Body.Close() - log.Entry().WithField("StatusCode", resp.Status).WithField("repositoryName", repo.Name).WithField("branchName", repo.Branch).WithField("commitID", repo.CommitID).WithField("Tag", repo.Tag).Info("Triggered Clone of Repository / Software Component") - - // Parse Response - var body abaputils.CloneEntity - var abapResp map[string]*json.RawMessage - bodyText, errRead := io.ReadAll(resp.Body) - if errRead != nil 
{ - return uriConnectionDetails, err, false - } - if err := json.Unmarshal(bodyText, &abapResp); err != nil { - return uriConnectionDetails, err, false - } - if err := json.Unmarshal(*abapResp["d"], &body); err != nil { - return uriConnectionDetails, err, false - } - if reflect.DeepEqual(abaputils.CloneEntity{}, body) { - log.Entry().WithField("StatusCode", resp.Status).WithField("repositoryName", repo.Name).WithField("branchName", repo.Branch).WithField("commitID", repo.CommitID).WithField("Tag", repo.Tag).Error("Could not Clone the Repository / Software Component") - err := errors.New("Request to ABAP System not successful") - return uriConnectionDetails, err, false - } + //cloneConnectionDetails.URL = cloneConnectionDetails.URL + "/sap/opu/odata/sap/MANAGE_GIT_REPOSITORY/Clones" // The entity "Clones" does not allow for polling. To poll the progress, the related entity "Pull" has to be called // While "Clones" has the key fields UUID, SC_NAME and BRANCH_NAME, "Pull" only has the key field UUID - uriConnectionDetails.URL = uriConnectionDetails.URL + "/sap/opu/odata/sap/MANAGE_GIT_REPOSITORY/Pull(uuid=guid'" + body.UUID + "')" - return uriConnectionDetails, nil, false -} - -func handleCloneError(resp *http.Response, err error, cloneConnectionDetails abaputils.ConnectionDetailsHTTP, client piperhttp.Sender, repo abaputils.Repository) (returnedError error, alreadyCloned bool) { - alreadyCloned = false - returnedError = nil - if resp == nil { - log.Entry().WithError(err).WithField("ABAP Endpoint", cloneConnectionDetails.URL).Error("Request failed") - returnedError = errors.New("Response is nil") - return - } - defer resp.Body.Close() - errorText, errorCode, parsingError := abaputils.GetErrorDetailsFromResponse(resp) - if parsingError != nil { - returnedError = err - return - } - if errorCode == "A4C_A2G/257" { - // With the latest release, a repeated "clone" was prohibited - // As an intermediate workaround, we react to the error message A4C_A2G/257 that gets thrown, if the repository had already been cloned - // In this case, a checkout branch and a pull will be performed - alreadyCloned = true - abaputils.AddDefaultDashedLine() - abaputils.AddDefaultDashedLine() - log.Entry().Infof("%s", "The repository / software component has already been cloned on the ABAP Environment system ") - log.Entry().Infof("%s", "A `checkout branch` and a `pull` will be performed instead") - abaputils.AddDefaultDashedLine() - abaputils.AddDefaultDashedLine() - checkoutOptions := abapEnvironmentCheckoutBranchOptions{ - Username: cloneConnectionDetails.User, - Password: cloneConnectionDetails.Password, - Host: cloneConnectionDetails.Host, - RepositoryName: repo.Name, - BranchName: repo.Branch, - } - c := command.Command{} - c.Stdout(log.Writer()) - c.Stderr(log.Writer()) - com := abaputils.AbapUtils{ - Exec: &c, - } - returnedError = runAbapEnvironmentCheckoutBranch(&checkoutOptions, &com, client) - if returnedError != nil { - return - } - abaputils.AddDefaultDashedLine() - abaputils.AddDefaultDashedLine() - pullOptions := abapEnvironmentPullGitRepoOptions{ - Username: cloneConnectionDetails.User, - Password: cloneConnectionDetails.Password, - Host: cloneConnectionDetails.Host, - RepositoryName: repo.Name, - CommitID: repo.CommitID, - } - returnedError = runAbapEnvironmentPullGitRepo(&pullOptions, &com, client) - if returnedError != nil { - return - } - } else { - log.Entry().WithField("StatusCode", resp.Status).Error("Could not clone the " + repo.GetCloneLogString()) - abapError := errors.New(fmt.Sprintf("%s - 
%s", errorCode, errorText)) - returnedError = errors.Wrap(abapError, err.Error()) - } - return + //uriConnectionDetails.URL = uriConnectionDetails.URL + "/sap/opu/odata/sap/MANAGE_GIT_REPOSITORY/Pull(uuid=guid'" + body.UUID + "')" + return nil, false } func convertCloneConfig(config *abapEnvironmentCloneGitRepoOptions) abaputils.AbapEnvironmentOptions { diff --git a/cmd/abapEnvironmentCloneGitRepo_generated.go b/cmd/abapEnvironmentCloneGitRepo_generated.go index 1e7ca3a43c..3e3b9351d7 100644 --- a/cmd/abapEnvironmentCloneGitRepo_generated.go +++ b/cmd/abapEnvironmentCloneGitRepo_generated.go @@ -73,7 +73,7 @@ Please provide either of the following options: log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/abapEnvironmentCloneGitRepo_test.go b/cmd/abapEnvironmentCloneGitRepo_test.go index b525a73e34..ae6f47f75f 100644 --- a/cmd/abapEnvironmentCloneGitRepo_test.go +++ b/cmd/abapEnvironmentCloneGitRepo_test.go @@ -4,19 +4,17 @@ package cmd import ( - "bytes" "encoding/json" - "io" - "net/http" "os" "testing" + "time" "github.com/SAP/jenkins-library/pkg/abaputils" - "github.com/pkg/errors" "github.com/stretchr/testify/assert" ) var executionLogStringClone string +var apiManager abaputils.SoftwareComponentApiManagerInterface func init() { executionLog := abaputils.LogProtocolResults{ @@ -29,9 +27,11 @@ func init() { Timestamp: "/Date(1644332299000+0000)/", }, }, + Count: "1", } executionLogResponse, _ := json.Marshal(executionLog) executionLogStringClone = string(executionLogResponse) + } func TestCloneStep(t *testing.T) { @@ -80,13 +80,13 @@ repositories: logResultSuccess := `{"d": { "sc_name": "/DMO/SWC", "status": "S", "to_Log_Overview": { "results": [ { "log_index": 1, "log_name": "Main Import", "type_of_found_issues": "Success", "timestamp": "/Date(1644332299000+0000)/", "to_Log_Protocol": { "results": [ { "log_index": 1, "index_no": "1", "log_name": "", "type": "Info", "descr": "Main import", "timestamp": null, "criticality": 0 } ] } } ] } } }` client := &abaputils.ClientMock{ BodyList: []string{ - `{"d" : [] }`, `{"d" : ` + executionLogStringClone + `}`, logResultSuccess, `{"d" : { "status" : "S" } }`, `{"d" : { "status" : "R" } }`, `{"d" : { "status" : "R" } }`, `{"d" : { "status" : "R" } }`, + `{"d" : { "sc_name" : "/DMO/REPO_B", "avail_on_instance" : false, "active_branch": "branchB" } }`, `{"d" : [] }`, `{"d" : ` + executionLogStringClone + `}`, logResultSuccess, @@ -94,11 +94,14 @@ repositories: `{"d" : { "status" : "R" } }`, `{"d" : { "status" : "R" } }`, `{"d" : { "status" : "R" } }`, + `{"d" : { "sc_name" : "/DMO/REPO_A", "avail_on_instance" : true, "active_branch": "branchA" } }`, + `{"d" : [] }`, }, Token: "myToken", } - err = runAbapEnvironmentCloneGitRepo(&config, &autils, client) + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err = runAbapEnvironmentCloneGitRepo(&config, &autils, apiManager) assert.NoError(t, err, "Did not expect error") assert.Equal(t, 0, len(client.BodyList), "Not all requests were done") }) @@ -120,24 +123,25 @@ repositories: Username: "testUser", Password: "testPassword", RepositoryName: "testRepo1", - BranchName: "testBranch1", } - logResultSuccess := `{"d": 
{ "sc_name": "/DMO/SWC", "status": "S", "to_Log_Overview": { "results": [ { "log_index": 1, "log_name": "Main Import", "type_of_found_issues": "Success", "timestamp": "/Date(1644332299000+0000)/", "to_Log_Protocol": { "results": [ { "log_index": 1, "index_no": "1", "log_name": "", "type": "Info", "descr": "Main import", "timestamp": null, "criticality": 0 } ] } } ] } } }` + logResultSuccess := `{"d": { "sc_name": "testRepo1", "status": "S", "to_Log_Overview": { "results": [ { "log_index": 1, "log_name": "Main Import", "type_of_found_issues": "Success", "timestamp": "/Date(1644332299000+0000)/", "to_Log_Protocol": { "results": [ { "log_index": 1, "index_no": "1", "log_name": "", "type": "Info", "descr": "Main import", "timestamp": null, "criticality": 0 } ] } } ] } } }` client := &abaputils.ClientMock{ BodyList: []string{ - `{"d" : [] }`, `{"d" : ` + executionLogStringClone + `}`, logResultSuccess, `{"d" : { "status" : "S" } }`, `{"d" : { "status" : "R" } }`, `{"d" : { "status" : "R" } }`, + `{"d" : { "sc_name" : "testRepo1", "avail_on_instance" : false, "active_branch": "testBranch1" } }`, + `{"d" : [] }`, }, Token: "myToken", StatusCode: 200, } - err := runAbapEnvironmentCloneGitRepo(&config, &autils, client) + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err := runAbapEnvironmentCloneGitRepo(&config, &autils, apiManager) assert.NoError(t, err, "Did not expect error") assert.Equal(t, 0, len(client.BodyList), "Not all requests were done") }) @@ -166,12 +170,15 @@ repositories: BodyList: []string{ `{"d" : {} }`, `{"d" : { "status" : "R" } }`, + `{"d" : { "sc_name" : "testRepo1", "avail_on_instance" : true, "active_branch": "testBranch1" } }`, + `{"d" : [] }`, }, Token: "myToken", StatusCode: 200, } - err := runAbapEnvironmentCloneGitRepo(&config, &autils, client) + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err := runAbapEnvironmentCloneGitRepo(&config, &autils, apiManager) if assert.Error(t, err, "Expected error") { assert.Equal(t, "Clone of repository / software component 'testRepo1', branch 'testBranch1' failed on the ABAP system: Request to ABAP System not successful", err.Error(), "Expected different error message") } @@ -232,10 +239,10 @@ repositories: Token: "myToken", StatusCode: 200, } - - err = runAbapEnvironmentCloneGitRepo(&config, &autils, client) + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err = runAbapEnvironmentCloneGitRepo(&config, &autils, apiManager) if assert.Error(t, err, "Expected error") { - assert.Equal(t, "Clone of repository / software component '/DMO/REPO_A', branch 'branchA', commit 'ABCD1234' failed on the ABAP System", err.Error(), "Expected different error message") + assert.Equal(t, "Clone of repository / software component '/DMO/REPO_A', branch 'branchA', commit 'ABCD1234' failed on the ABAP system: Request to ABAP System not successful", err.Error(), "Expected different error message") } }) @@ -268,8 +275,8 @@ repositories: Token: "myToken", StatusCode: 200, } - - err := runAbapEnvironmentCloneGitRepo(&config, &autils, client) + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err := runAbapEnvironmentCloneGitRepo(&config, &autils, apiManager) if assert.Error(t, err, "Expected error") { assert.Equal(t, "Clone of repository / software component 
'testRepo1', branch 'testBranch1' failed on the ABAP system: Request to ABAP System not successful", err.Error(), "Expected different error message") } @@ -303,8 +310,8 @@ repositories: Token: "myToken", StatusCode: 200, } - - err := runAbapEnvironmentCloneGitRepo(&config, &autils, client) + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err := runAbapEnvironmentCloneGitRepo(&config, &autils, apiManager) if assert.Error(t, err, "Expected error") { assert.Equal(t, "Clone of repository / software component 'testRepo1', branch 'testBranch1' failed on the ABAP system: Request to ABAP System not successful", err.Error(), "Expected different error message") } @@ -337,10 +344,10 @@ repositories: Token: "myToken", StatusCode: 200, } - - err := runAbapEnvironmentCloneGitRepo(&config, &autils, client) + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err := runAbapEnvironmentCloneGitRepo(&config, &autils, apiManager) if assert.Error(t, err, "Expected error") { - assert.Equal(t, "Something failed during the clone: Could not find filename.yaml", err.Error(), "Expected different error message") + assert.Equal(t, "Could not read repositories: Could not find filename.yaml", err.Error(), "Expected different error message") } }) @@ -378,8 +385,8 @@ repositories: Token: "myToken", StatusCode: 200, } - - err := runAbapEnvironmentCloneGitRepo(&config, &autils, client) + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err := runAbapEnvironmentCloneGitRepo(&config, &autils, apiManager) if assert.Error(t, err, "Expected error") { assert.Equal(t, "The provided configuration is not allowed: It is not allowed to configure the parameters `repositories`and `repositoryName` at the same time", err.Error(), "Expected different error message") } @@ -387,7 +394,7 @@ repositories: } func TestALreadyCloned(t *testing.T) { - t.Run("Already Cloned", func(t *testing.T) { + t.Run("Already cloned, switch branch and pull instead", func(t *testing.T) { var autils = abaputils.AUtilsMock{} defer autils.Cleanup() @@ -396,46 +403,50 @@ func TestALreadyCloned(t *testing.T) { autils.ReturnedConnectionDetailsHTTP.URL = "https://example.com" autils.ReturnedConnectionDetailsHTTP.Host = "example.com" autils.ReturnedConnectionDetailsHTTP.XCsrfToken = "xcsrftoken" + + config := abapEnvironmentCloneGitRepoOptions{ + CfAPIEndpoint: "https://api.endpoint.com", + CfOrg: "testOrg", + CfSpace: "testSpace", + CfServiceInstance: "testInstance", + CfServiceKeyName: "testServiceKey", + Username: "testUser", + Password: "testPassword", + } + logResultSuccess := `{"d": { "sc_name": "/DMO/SWC", "status": "S", "to_Log_Overview": { "results": [ { "log_index": 1, "log_name": "Main Import", "type_of_found_issues": "Success", "timestamp": "/Date(1644332299000+0000)/", "to_Log_Protocol": { "results": [ { "log_index": 1, "index_no": "1", "log_name": "", "type": "Info", "descr": "Main import", "timestamp": null, "criticality": 0 } ] } } ] } } }` client := &abaputils.ClientMock{ BodyList: []string{ - `{"d" : }`, `{"d" : ` + executionLogStringClone + `}`, logResultSuccess, `{"d" : { "status" : "S" } }`, `{"d" : { "status" : "R" } }`, `{"d" : { "status" : "R" } }`, - `{"d" : }`, + `{"d" : [] }`, `{"d" : ` + executionLogStringClone + `}`, logResultSuccess, `{"d" : { "status" : "S" } }`, `{"d" : { "status" : "R" } }`, `{"d" : { "status" : "R" } 
}`, + `{"d" : { "sc_name" : "testRepo1", "avail_on_inst" : true, "active_branch": "testBranch1" } }`, + `{"d" : [] }`, }, Token: "myToken", StatusCode: 200, } - bodyString := `{"error" : { "code" : "A4C_A2G/257", "message" : { "lang" : "de", "value" : "Already Cloned"} } }` - body := []byte(bodyString) - resp := http.Response{ - Status: "400 Bad Request", - StatusCode: 400, - Body: io.NopCloser(bytes.NewReader(body)), - } - repo := abaputils.Repository{ - Name: "Test", - Branch: "Branch", + Name: "testRepo1", + Branch: "inactive_branch", CommitID: "abcd1234", } - err := errors.New("Custom Error") - err, _ = handleCloneError(&resp, err, autils.ReturnedConnectionDetailsHTTP, client, repo) + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err := cloneSingleRepo(apiManager, autils.ReturnedConnectionDetailsHTTP, repo, &config, &autils) assert.NoError(t, err, "Did not expect error") }) - t.Run("Already Cloned, Pull fails", func(t *testing.T) { + t.Run("Already cloned, branch is already checked out, pull instead", func(t *testing.T) { var autils = abaputils.AUtilsMock{} defer autils.Cleanup() @@ -444,130 +455,41 @@ func TestALreadyCloned(t *testing.T) { autils.ReturnedConnectionDetailsHTTP.Password = "password" autils.ReturnedConnectionDetailsHTTP.User = "user" autils.ReturnedConnectionDetailsHTTP.URL = "https://example.com" autils.ReturnedConnectionDetailsHTTP.Host = "example.com" autils.ReturnedConnectionDetailsHTTP.XCsrfToken = "xcsrftoken" - logResultSuccess := `{"d": { "sc_name": "/DMO/SWC", "status": "S", "to_Log_Overview": { "results": [ { "log_index": 1, "log_name": "Main Import", "type_of_found_issues": "Success", "timestamp": "/Date(1644332299000+0000)/", "to_Log_Protocol": { "results": [ { "log_index": 1, "index_no": "1", "log_name": "", "type": "Info", "descr": "Main import", "timestamp": null, "criticality": 0 } ] } } ] } } }` - client := &abaputils.ClientMock{ - BodyList: []string{ - `{"d" : ` + executionLogStringClone + `}`, - logResultSuccess, - `{"d" : { "EntitySets" : [ "LogOverviews" ] } }`, - `{"d" : { "status" : "E" } }`, - `{"d" : { "status" : "R" } }`, - `{"d" : { "status" : "R" } }`, - `{"d" : ` + executionLogStringClone + `}`, - logResultSuccess, - `{"d" : { "EntitySets" : [ "LogOverviews" ] } }`, - `{"d" : { "status" : "S" } }`, - `{"d" : { "status" : "R" } }`, - `{"d" : { "status" : "R" } }`, - }, - Token: "myToken", - StatusCode: 200, - } - - bodyString := `{"error" : { "code" : "A4C_A2G/257", "message" : { "lang" : "de", "value" : "Already Cloned"} } }` - body := []byte(bodyString) - resp := http.Response{ - Status: "400 Bad Request", - StatusCode: 400, - Body: io.NopCloser(bytes.NewReader(body)), - } - repo := abaputils.Repository{ - Name: "Test", - Branch: "Branch", - CommitID: "abcd1234", - } - - err := errors.New("Custom Error") - err, _ = handleCloneError(&resp, err, autils.ReturnedConnectionDetailsHTTP, client, repo) - if assert.Error(t, err, "Expected error") { - assert.Equal(t, "Pull of the repository / software component 'Test', commit 'abcd1234' failed on the ABAP system: Request to ABAP System not successful", err.Error(), "Expected different error message") + config := abapEnvironmentCloneGitRepoOptions{ + CfAPIEndpoint: "https://api.endpoint.com", + CfOrg: "testOrg", + CfSpace: "testSpace", + CfServiceInstance: "testInstance", + CfServiceKeyName: "testServiceKey", + Username: "testUser", + Password: "testPassword", } - }) - - t.Run("Already Cloned, checkout fails", func(t *testing.T) { - var autils = abaputils.AUtilsMock{} - defer autils.Cleanup() - 
autils.ReturnedConnectionDetailsHTTP.Password = "password" - autils.ReturnedConnectionDetailsHTTP.User = "user" - autils.ReturnedConnectionDetailsHTTP.URL = "https://example.com" - autils.ReturnedConnectionDetailsHTTP.Host = "example.com" - autils.ReturnedConnectionDetailsHTTP.XCsrfToken = "xcsrftoken" logResultSuccess := `{"d": { "sc_name": "/DMO/SWC", "status": "S", "to_Log_Overview": { "results": [ { "log_index": 1, "log_name": "Main Import", "type_of_found_issues": "Success", "timestamp": "/Date(1644332299000+0000)/", "to_Log_Protocol": { "results": [ { "log_index": 1, "index_no": "1", "log_name": "", "type": "Info", "descr": "Main import", "timestamp": null, "criticality": 0 } ] } } ] } } }` client := &abaputils.ClientMock{ BodyList: []string{ + `{"d" : ` + executionLogStringClone + `}`, logResultSuccess, - `{"d" : { "EntitySets" : [ "LogOverviews" ] } }`, `{"d" : { "status" : "S" } }`, `{"d" : { "status" : "R" } }`, `{"d" : { "status" : "R" } }`, - logResultSuccess, - `{"d" : { "EntitySets" : [ "LogOverviews" ] } }`, - `{"d" : { "status" : "E" } }`, - `{"d" : { "status" : "R" } }`, - `{"d" : { "status" : "R" } }`, + `{"d" : { "sc_name" : "testRepo1", "avail_on_inst" : true, "active_branch": "testBranch1" } }`, + `{"d" : [] }`, }, Token: "myToken", StatusCode: 200, } - bodyString := `{"error" : { "code" : "A4C_A2G/257", "message" : { "lang" : "de", "value" : "Already Cloned"} } }` - body := []byte(bodyString) - resp := http.Response{ - Status: "400 Bad Request", - StatusCode: 400, - Body: io.NopCloser(bytes.NewReader(body)), - } - repo := abaputils.Repository{ - Name: "Test", - Branch: "Branch", + Name: "testRepo1", + Branch: "testBranch1", CommitID: "abcd1234", } - err := errors.New("Custom Error") - err, _ = handleCloneError(&resp, err, autils.ReturnedConnectionDetailsHTTP, client, repo) - if assert.Error(t, err, "Expected error") { - assert.Equal(t, "Something failed during the checkout: Checkout failed: Checkout of branch Branch failed on the ABAP System", err.Error(), "Expected different error message") - } + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err := cloneSingleRepo(apiManager, autils.ReturnedConnectionDetailsHTTP, repo, &config, &autils) + assert.NoError(t, err, "Did not expect error") }) - t.Run("Already Cloned, checkout fails", func(t *testing.T) { - - var autils = abaputils.AUtilsMock{} - defer autils.Cleanup() - autils.ReturnedConnectionDetailsHTTP.Password = "password" - autils.ReturnedConnectionDetailsHTTP.User = "user" - autils.ReturnedConnectionDetailsHTTP.URL = "https://example.com" - autils.ReturnedConnectionDetailsHTTP.Host = "example.com" - autils.ReturnedConnectionDetailsHTTP.XCsrfToken = "xcsrftoken" - client := &abaputils.ClientMock{ - BodyList: []string{ - `{"d" : { "status" : "R" } }`, - }, - Token: "myToken", - StatusCode: 200, - } - - bodyString := `{"error" : { "code" : "A4C_A2G/258", "message" : { "lang" : "de", "value" : "Some error message"} } }` - body := []byte(bodyString) - resp := http.Response{ - Status: "400 Bad Request", - StatusCode: 400, - Body: io.NopCloser(bytes.NewReader(body)), - } - - repo := abaputils.Repository{ - Name: "Test", - Branch: "Branch", - CommitID: "abcd1234", - } - - err := errors.New("Custom Error") - err, _ = handleCloneError(&resp, err, autils.ReturnedConnectionDetailsHTTP, client, repo) - if assert.Error(t, err, "Expected error") { - assert.Equal(t, "Custom Error: A4C_A2G/258 - Some error message", err.Error(), "Expected different error 
message") - } - }) } diff --git a/cmd/abapEnvironmentCreateSystem_generated.go b/cmd/abapEnvironmentCreateSystem_generated.go index dbaf2c26f6..64e03bc572 100644 --- a/cmd/abapEnvironmentCreateSystem_generated.go +++ b/cmd/abapEnvironmentCreateSystem_generated.go @@ -74,7 +74,7 @@ func AbapEnvironmentCreateSystemCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/abapEnvironmentCreateTag.go b/cmd/abapEnvironmentCreateTag.go index 1681c6c39e..961b062f42 100644 --- a/cmd/abapEnvironmentCreateTag.go +++ b/cmd/abapEnvironmentCreateTag.go @@ -1,10 +1,7 @@ package cmd import ( - "encoding/json" "fmt" - "io" - "net/http/cookiejar" "strings" "time" @@ -16,7 +13,7 @@ import ( "github.com/pkg/errors" ) -func abapEnvironmentCreateTag(config abapEnvironmentCreateTagOptions, telemetryData *telemetry.CustomData) { +func abapEnvironmentCreateTag(config abapEnvironmentCreateTagOptions, _ *telemetry.CustomData) { c := command.Command{} @@ -27,58 +24,36 @@ func abapEnvironmentCreateTag(config abapEnvironmentCreateTagOptions, telemetryD Exec: &c, } - client := piperhttp.Client{} + apiManager := abaputils.SoftwareComponentApiManager{ + Client: &piperhttp.Client{}, + PollIntervall: 5 * time.Second, + } - if err := runAbapEnvironmentCreateTag(&config, telemetryData, &autils, &client); err != nil { + if err := runAbapEnvironmentCreateTag(&config, &autils, &apiManager); err != nil { log.Entry().WithError(err).Fatal("step execution failed") } } -func runAbapEnvironmentCreateTag(config *abapEnvironmentCreateTagOptions, telemetryData *telemetry.CustomData, com abaputils.Communication, client piperhttp.Sender) error { +func runAbapEnvironmentCreateTag(config *abapEnvironmentCreateTagOptions, com abaputils.Communication, apiManager abaputils.SoftwareComponentApiManagerInterface) error { connectionDetails, errorGetInfo := com.GetAbapCommunicationArrangementInfo(convertTagConfig(config), "") if errorGetInfo != nil { return errors.Wrap(errorGetInfo, "Parameters for the ABAP Connection not available") } - // Configuring the HTTP Client and CookieJar - cookieJar, errorCookieJar := cookiejar.New(nil) - if errorCookieJar != nil { - return errors.Wrap(errorCookieJar, "Could not create a Cookie Jar") - } - - client.SetOptions(piperhttp.ClientOptions{ - MaxRequestDuration: 180 * time.Second, - CookieJar: cookieJar, - Username: connectionDetails.User, - Password: connectionDetails.Password, - }) - backlog, errorPrepare := prepareBacklog(config) if errorPrepare != nil { return fmt.Errorf("Something failed during the tag creation: %w", errorPrepare) } - return createTags(backlog, telemetryData, connectionDetails, client, com) + return createTags(backlog, connectionDetails, apiManager) } -func createTags(backlog []CreateTagBacklog, telemetryData *telemetry.CustomData, con abaputils.ConnectionDetailsHTTP, client piperhttp.Sender, com abaputils.Communication) (err error) { - - connection := con - connection.XCsrfToken = "fetch" - connection.URL = con.URL + "/sap/opu/odata/sap/MANAGE_GIT_REPOSITORY/Tags" - resp, err := abaputils.GetHTTPResponse("HEAD", connection, nil, client) - if err != nil { - return abaputils.HandleHTTPError(resp, err, "Authentication on the ABAP system failed", con) - } 
- defer resp.Body.Close() - - log.Entry().WithField("StatusCode", resp.Status).WithField("ABAP Endpoint", connection.URL).Debug("Authentication on the ABAP system successful") - connection.XCsrfToken = resp.Header.Get("X-Csrf-Token") +func createTags(backlog []abaputils.CreateTagBacklog, con abaputils.ConnectionDetailsHTTP, apiManager abaputils.SoftwareComponentApiManagerInterface) (err error) { errorOccurred := false for _, item := range backlog { - err = createTagsForSingleItem(item, telemetryData, connection, client, com) + err = createTagsForSingleItem(item, con, apiManager) if err != nil { errorOccurred = true } @@ -93,11 +68,11 @@ func createTags(backlog []CreateTagBacklog, telemetryData *telemetry.CustomData, } -func createTagsForSingleItem(item CreateTagBacklog, telemetryData *telemetry.CustomData, con abaputils.ConnectionDetailsHTTP, client piperhttp.Sender, com abaputils.Communication) (err error) { +func createTagsForSingleItem(item abaputils.CreateTagBacklog, con abaputils.ConnectionDetailsHTTP, apiManager abaputils.SoftwareComponentApiManagerInterface) (err error) { errorOccurred := false - for index := range item.tags { - err = createSingleTag(item, index, telemetryData, con, client, com) + for index := range item.Tags { + err = createSingleTag(item, index, con, apiManager) if err != nil { errorOccurred = true } @@ -109,79 +84,38 @@ func createTagsForSingleItem(item CreateTagBacklog, telemetryData *telemetry.Cus return err } -func createSingleTag(item CreateTagBacklog, index int, telemetryData *telemetry.CustomData, con abaputils.ConnectionDetailsHTTP, client piperhttp.Sender, com abaputils.Communication) (err error) { +func createSingleTag(item abaputils.CreateTagBacklog, index int, con abaputils.ConnectionDetailsHTTP, apiManager abaputils.SoftwareComponentApiManagerInterface) (err error) { - requestBodyStruct := CreateTagBody{RepositoryName: item.repositoryName, CommitID: item.commitID, Tag: item.tags[index].tagName, Description: item.tags[index].tagDescription} - requestBodyJson, err := json.Marshal(&requestBodyStruct) - if err != nil { - return err + api, errGetAPI := apiManager.GetAPI(con, abaputils.Repository{Name: item.RepositoryName, CommitID: item.CommitID}) + if errGetAPI != nil { + return errors.Wrap(errGetAPI, "Could not initialize the connection to the system") } - log.Entry().Debugf("Request body: %s", requestBodyJson) - resp, err := abaputils.GetHTTPResponse("POST", con, requestBodyJson, client) - if err != nil { - errorMessage := "Could not create tag " + requestBodyStruct.Tag + " for repository " + requestBodyStruct.RepositoryName + " with commitID " + requestBodyStruct.CommitID - err = abaputils.HandleHTTPError(resp, err, errorMessage, con) - return err + createTagError := api.CreateTag(item.Tags[index]) + if createTagError != nil { + return errors.Wrap(createTagError, "Creation of Tag failed on the ABAP system") } - defer resp.Body.Close() - // Parse response - var createTagResponse CreateTagResponse - var abapResp map[string]*json.RawMessage - bodyText, _ := io.ReadAll(resp.Body) + status, errorPollEntity := abaputils.PollEntity(api, apiManager.GetPollIntervall()) - if err = json.Unmarshal(bodyText, &abapResp); err != nil { - return err - } - if err = json.Unmarshal(*abapResp["d"], &createTagResponse); err != nil { - return err - } - - con.URL = con.Host + "/sap/opu/odata/sap/MANAGE_GIT_REPOSITORY/Pull(guid'" + createTagResponse.UUID + "')" - err = checkStatus(con, client, com) - - if err == nil { - log.Entry().Info("Created tag " + requestBodyStruct.Tag + " for 
repository " + requestBodyStruct.RepositoryName + " with commitID " + requestBodyStruct.CommitID) + if errorPollEntity == nil && status == "S" { + log.Entry().Info("Created tag " + item.Tags[index].TagName + " for repository " + item.RepositoryName + " with commitID " + item.CommitID) } else { - log.Entry().Error("NOT created: Tag " + requestBodyStruct.Tag + " for repository " + requestBodyStruct.RepositoryName + " with commitID " + requestBodyStruct.CommitID) + log.Entry().Error("NOT created: Tag " + item.Tags[index].TagName + " for repository " + item.RepositoryName + " with commitID " + item.CommitID) + err = errors.New("Creation of Tag failed on the ABAP system") } return err } -func checkStatus(con abaputils.ConnectionDetailsHTTP, client piperhttp.Sender, com abaputils.Communication) (err error) { - var status string - pollIntervall := com.GetPollIntervall() - count := 0 - for { - count += 1 - entity, _, err := abaputils.GetStatus("Could not create Tag", con, client) - if err != nil { - return err - } - status = entity.Status - if status != "R" { - if status == "E" { - err = errors.New("Could not create Tag") - } - return err - } - if count >= 200 { - return errors.New("Could not create Tag (Timeout)") - } - time.Sleep(pollIntervall) - } -} - -func prepareBacklog(config *abapEnvironmentCreateTagOptions) (backlog []CreateTagBacklog, err error) { +func prepareBacklog(config *abapEnvironmentCreateTagOptions) (backlog []abaputils.CreateTagBacklog, err error) { if config.Repositories != "" && config.RepositoryName != "" { return nil, errors.New("Configuring the parameter repositories and the parameter repositoryName at the same time is not allowed") } if config.RepositoryName != "" && config.CommitID != "" { - backlog = append(backlog, CreateTagBacklog{repositoryName: config.RepositoryName, commitID: config.CommitID}) + backlog = append(backlog, abaputils.CreateTagBacklog{RepositoryName: config.RepositoryName, CommitID: config.CommitID}) } if config.Repositories != "" { @@ -190,10 +124,10 @@ func prepareBacklog(config *abapEnvironmentCreateTagOptions) (backlog []CreateTa return nil, err } for _, repo := range descriptor.Repositories { - backlogInstance := CreateTagBacklog{repositoryName: repo.Name, commitID: repo.CommitID} + backlogInstance := abaputils.CreateTagBacklog{RepositoryName: repo.Name, CommitID: repo.CommitID} if config.GenerateTagForAddonComponentVersion && repo.VersionYAML != "" { - tag := Tag{tagName: "v" + repo.VersionYAML, tagDescription: "Generated by the ABAP Environment Pipeline"} - backlogInstance.tags = append(backlogInstance.tags, tag) + tag := abaputils.Tag{TagName: "v" + repo.VersionYAML, TagDescription: "Generated by the ABAP Environment Pipeline"} + backlogInstance.Tags = append(backlogInstance.Tags, tag) } backlog = append(backlog, backlogInstance) } @@ -212,11 +146,11 @@ func prepareBacklog(config *abapEnvironmentCreateTagOptions) (backlog []CreateTa return backlog, nil } -func addTagToList(backlog []CreateTagBacklog, tag string, description string) []CreateTagBacklog { +func addTagToList(backlog []abaputils.CreateTagBacklog, tag string, description string) []abaputils.CreateTagBacklog { for i, item := range backlog { - tag := Tag{tagName: tag, tagDescription: description} - backlog[i].tags = append(item.tags, tag) + tag := abaputils.Tag{TagName: tag, TagDescription: description} + backlog[i].Tags = append(item.Tags, tag) } return backlog } @@ -235,25 +169,3 @@ func convertTagConfig(config *abapEnvironmentCreateTagOptions) abaputils.AbapEnv return subOptions } - 
-type CreateTagBacklog struct { - repositoryName string - commitID string - tags []Tag -} - -type Tag struct { - tagName string - tagDescription string -} - -type CreateTagBody struct { - RepositoryName string `json:"sc_name"` - CommitID string `json:"commit_id"` - Tag string `json:"tag_name"` - Description string `json:"tag_description"` -} - -type CreateTagResponse struct { - UUID string `json:"uuid"` -} diff --git a/cmd/abapEnvironmentCreateTag_generated.go b/cmd/abapEnvironmentCreateTag_generated.go index 8a68976dee..2bf73c87b1 100644 --- a/cmd/abapEnvironmentCreateTag_generated.go +++ b/cmd/abapEnvironmentCreateTag_generated.go @@ -77,7 +77,7 @@ Please provide either of the following options: log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/abapEnvironmentCreateTag_test.go b/cmd/abapEnvironmentCreateTag_test.go index 1241d3639c..cdd1310062 100644 --- a/cmd/abapEnvironmentCreateTag_test.go +++ b/cmd/abapEnvironmentCreateTag_test.go @@ -4,8 +4,10 @@ package cmd import ( + "encoding/json" "os" "testing" + "time" "github.com/SAP/jenkins-library/pkg/abaputils" "github.com/SAP/jenkins-library/pkg/log" @@ -13,6 +15,28 @@ import ( "github.com/stretchr/testify/assert" ) +var executionLogStringCreateTag string +var logResultSuccess string + +func init() { + logResultSuccess = `{"d": { "sc_name": "/DMO/SWC", "status": "S", "to_Log_Overview": { "results": [ { "log_index": 1, "log_name": "Main Import", "type_of_found_issues": "Success", "timestamp": "/Date(1644332299000+0000)/", "to_Log_Protocol": { "results": [ { "log_index": 1, "index_no": "1", "log_name": "", "type": "Info", "descr": "Main import", "timestamp": null, "criticality": 0 } ] } } ] } } }` + executionLog := abaputils.LogProtocolResults{ + Results: []abaputils.LogProtocol{ + { + ProtocolLine: 1, + OverviewIndex: 1, + Type: "LogEntry", + Description: "S", + Timestamp: "/Date(1644332299000+0000)/", + }, + }, + Count: "1", + } + executionLogResponse, _ := json.Marshal(executionLog) + executionLogStringCreateTag = string(executionLogResponse) + +} + func TestRunAbapEnvironmentCreateTag(t *testing.T) { t.Run("happy path", func(t *testing.T) { @@ -56,10 +80,16 @@ repositories: } client := &abaputils.ClientMock{ BodyList: []string{ + `{"d" : ` + executionLogStringCreateTag + `}`, + logResultSuccess, `{"d" : { "Status" : "S" } }`, `{"d" : { "uuid" : "abc" } }`, + `{"d" : ` + executionLogStringCreateTag + `}`, + logResultSuccess, `{"d" : { "Status" : "S" } }`, `{"d" : { "uuid" : "abc" } }`, + `{"d" : ` + executionLogStringCreateTag + `}`, + logResultSuccess, `{"d" : { "Status" : "S" } }`, `{"d" : { "uuid" : "abc" } }`, `{"d" : { "empty" : "body" } }`, @@ -71,13 +101,14 @@ repositories: _, hook := test.NewNullLogger() log.RegisterHook(hook) - err = runAbapEnvironmentCreateTag(config, nil, autils, client) + apiManager := &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err = runAbapEnvironmentCreateTag(config, autils, apiManager) assert.NoError(t, err, "Did not expect error") - assert.Equal(t, 3, len(hook.Entries), "Expected a different number of entries") - assert.Equal(t, `Created tag v4.5.6 for repository /DMO/SWC with commitID 1234abcd`, 
hook.AllEntries()[0].Message, "Expected a different message") - assert.Equal(t, `Created tag -DMO-PRODUCT-1.2.3 for repository /DMO/SWC with commitID 1234abcd`, hook.AllEntries()[1].Message, "Expected a different message") - assert.Equal(t, `Created tag tag for repository /DMO/SWC with commitID 1234abcd`, hook.AllEntries()[2].Message, "Expected a different message") + assert.Equal(t, 25, len(hook.Entries), "Expected a different number of entries") + assert.Equal(t, `Created tag v4.5.6 for repository /DMO/SWC with commitID 1234abcd`, hook.AllEntries()[12].Message, "Expected a different message") + assert.Equal(t, `Created tag -DMO-PRODUCT-1.2.3 for repository /DMO/SWC with commitID 1234abcd`, hook.AllEntries()[18].Message, "Expected a different message") + assert.Equal(t, `Created tag tag for repository /DMO/SWC with commitID 1234abcd`, hook.AllEntries()[24].Message, "Expected a different message") hook.Reset() }) @@ -122,10 +153,18 @@ repositories: } client := &abaputils.ClientMock{ BodyList: []string{ + `{"d" : ` + executionLogStringCreateTag + `}`, + logResultSuccess, `{"d" : { "Status" : "E" } }`, `{"d" : { "uuid" : "abc" } }`, + `{"d" : { "empty" : "body" } }`, + `{"d" : ` + executionLogStringCreateTag + `}`, + logResultSuccess, `{"d" : { "Status" : "E" } }`, `{"d" : { "uuid" : "abc" } }`, + `{"d" : { "empty" : "body" } }`, + `{"d" : ` + executionLogStringCreateTag + `}`, + logResultSuccess, `{"d" : { "Status" : "E" } }`, `{"d" : { "uuid" : "abc" } }`, `{"d" : { "empty" : "body" } }`, @@ -137,14 +176,15 @@ repositories: _, hook := test.NewNullLogger() log.RegisterHook(hook) - err = runAbapEnvironmentCreateTag(config, nil, autils, client) + apiManager := &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err = runAbapEnvironmentCreateTag(config, autils, apiManager) assert.Error(t, err, "Did expect error") - assert.Equal(t, 4, len(hook.Entries), "Expected a different number of entries") - assert.Equal(t, `NOT created: Tag v4.5.6 for repository /DMO/SWC with commitID 1234abcd`, hook.AllEntries()[0].Message, "Expected a different message") - assert.Equal(t, `NOT created: Tag -DMO-PRODUCT-1.2.3 for repository /DMO/SWC with commitID 1234abcd`, hook.AllEntries()[1].Message, "Expected a different message") - assert.Equal(t, `NOT created: Tag tag for repository /DMO/SWC with commitID 1234abcd`, hook.AllEntries()[2].Message, "Expected a different message") - assert.Equal(t, `At least one tag has not been created`, hook.AllEntries()[3].Message, "Expected a different message") + assert.Equal(t, 40, len(hook.Entries), "Expected a different number of entries") + assert.Equal(t, `NOT created: Tag v4.5.6 for repository /DMO/SWC with commitID 1234abcd`, hook.AllEntries()[12].Message, "Expected a different message") + assert.Equal(t, `NOT created: Tag -DMO-PRODUCT-1.2.3 for repository /DMO/SWC with commitID 1234abcd`, hook.AllEntries()[25].Message, "Expected a different message") + assert.Equal(t, `NOT created: Tag tag for repository /DMO/SWC with commitID 1234abcd`, hook.AllEntries()[38].Message, "Expected a different message") + assert.Equal(t, `At least one tag has not been created`, hook.AllEntries()[39].Message, "Expected a different message") hook.Reset() }) @@ -175,6 +215,8 @@ func TestRunAbapEnvironmentCreateTagConfigurations(t *testing.T) { } client := &abaputils.ClientMock{ BodyList: []string{ + `{"d" : ` + executionLogStringCreateTag + `}`, + logResultSuccess, `{"d" : { "Status" : "S" } }`, `{"d" : { "uuid" : "abc" } }`, `{"d" : { 
"empty" : "body" } }`, @@ -186,11 +228,12 @@ func TestRunAbapEnvironmentCreateTagConfigurations(t *testing.T) { _, hook := test.NewNullLogger() log.RegisterHook(hook) - err := runAbapEnvironmentCreateTag(config, nil, autils, client) + apiManager := &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err := runAbapEnvironmentCreateTag(config, autils, apiManager) assert.NoError(t, err, "Did not expect error") - assert.Equal(t, 1, len(hook.Entries), "Expected a different number of entries") - assert.Equal(t, `Created tag tag for repository /DMO/SWC with commitID 1234abcd`, hook.AllEntries()[0].Message, "Expected a different message") + assert.Equal(t, 13, len(hook.Entries), "Expected a different number of entries") + assert.Equal(t, `Created tag tag for repository /DMO/SWC with commitID 1234abcd`, hook.AllEntries()[12].Message, "Expected a different message") hook.Reset() }) @@ -253,7 +296,8 @@ repositories: StatusCode: 200, } - err = runAbapEnvironmentCreateTag(config, nil, autils, client) + apiManager := &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err = runAbapEnvironmentCreateTag(config, autils, apiManager) assert.Error(t, err, "Did expect error") assert.Equal(t, "Something failed during the tag creation: Configuring the parameter repositories and the parameter repositoryName at the same time is not allowed", err.Error(), "Expected different error message") @@ -315,11 +359,12 @@ repositories: _, hook := test.NewNullLogger() log.RegisterHook(hook) - err = runAbapEnvironmentCreateTag(config, nil, autils, client) + apiManager := &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err = runAbapEnvironmentCreateTag(config, autils, apiManager) assert.NoError(t, err, "Did not expect error") - assert.Equal(t, 1, len(hook.Entries), "Expected a different number of entries") - assert.Equal(t, `Created tag tag for repository /DMO/SWC with commitID 1234abcd`, hook.AllEntries()[0].Message, "Expected a different message") + assert.Equal(t, 6, len(hook.Entries), "Expected a different number of entries") + assert.Equal(t, `Created tag tag for repository /DMO/SWC with commitID 1234abcd`, hook.AllEntries()[5].Message, "Expected a different message") hook.Reset() }) diff --git a/cmd/abapEnvironmentPullGitRepo.go b/cmd/abapEnvironmentPullGitRepo.go index 5dae599e1a..8c93a2d55c 100644 --- a/cmd/abapEnvironmentPullGitRepo.go +++ b/cmd/abapEnvironmentPullGitRepo.go @@ -1,11 +1,7 @@ package cmd import ( - "encoding/json" "fmt" - "io" - "net/http/cookiejar" - "reflect" "time" "github.com/SAP/jenkins-library/pkg/abaputils" @@ -28,38 +24,28 @@ func abapEnvironmentPullGitRepo(options abapEnvironmentPullGitRepoOptions, _ *te Exec: &c, } - client := piperhttp.Client{} + apiManager := abaputils.SoftwareComponentApiManager{ + Client: &piperhttp.Client{}, + PollIntervall: 5 * time.Second, + } // error situations should stop execution through log.Entry().Fatal() call which leads to an os.Exit(1) in the end - err := runAbapEnvironmentPullGitRepo(&options, &autils, &client) + err := runAbapEnvironmentPullGitRepo(&options, &autils, &apiManager) if err != nil { log.Entry().WithError(err).Fatal("step execution failed") } } -func runAbapEnvironmentPullGitRepo(options *abapEnvironmentPullGitRepoOptions, com abaputils.Communication, client piperhttp.Sender) (err error) { +func runAbapEnvironmentPullGitRepo(options *abapEnvironmentPullGitRepoOptions, com 
abaputils.Communication, apiManager abaputils.SoftwareComponentApiManagerInterface) (err error) { subOptions := convertPullConfig(options) // Determine the host, user and password, either via the input parameters or via a cloud foundry service key - connectionDetails, err := com.GetAbapCommunicationArrangementInfo(subOptions, "/sap/opu/odata/sap/MANAGE_GIT_REPOSITORY/Pull") + connectionDetails, err := com.GetAbapCommunicationArrangementInfo(subOptions, "") if err != nil { return errors.Wrap(err, "Parameters for the ABAP Connection not available") } - cookieJar, err := cookiejar.New(nil) - if err != nil { - return errors.Wrap(err, "Could not create a Cookie Jar") - } - clientOptions := piperhttp.ClientOptions{ - MaxRequestDuration: 180 * time.Second, - CookieJar: cookieJar, - Username: connectionDetails.User, - Password: connectionDetails.Password, - } - client.SetOptions(clientOptions) - pollIntervall := com.GetPollIntervall() - var repositories []abaputils.Repository err = checkPullRepositoryConfiguration(*options) if err != nil { @@ -71,15 +57,15 @@ func runAbapEnvironmentPullGitRepo(options *abapEnvironmentPullGitRepoOptions, c return err } - err = pullRepositories(repositories, connectionDetails, client, pollIntervall) + err = pullRepositories(repositories, connectionDetails, apiManager) return err } -func pullRepositories(repositories []abaputils.Repository, pullConnectionDetails abaputils.ConnectionDetailsHTTP, client piperhttp.Sender, pollIntervall time.Duration) (err error) { +func pullRepositories(repositories []abaputils.Repository, pullConnectionDetails abaputils.ConnectionDetailsHTTP, apiManager abaputils.SoftwareComponentApiManagerInterface) (err error) { log.Entry().Infof("Start pulling %v repositories", len(repositories)) for _, repo := range repositories { - err = handlePull(repo, pullConnectionDetails, client, pollIntervall) + err = handlePull(repo, pullConnectionDetails, apiManager) if err != nil { break } @@ -90,22 +76,27 @@ func pullRepositories(repositories []abaputils.Repository, pullConnectionDetails return err } -func handlePull(repo abaputils.Repository, pullConnectionDetails abaputils.ConnectionDetailsHTTP, client piperhttp.Sender, pollIntervall time.Duration) (err error) { +func handlePull(repo abaputils.Repository, con abaputils.ConnectionDetailsHTTP, apiManager abaputils.SoftwareComponentApiManagerInterface) (err error) { logString := repo.GetPullLogString() errorString := "Pull of the " + logString + " failed on the ABAP system" - abaputils.AddDefaultDashedLine() + abaputils.AddDefaultDashedLine(1) log.Entry().Info("Start pulling the " + logString) - abaputils.AddDefaultDashedLine() + abaputils.AddDefaultDashedLine(1) + + api, errGetAPI := apiManager.GetAPI(con, repo) + if errGetAPI != nil { + return errors.Wrap(errGetAPI, "Could not initialize the connection to the system") + } - uriConnectionDetails, err := triggerPull(repo, pullConnectionDetails, client) + err = api.Pull() if err != nil { return errors.Wrapf(err, errorString) } // Polling the status of the repository import on the ABAP Environment system - status, errorPollEntity := abaputils.PollEntity(repo.Name, uriConnectionDetails, client, pollIntervall) + status, errorPollEntity := abaputils.PollEntity(api, apiManager.GetPollIntervall()) if errorPollEntity != nil { return errors.Wrapf(errorPollEntity, errorString) } @@ -116,61 +107,6 @@ func handlePull(repo abaputils.Repository, pullConnectionDetails abaputils.Conne return err } -func triggerPull(repo abaputils.Repository, pullConnectionDetails 
abaputils.ConnectionDetailsHTTP, client piperhttp.Sender) (abaputils.ConnectionDetailsHTTP, error) { - - uriConnectionDetails := pullConnectionDetails - uriConnectionDetails.URL = "" - pullConnectionDetails.XCsrfToken = "fetch" - - // Loging into the ABAP System - getting the x-csrf-token and cookies - resp, err := abaputils.GetHTTPResponse("HEAD", pullConnectionDetails, nil, client) - if err != nil { - err = abaputils.HandleHTTPError(resp, err, "Authentication on the ABAP system failed", pullConnectionDetails) - return uriConnectionDetails, err - } - defer resp.Body.Close() - - log.Entry().WithField("StatusCode", resp.Status).WithField("ABAP Endpoint", pullConnectionDetails.URL).Debug("Authentication on the ABAP system successful") - uriConnectionDetails.XCsrfToken = resp.Header.Get("X-Csrf-Token") - pullConnectionDetails.XCsrfToken = uriConnectionDetails.XCsrfToken - - // Trigger the Pull of a Repository - if repo.Name == "" { - return uriConnectionDetails, errors.New("An empty string was passed for the parameter 'repositoryName'") - } - - jsonBody := []byte(repo.GetPullRequestBody()) - resp, err = abaputils.GetHTTPResponse("POST", pullConnectionDetails, jsonBody, client) - if err != nil { - err = abaputils.HandleHTTPError(resp, err, "Could not pull the "+repo.GetPullLogString(), uriConnectionDetails) - return uriConnectionDetails, err - } - defer resp.Body.Close() - log.Entry().WithField("StatusCode", resp.Status).WithField("repositoryName", repo.Name).WithField("commitID", repo.CommitID).WithField("Tag", repo.Tag).Debug("Triggered Pull of repository / software component") - - // Parse Response - var body abaputils.PullEntity - var abapResp map[string]*json.RawMessage - bodyText, errRead := io.ReadAll(resp.Body) - if errRead != nil { - return uriConnectionDetails, err - } - if err := json.Unmarshal(bodyText, &abapResp); err != nil { - return uriConnectionDetails, err - } - if err := json.Unmarshal(*abapResp["d"], &body); err != nil { - return uriConnectionDetails, err - } - if reflect.DeepEqual(abaputils.PullEntity{}, body) { - log.Entry().WithField("StatusCode", resp.Status).WithField("repositoryName", repo.Name).WithField("commitID", repo.CommitID).WithField("Tag", repo.Tag).Error("Could not pull the repository / software component") - err := errors.New("Request to ABAP System not successful") - return uriConnectionDetails, err - } - - uriConnectionDetails.URL = body.Metadata.URI - return uriConnectionDetails, nil -} - func checkPullRepositoryConfiguration(options abapEnvironmentPullGitRepoOptions) error { if (len(options.RepositoryNames) > 0 && options.Repositories != "") || (len(options.RepositoryNames) > 0 && options.RepositoryName != "") || (options.RepositoryName != "" && options.Repositories != "") { @@ -183,7 +119,7 @@ func checkPullRepositoryConfiguration(options abapEnvironmentPullGitRepoOptions) } func finishPullLogs() { - abaputils.AddDefaultDashedLine() + abaputils.AddDefaultDashedLine(1) log.Entry().Info("All repositories were pulled successfully") } diff --git a/cmd/abapEnvironmentPullGitRepo_generated.go b/cmd/abapEnvironmentPullGitRepo_generated.go index 80a8426f76..273ab2642f 100644 --- a/cmd/abapEnvironmentPullGitRepo_generated.go +++ b/cmd/abapEnvironmentPullGitRepo_generated.go @@ -75,7 +75,7 @@ Please provide either of the following options: log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { 
splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/abapEnvironmentPullGitRepo_test.go b/cmd/abapEnvironmentPullGitRepo_test.go index a941b7b91f..cd805f37d5 100644 --- a/cmd/abapEnvironmentPullGitRepo_test.go +++ b/cmd/abapEnvironmentPullGitRepo_test.go @@ -7,9 +7,9 @@ import ( "encoding/json" "os" "testing" + "time" "github.com/SAP/jenkins-library/pkg/abaputils" - "github.com/pkg/errors" "github.com/stretchr/testify/assert" ) @@ -58,7 +58,6 @@ func TestPullStep(t *testing.T) { logResultSuccess := `{"d": { "sc_name": "/DMO/SWC", "status": "S", "to_Log_Overview": { "results": [ { "log_index": 1, "log_name": "Main Import", "type_of_found_issues": "Success", "timestamp": "/Date(1644332299000+0000)/", "to_Log_Protocol": { "results": [ { "log_index": 1, "index_no": "1", "log_name": "", "type": "Info", "descr": "Main import", "timestamp": null, "criticality": 0 } ] } } ] } } }` client := &abaputils.ClientMock{ BodyList: []string{ - `{"d" : [] }`, `{"d" : ` + executionLogStringPull + `}`, logResultSuccess, `{"d" : { "status" : "S" } }`, @@ -70,7 +69,8 @@ func TestPullStep(t *testing.T) { StatusCode: 200, } - err := runAbapEnvironmentPullGitRepo(&config, &autils, client) + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err := runAbapEnvironmentPullGitRepo(&config, &autils, apiManager) assert.NoError(t, err, "Did not expect error") assert.Equal(t, 0, len(client.BodyList), "Not all requests were done") }) @@ -95,7 +95,9 @@ func TestPullStep(t *testing.T) { } config := abapEnvironmentPullGitRepoOptions{} - err := runAbapEnvironmentPullGitRepo(&config, &autils, client) + + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err := runAbapEnvironmentPullGitRepo(&config, &autils, apiManager) assert.Equal(t, expectedErrorMessage, err.Error(), "Different error message expected") }) @@ -145,7 +147,8 @@ repositories: Password: "testPassword", Repositories: "repositoriesTest.yml", } - err = runAbapEnvironmentPullGitRepo(&config, &autils, client) + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err = runAbapEnvironmentPullGitRepo(&config, &autils, apiManager) assert.NoError(t, err) }) @@ -200,7 +203,8 @@ repositories: StatusCode: 200, } - err = runAbapEnvironmentPullGitRepo(&config, &autils, client) + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err = runAbapEnvironmentPullGitRepo(&config, &autils, apiManager) if assert.Error(t, err, "Expected error") { assert.Equal(t, "Pull of the repository / software component '/DMO/REPO_A', commit 'ABCD1234' failed on the ABAP system", err.Error(), "Expected different error message") } @@ -258,7 +262,8 @@ repositories: StatusCode: 200, } - err = runAbapEnvironmentPullGitRepo(&config, &autils, client) + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err = runAbapEnvironmentPullGitRepo(&config, &autils, apiManager) if assert.Error(t, err, "Expected error") { assert.Equal(t, "Pull of the repository / software component '/DMO/REPO_A', tag 'v-1.0.1-build-0001' failed on the ABAP system", err.Error(), "Expected different error message") } @@ -297,7 +302,8 @@ repositories: StatusCode: 200, } - err := 
runAbapEnvironmentPullGitRepo(&config, &autils, client) + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err := runAbapEnvironmentPullGitRepo(&config, &autils, apiManager) if assert.Error(t, err, "Expected error") { assert.Equal(t, "Pull of the repository / software component '/DMO/SWC', commit '123456' failed on the ABAP system", err.Error(), "Expected different error message") } @@ -335,7 +341,8 @@ repositories: StatusCode: 200, } - err := runAbapEnvironmentPullGitRepo(&config, &autils, client) + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err := runAbapEnvironmentPullGitRepo(&config, &autils, apiManager) if assert.Error(t, err, "Expected error") { assert.Equal(t, "Pull of the repository / software component '/DMO/SWC' failed on the ABAP system", err.Error(), "Expected different error message") } @@ -381,7 +388,8 @@ repositories: Password: "testPassword", Repositories: "repositoriesTest.yml", } - err = runAbapEnvironmentPullGitRepo(&config, &autils, client) + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err = runAbapEnvironmentPullGitRepo(&config, &autils, apiManager) assert.EqualError(t, err, expectedErrorMessage) }) @@ -430,66 +438,12 @@ repositories: Password: "testPassword", Repositories: "repositoriesTest.yml", } - err = runAbapEnvironmentPullGitRepo(&config, &autils, client) + apiManager = &abaputils.SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Nanosecond, Force0510: true} + err = runAbapEnvironmentPullGitRepo(&config, &autils, apiManager) assert.EqualError(t, err, expectedErrorMessage) }) } -func TestTriggerPull(t *testing.T) { - - t.Run("Test trigger pull: success case", func(t *testing.T) { - - receivedURI := "example.com/Entity" - uriExpected := receivedURI - tokenExpected := "myToken" - - client := &abaputils.ClientMock{ - Body: `{"d" : { "__metadata" : { "uri" : "` + receivedURI + `" } } }`, - Token: tokenExpected, - StatusCode: 200, - } - - repoName := "testRepo1" - testCommit := "9caede7f31028cd52333eb496434275687fefb47" - - con := abaputils.ConnectionDetailsHTTP{ - User: "MY_USER", - Password: "MY_PW", - URL: "https://api.endpoint.com/Entity/", - } - entityConnection, err := triggerPull(abaputils.Repository{Name: repoName, CommitID: testCommit}, con, client) - assert.Nil(t, err) - assert.Equal(t, uriExpected, entityConnection.URL) - assert.Equal(t, tokenExpected, entityConnection.XCsrfToken) - }) - - t.Run("Test trigger pull: ABAP Error", func(t *testing.T) { - - errorMessage := "ABAP Error Message" - errorCode := "ERROR/001" - HTTPErrorMessage := "HTTP Error Message" - combinedErrorMessage := "HTTP Error Message: ERROR/001 - ABAP Error Message" - - client := &abaputils.ClientMock{ - Body: `{"error" : { "code" : "` + errorCode + `", "message" : { "lang" : "en", "value" : "` + errorMessage + `" } } }`, - Token: "myToken", - StatusCode: 400, - Error: errors.New(HTTPErrorMessage), - } - - repoName := "testRepo1" - testCommit := "9caede7f31028cd52333eb496434275687fefb47" - - con := abaputils.ConnectionDetailsHTTP{ - User: "MY_USER", - Password: "MY_PW", - URL: "https://api.endpoint.com/Entity/", - } - _, err := triggerPull(abaputils.Repository{Name: repoName, CommitID: testCommit}, con, client) - assert.Equal(t, combinedErrorMessage, err.Error(), "Different error message expected") - }) -} - func TestPullConfigChecker(t 
*testing.T) { t.Run("Success case: check config file", func(t *testing.T) { config := abapEnvironmentPullGitRepoOptions{ diff --git a/cmd/abapEnvironmentPushATCSystemConfig.go b/cmd/abapEnvironmentPushATCSystemConfig.go index 22f1cf00a3..2ddcd51ea3 100644 --- a/cmd/abapEnvironmentPushATCSystemConfig.go +++ b/cmd/abapEnvironmentPushATCSystemConfig.go @@ -199,7 +199,7 @@ func fetchXcsrfTokenFromHead(connectionDetails abaputils.ConnectionDetailsHTTP, // Loging into the ABAP System - getting the x-csrf-token and cookies resp, err := abaputils.GetHTTPResponse("HEAD", connectionDetails, nil, client) if err != nil { - err = abaputils.HandleHTTPError(resp, err, "authentication on the ABAP system failed", connectionDetails) + _, err = abaputils.HandleHTTPError(resp, err, "authentication on the ABAP system failed", connectionDetails) return connectionDetails.XCsrfToken, errors.Errorf("X-Csrf-Token fetch failed for Service ATC System Configuration: %v", err) } defer resp.Body.Close() diff --git a/cmd/abapEnvironmentPushATCSystemConfig_generated.go b/cmd/abapEnvironmentPushATCSystemConfig_generated.go index eeab733726..2cc8ee7620 100644 --- a/cmd/abapEnvironmentPushATCSystemConfig_generated.go +++ b/cmd/abapEnvironmentPushATCSystemConfig_generated.go @@ -72,7 +72,7 @@ Please provide either of the following options: log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/abapEnvironmentRunATCCheck.go b/cmd/abapEnvironmentRunATCCheck.go index 1487497b63..3fdabde3ff 100644 --- a/cmd/abapEnvironmentRunATCCheck.go +++ b/cmd/abapEnvironmentRunATCCheck.go @@ -306,7 +306,7 @@ func runATC(requestType string, details abaputils.ConnectionDetailsHTTP, body [] resp, err := client.SendRequest(requestType, details.URL, bytes.NewBuffer(body), header, nil) _ = logResponseBody(resp) if err != nil || (resp != nil && resp.StatusCode == 400) { // send request does not seem to produce error with StatusCode 400!!! 
- err = abaputils.HandleHTTPError(resp, err, "triggering ATC run failed with Status: "+resp.Status, details) + _, err = abaputils.HandleHTTPError(resp, err, "triggering ATC run failed with Status: "+resp.Status, details) log.SetErrorCategory(log.ErrorService) return resp, errors.Errorf("triggering ATC run failed: %v", err) } diff --git a/cmd/abapEnvironmentRunATCCheck_generated.go b/cmd/abapEnvironmentRunATCCheck_generated.go index 618e4a2740..95f679fb35 100644 --- a/cmd/abapEnvironmentRunATCCheck_generated.go +++ b/cmd/abapEnvironmentRunATCCheck_generated.go @@ -77,7 +77,7 @@ Regardless of the option you chose, please make sure to provide the configuratio log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/abapEnvironmentRunAUnitTest_generated.go b/cmd/abapEnvironmentRunAUnitTest_generated.go index cb0b212880..a4a4c2d7e1 100644 --- a/cmd/abapEnvironmentRunAUnitTest_generated.go +++ b/cmd/abapEnvironmentRunAUnitTest_generated.go @@ -76,7 +76,7 @@ Regardless of the option you chose, please make sure to provide the object set c log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/ansSendEvent_generated.go b/cmd/ansSendEvent_generated.go index 9e110ed13d..5249f73901 100644 --- a/cmd/ansSendEvent_generated.go +++ b/cmd/ansSendEvent_generated.go @@ -68,7 +68,7 @@ func AnsSendEventCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/apiKeyValueMapDownload_generated.go b/cmd/apiKeyValueMapDownload_generated.go index ca6655613f..1d049a73e2 100644 --- a/cmd/apiKeyValueMapDownload_generated.go +++ b/cmd/apiKeyValueMapDownload_generated.go @@ -60,7 +60,7 @@ Learn more about the SAP API Management API for downloading an Key Value Map art log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/apiKeyValueMapUpload_generated.go b/cmd/apiKeyValueMapUpload_generated.go index 0df40387ba..55d585d764 100644 --- a/cmd/apiKeyValueMapUpload_generated.go +++ b/cmd/apiKeyValueMapUpload_generated.go @@ -61,7 +61,7 @@ Learn more about the SAP API Management API for creating an API key value map ar log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = 
&splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/apiProviderDownload_generated.go b/cmd/apiProviderDownload_generated.go index b52952b533..a5bda62547 100644 --- a/cmd/apiProviderDownload_generated.go +++ b/cmd/apiProviderDownload_generated.go @@ -59,7 +59,7 @@ func ApiProviderDownloadCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/apiProviderList_generated.go b/cmd/apiProviderList_generated.go index cac7752cb3..0ff2e1d9f9 100644 --- a/cmd/apiProviderList_generated.go +++ b/cmd/apiProviderList_generated.go @@ -96,7 +96,7 @@ func ApiProviderListCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/apiProviderUpload_generated.go b/cmd/apiProviderUpload_generated.go index b20f68aee0..8d8eac2d5a 100644 --- a/cmd/apiProviderUpload_generated.go +++ b/cmd/apiProviderUpload_generated.go @@ -59,7 +59,7 @@ Learn more about API Management api for creating an API provider artifact [here] log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/apiProxyDownload_generated.go b/cmd/apiProxyDownload_generated.go index b1ad824c10..e900a47f6c 100644 --- a/cmd/apiProxyDownload_generated.go +++ b/cmd/apiProxyDownload_generated.go @@ -59,7 +59,7 @@ func ApiProxyDownloadCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/apiProxyList_generated.go b/cmd/apiProxyList_generated.go index c047939de8..62b6378170 100644 --- a/cmd/apiProxyList_generated.go +++ b/cmd/apiProxyList_generated.go @@ -96,7 +96,7 @@ func ApiProxyListCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/apiProxyUpload_generated.go b/cmd/apiProxyUpload_generated.go index 36bfb25f1e..ca1e31b5e0 100644 --- a/cmd/apiProxyUpload_generated.go +++ b/cmd/apiProxyUpload_generated.go @@ -59,7 +59,7 @@ Learn more about the SAP API Management API for uploading an api proxy artifact 
log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/artifactPrepareVersion.go b/cmd/artifactPrepareVersion.go index fcc2e38ad1..9634b40b4c 100644 --- a/cmd/artifactPrepareVersion.go +++ b/cmd/artifactPrepareVersion.go @@ -10,6 +10,7 @@ import ( "text/template" "time" + "github.com/SAP/jenkins-library/pkg/certutils" piperhttp "github.com/SAP/jenkins-library/pkg/http" "github.com/SAP/jenkins-library/pkg/piperutils" @@ -55,6 +56,7 @@ type artifactPrepareVersionUtils interface { RunExecutable(e string, p ...string) error DownloadFile(url, filename string, header netHttp.Header, cookies []*netHttp.Cookie) error + piperhttp.Sender Glob(pattern string) (matches []string, err error) FileExists(filename string) (bool, error) @@ -203,8 +205,9 @@ func runArtifactPrepareVersion(config *artifactPrepareVersionOptions, telemetryD } if config.VersioningType == "cloud" { + certs, err := certutils.CertificateDownload(config.CustomTLSCertificateLinks, utils) // commit changes and push to repository (including new version tag) - gitCommitID, err = pushChanges(config, newVersion, repository, worktree, now) + gitCommitID, err = pushChanges(config, newVersion, repository, worktree, now, certs) if err != nil { if strings.Contains(fmt.Sprint(err), "reference already exists") { log.SetErrorCategory(log.ErrorCustom) @@ -334,7 +337,7 @@ func initializeWorktree(gitCommit plumbing.Hash, worktree gitWorktree) error { return nil } -func pushChanges(config *artifactPrepareVersionOptions, newVersion string, repository gitRepository, worktree gitWorktree, t time.Time) (string, error) { +func pushChanges(config *artifactPrepareVersionOptions, newVersion string, repository gitRepository, worktree gitWorktree, t time.Time, certs []byte) (string, error) { var commitID string @@ -355,6 +358,7 @@ func pushChanges(config *artifactPrepareVersionOptions, newVersion string, repos pushOptions := git.PushOptions{ RefSpecs: []gitConfig.RefSpec{gitConfig.RefSpec(ref)}, + CABundle: certs, } currentRemoteOrigin, err := repository.Remote("origin") @@ -497,10 +501,6 @@ func propagateVersion(config *artifactPrepareVersionOptions, utils artifactPrepa } for i, targetTool := range config.AdditionalTargetTools { - if targetTool == config.BuildTool { - // ignore configured build tool - continue - } var buildDescriptors []string if len(config.AdditionalTargetDescriptors) > 0 { diff --git a/cmd/artifactPrepareVersion_generated.go b/cmd/artifactPrepareVersion_generated.go index 29cea8e4c9..ec92b05082 100644 --- a/cmd/artifactPrepareVersion_generated.go +++ b/cmd/artifactPrepareVersion_generated.go @@ -40,6 +40,7 @@ type artifactPrepareVersionOptions struct { Username string `json:"username,omitempty"` VersioningTemplate string `json:"versioningTemplate,omitempty"` VersioningType string `json:"versioningType,omitempty" validate:"possible-values=cloud cloud_noTag library"` + CustomTLSCertificateLinks []string `json:"customTlsCertificateLinks,omitempty"` } type artifactPrepareVersionCommonPipelineEnvironment struct { @@ -186,7 +187,7 @@ Define ` + "`" + `buildTool: custom` + "`" + `, ` + "`" + `filePath: 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { 
splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) @@ -271,6 +272,7 @@ func addArtifactPrepareVersionFlags(cmd *cobra.Command, stepConfig *artifactPrep cmd.Flags().StringVar(&stepConfig.Username, "username", os.Getenv("PIPER_username"), "User name for git authentication") cmd.Flags().StringVar(&stepConfig.VersioningTemplate, "versioningTemplate", os.Getenv("PIPER_versioningTemplate"), "DEPRECATED: Defines the template for the automatic version which will be created") cmd.Flags().StringVar(&stepConfig.VersioningType, "versioningType", `cloud`, "Defines the type of versioning") + cmd.Flags().StringSliceVar(&stepConfig.CustomTLSCertificateLinks, "customTlsCertificateLinks", []string{}, "List containing download links of custom TLS certificates. This is required to ensure trusted connections to registries with custom certificates.") cmd.MarkFlagRequired("buildTool") } @@ -517,6 +519,16 @@ func artifactPrepareVersionMetadata() config.StepData { Aliases: []config.Alias{}, Default: `cloud`, }, + { + Name: "customTlsCertificateLinks", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"}, + Type: "[]string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: []string{}, + Conditions: []config.Condition{{ConditionRef: "strings-equal", Params: []config.Param{{Name: "buildTool", Value: "maven"}, {Name: "buildTool", Value: "gradle"}}}}, + }, }, }, Containers: []config.Container{ diff --git a/cmd/artifactPrepareVersion_test.go b/cmd/artifactPrepareVersion_test.go index fea7621732..24b8ee79f3 100644 --- a/cmd/artifactPrepareVersion_test.go +++ b/cmd/artifactPrepareVersion_test.go @@ -176,6 +176,7 @@ func (w *gitWorktreeMock) Commit(msg string, opts *git.CommitOptions) (plumbing. 
type artifactPrepareVersionMockUtils struct { *mock.ExecMockRunner *mock.FilesMock + *mock.HttpClientMock } func newArtifactPrepareVersionMockUtils() *artifactPrepareVersionMockUtils { @@ -619,7 +620,7 @@ func TestPushChanges(t *testing.T) { repo := gitRepositoryMock{remote: remote} worktree := gitWorktreeMock{commitHash: plumbing.ComputeHash(plumbing.CommitObject, []byte{1, 2, 3})} - commitID, err := pushChanges(&config, newVersion, &repo, &worktree, testTime) + commitID, err := pushChanges(&config, newVersion, &repo, &worktree, testTime, nil) assert.NoError(t, err) assert.Equal(t, "428ecf70bc22df0ba3dcf194b5ce53e769abab07", commitID) assert.Equal(t, "update version 1.2.3", worktree.commitMsg) @@ -633,10 +634,11 @@ func TestPushChanges(t *testing.T) { config := artifactPrepareVersionOptions{CommitUserName: "Project Piper"} repo := gitRepositoryMock{remote: remote} worktree := gitWorktreeMock{commitHash: plumbing.ComputeHash(plumbing.CommitObject, []byte{1, 2, 3})} + customCerts := []byte("custom certs") originalSSHAgentAuth := sshAgentAuth sshAgentAuth = func(u string) (*ssh.PublicKeysCallback, error) { return &ssh.PublicKeysCallback{}, nil } - commitID, err := pushChanges(&config, newVersion, &repo, &worktree, testTime) + commitID, err := pushChanges(&config, newVersion, &repo, &worktree, testTime, customCerts) sshAgentAuth = originalSSHAgentAuth assert.NoError(t, err) @@ -645,7 +647,7 @@ func TestPushChanges(t *testing.T) { assert.Equal(t, &git.CommitOptions{All: true, Author: &object.Signature{Name: "Project Piper", When: testTime}}, worktree.commitOpts) assert.Equal(t, "1.2.3", repo.tag) assert.Equal(t, "428ecf70bc22df0ba3dcf194b5ce53e769abab07", repo.tagHash.String()) - assert.Equal(t, &git.PushOptions{RefSpecs: []gitConfig.RefSpec{"refs/tags/1.2.3:refs/tags/1.2.3"}, Auth: &ssh.PublicKeysCallback{}}, repo.pushOptions) + assert.Equal(t, &git.PushOptions{RefSpecs: []gitConfig.RefSpec{"refs/tags/1.2.3:refs/tags/1.2.3"}, Auth: &ssh.PublicKeysCallback{}, CABundle: customCerts}, repo.pushOptions) }) t.Run("success - ssh", func(t *testing.T) { @@ -658,7 +660,7 @@ func TestPushChanges(t *testing.T) { originalSSHAgentAuth := sshAgentAuth sshAgentAuth = func(u string) (*ssh.PublicKeysCallback, error) { return &ssh.PublicKeysCallback{}, nil } - commitID, err := pushChanges(&config, newVersion, &repo, &worktree, testTime) + commitID, err := pushChanges(&config, newVersion, &repo, &worktree, testTime, nil) sshAgentAuth = originalSSHAgentAuth assert.NoError(t, err) @@ -671,7 +673,7 @@ func TestPushChanges(t *testing.T) { repo := gitRepositoryMock{} worktree := gitWorktreeMock{commitError: "commit error", commitHash: plumbing.ComputeHash(plumbing.CommitObject, []byte{1, 2, 3})} - commitID, err := pushChanges(&config, newVersion, &repo, &worktree, testTime) + commitID, err := pushChanges(&config, newVersion, &repo, &worktree, testTime, nil) assert.Equal(t, "0000000000000000000000000000000000000000", commitID) assert.EqualError(t, err, "failed to commit new version: commit error") }) @@ -681,7 +683,7 @@ func TestPushChanges(t *testing.T) { repo := gitRepositoryMock{tagError: "tag error"} worktree := gitWorktreeMock{commitHash: plumbing.ComputeHash(plumbing.CommitObject, []byte{1, 2, 3})} - commitID, err := pushChanges(&config, newVersion, &repo, &worktree, testTime) + commitID, err := pushChanges(&config, newVersion, &repo, &worktree, testTime, nil) assert.Equal(t, "428ecf70bc22df0ba3dcf194b5ce53e769abab07", commitID) assert.EqualError(t, err, "tag error") }) @@ -691,7 +693,7 @@ func 
TestPushChanges(t *testing.T) { repo := gitRepositoryMock{} worktree := gitWorktreeMock{commitHash: plumbing.ComputeHash(plumbing.CommitObject, []byte{1, 2, 3})} - commitID, err := pushChanges(&config, newVersion, &repo, &worktree, testTime) + commitID, err := pushChanges(&config, newVersion, &repo, &worktree, testTime, nil) assert.Equal(t, "428ecf70bc22df0ba3dcf194b5ce53e769abab07", commitID) assert.EqualError(t, err, "no remote url maintained") }) @@ -720,7 +722,7 @@ func TestPushChanges(t *testing.T) { for _, test := range tt { sshAgentAuth = test.sshAgentAuth - commitID, err := pushChanges(&config, newVersion, &test.repo, &worktree, testTime) + commitID, err := pushChanges(&config, newVersion, &test.repo, &worktree, testTime, nil) sshAgentAuth = originalSSHAgentAuth assert.Equal(t, "428ecf70bc22df0ba3dcf194b5ce53e769abab07", commitID) @@ -733,7 +735,7 @@ func TestPushChanges(t *testing.T) { repo := gitRepositoryMock{remote: remote, pushError: "push error"} worktree := gitWorktreeMock{commitHash: plumbing.ComputeHash(plumbing.CommitObject, []byte{1, 2, 3})} - commitID, err := pushChanges(&config, newVersion, &repo, &worktree, testTime) + commitID, err := pushChanges(&config, newVersion, &repo, &worktree, testTime, nil) assert.Equal(t, "428ecf70bc22df0ba3dcf194b5ce53e769abab07", commitID) assert.EqualError(t, err, "push error") }) diff --git a/cmd/ascAppUpload_generated.go b/cmd/ascAppUpload_generated.go index e69fc9b878..356639fced 100644 --- a/cmd/ascAppUpload_generated.go +++ b/cmd/ascAppUpload_generated.go @@ -67,7 +67,7 @@ For more information about ASC, check out [Application Support Center](https://g log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/awsS3Upload_generated.go b/cmd/awsS3Upload_generated.go index 3751731501..8c1e840778 100644 --- a/cmd/awsS3Upload_generated.go +++ b/cmd/awsS3Upload_generated.go @@ -59,7 +59,7 @@ In case a file is uploaded that is already contained in the S3 bucket, it will b log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/azureBlobUpload_generated.go b/cmd/azureBlobUpload_generated.go index c623a3136a..2c742bf5c5 100644 --- a/cmd/azureBlobUpload_generated.go +++ b/cmd/azureBlobUpload_generated.go @@ -59,7 +59,7 @@ In case a file is uploaded that is already contained in the storage, it will be log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/batsExecuteTests.go b/cmd/batsExecuteTests.go index 6bd28f704f..aa4251ebfa 100644 --- a/cmd/batsExecuteTests.go +++ b/cmd/batsExecuteTests.go @@ -108,7 +108,9 @@ func runBatsExecuteTests(config *batsExecuteTestsOptions, telemetryData *telemet } func (b 
*batsExecuteTestsUtilsBundle) CloneRepo(URL string) error { - _, err := pipergit.PlainClone("", "", URL, "bats-core") + // ToDo: BatsExecute test needs to check if the repo can come from an + // enterprise github instance and needs ca-cert handling separately + _, err := pipergit.PlainClone("", "", URL, "bats-core", []byte{}) return err } diff --git a/cmd/batsExecuteTests_generated.go b/cmd/batsExecuteTests_generated.go index 5d525cf6c0..85e1536656 100644 --- a/cmd/batsExecuteTests_generated.go +++ b/cmd/batsExecuteTests_generated.go @@ -96,7 +96,7 @@ func BatsExecuteTestsCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/checkIfStepActive.go b/cmd/checkIfStepActive.go index 473984fa14..ae39623f89 100644 --- a/cmd/checkIfStepActive.go +++ b/cmd/checkIfStepActive.go @@ -92,6 +92,9 @@ func checkIfStepActive(utils piperutils.FileUtils) error { runSteps = runConfigV1.RunSteps runStages = runConfigV1.RunStages } else { + log.Entry().Warning("This step is using a deprecated format of stage conditions which will be removed in Jan 2024. " + + "To avoid pipeline breakage, please call the checkIfStepActive command with the --useV1 flag.", + ) runConfig := &config.RunConfig{StageConfigFile: stageConfigFile} err = runConfig.InitRunConfig(projectConfig, nil, nil, nil, nil, doublestar.Glob, checkStepActiveOptions.openFile) if err != nil { diff --git a/cmd/checkmarxExecuteScan_generated.go b/cmd/checkmarxExecuteScan_generated.go index bdc77f605c..ec23ea8698 100644 --- a/cmd/checkmarxExecuteScan_generated.go +++ b/cmd/checkmarxExecuteScan_generated.go @@ -275,7 +275,7 @@ thresholds instead of ` + "`" + `percentage` + "`" + ` whereas we strongly recom log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/checkmarxOneExecuteScan.go b/cmd/checkmarxOneExecuteScan.go index 80bcc01e93..af790a3487 100644 --- a/cmd/checkmarxOneExecuteScan.go +++ b/cmd/checkmarxOneExecuteScan.go @@ -141,6 +141,10 @@ func runStep(config checkmarxOneExecuteScanOptions, influx *checkmarxOneExecuteS return fmt.Errorf("failed to determine incremental or full scan configuration: %s", err) } + if config.Incremental { + log.Entry().Warnf("If you change your file filter pattern, it is recommended to run a Full scan instead of an incremental one, to ensure full code coverage.") + } + zipFile, err := cx1sh.ZipFiles() if err != nil { return fmt.Errorf("failed to create zip file: %s", err) @@ -302,10 +306,31 @@ func (c *checkmarxOneExecuteScanHelper) SetProjectPreset() error { } currentPreset := "" + currentLanguageMode := "multi" // piper default for _, conf := range projectConf { if conf.Key == "scan.config.sast.presetName" { currentPreset = conf.Value - break + } + if conf.Key == "scan.config.sast.languageMode" { + currentLanguageMode = conf.Value + } + } + + if c.config.LanguageMode == "" || strings.EqualFold(c.config.LanguageMode, "multi") { // default multi if blank + if currentLanguageMode !=
"multi" { + log.Entry().Info("Pipeline yaml requests multi-language scan - updating project configuration") + c.sys.SetProjectLanguageMode(c.Project.ProjectID, "multi", true) + + if c.config.Incremental { + log.Entry().Warn("Pipeline yaml requests incremental scan, but switching from 'primary' to 'multi' language mode requires a full scan - switching from incremental to full") + c.config.Incremental = false + } + } + } else { // primary language mode + if currentLanguageMode != "primary" { + log.Entry().Info("Pipeline yaml requests primary-language scan - updating project configuration") + c.sys.SetProjectLanguageMode(c.Project.ProjectID, "primary", true) + // no need to switch incremental to full here (multi-language scan includes single-language scan coverage) } } @@ -319,6 +344,11 @@ func (c *checkmarxOneExecuteScanHelper) SetProjectPreset() error { } else if currentPreset != c.config.Preset { log.Entry().Infof("Project configured preset (%v) does not match pipeline yaml (%v) - updating project configuration.", currentPreset, c.config.Preset) c.sys.SetProjectPreset(c.Project.ProjectID, c.config.Preset, true) + + if c.config.Incremental { + log.Entry().Warn("Changing project settings requires a full scan to take effect - switching from incremental to full") + c.config.Incremental = false + } } else { log.Entry().Infof("Project is already configured to use pipeline preset %v", currentPreset) } @@ -717,7 +747,13 @@ func (c *checkmarxOneExecuteScanHelper) getDetailedResults(scan *checkmarxOne.Sc resultMap["LinesOfCodeScanned"] = scanmeta.LOC resultMap["FilesScanned"] = scanmeta.FileCount - resultMap["ToolVersion"] = "Cx1 Gap: No API for this" + + version, err := c.sys.GetVersion() + if err != nil { + resultMap["ToolVersion"] = "Error fetching current version" + } else { + resultMap["ToolVersion"] = fmt.Sprintf("CxOne: %v, SAST: %v, KICS: %v", version.CxOne, version.SAST, version.KICS) + } if scanmeta.IsIncremental { resultMap["ScanType"] = "Incremental" diff --git a/cmd/checkmarxOneExecuteScan_generated.go b/cmd/checkmarxOneExecuteScan_generated.go index 72cfcba5ab..2e15fbff59 100644 --- a/cmd/checkmarxOneExecuteScan_generated.go +++ b/cmd/checkmarxOneExecuteScan_generated.go @@ -282,7 +282,7 @@ thresholds instead of ` + "`" + `percentage` + "`" + ` whereas we strongly recom log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/checkmarxOneExecuteScan_test.go b/cmd/checkmarxOneExecuteScan_test.go index 96db7b8d26..453e8a1780 100644 --- a/cmd/checkmarxOneExecuteScan_test.go +++ b/cmd/checkmarxOneExecuteScan_test.go @@ -240,6 +240,10 @@ func (sys *checkmarxOneSystemMock) UpdateProjectConfiguration(projectID string, return nil } +func (sys *checkmarxOneSystemMock) GetVersion() (checkmarxOne.VersionInfo, error) { + return checkmarxOne.VersionInfo{}, nil +} + type checkmarxOneExecuteScanHelperMock struct { ctx context.Context config checkmarxOneExecuteScanOptions diff --git a/cmd/cloudFoundryCreateServiceKey_generated.go b/cmd/cloudFoundryCreateServiceKey_generated.go index 0cba576f9a..bfef752fbc 100644 --- a/cmd/cloudFoundryCreateServiceKey_generated.go +++ b/cmd/cloudFoundryCreateServiceKey_generated.go @@ -66,7 +66,7 @@ func CloudFoundryCreateServiceKeyCommand() *cobra.Command { 
log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/cloudFoundryCreateService_generated.go b/cmd/cloudFoundryCreateService_generated.go index 086756f87d..6231a5bb27 100644 --- a/cmd/cloudFoundryCreateService_generated.go +++ b/cmd/cloudFoundryCreateService_generated.go @@ -78,7 +78,7 @@ Please provide either of the following options: log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/cloudFoundryCreateSpace_generated.go b/cmd/cloudFoundryCreateSpace_generated.go index 65401ab65f..4eee673cd2 100644 --- a/cmd/cloudFoundryCreateSpace_generated.go +++ b/cmd/cloudFoundryCreateSpace_generated.go @@ -64,7 +64,7 @@ Mandatory: log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/cloudFoundryDeleteService_generated.go b/cmd/cloudFoundryDeleteService_generated.go index d5bd14cc7e..b3f673d671 100644 --- a/cmd/cloudFoundryDeleteService_generated.go +++ b/cmd/cloudFoundryDeleteService_generated.go @@ -64,7 +64,7 @@ func CloudFoundryDeleteServiceCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/cloudFoundryDeleteSpace_generated.go b/cmd/cloudFoundryDeleteSpace_generated.go index 2c2e8b6381..68b1a3a1e1 100644 --- a/cmd/cloudFoundryDeleteSpace_generated.go +++ b/cmd/cloudFoundryDeleteSpace_generated.go @@ -64,7 +64,7 @@ Mandatory: log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/cloudFoundryDeploy.go b/cmd/cloudFoundryDeploy.go index bdf79ce9e5..b757a2b961 100644 --- a/cmd/cloudFoundryDeploy.go +++ b/cmd/cloudFoundryDeploy.go @@ -248,6 +248,11 @@ func handleCFNativeDeployment(config *cloudFoundryDeployOptions, command command // deploy command will be provided by the prepare functions below if deployType == "blue-green" { + log.Entry().Warn("[WARN] Blue-green deployment type is deprecated for cf native builds " + + "and will be completely removed by 01.02.2024. " + + "Instead, set parameter `cfNativeDeployParameters: '--strategy rolling'`. 
" + + "Please refer to the Cloud Foundry documentation for further information: " + + "https://docs.cloudfoundry.org/devguide/deploy-apps/rolling-deploy.html") deployCommand, deployOptions, smokeTestScript, err = prepareBlueGreenCfNativeDeploy(config) if err != nil { return errors.Wrapf(err, "Cannot prepare cf native deployment. DeployType '%s'", deployType) diff --git a/cmd/cloudFoundryDeploy_generated.go b/cmd/cloudFoundryDeploy_generated.go index b3026cb503..8941043b5e 100644 --- a/cmd/cloudFoundryDeploy_generated.go +++ b/cmd/cloudFoundryDeploy_generated.go @@ -141,7 +141,7 @@ func CloudFoundryDeployCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/cnbBuild.go b/cmd/cnbBuild.go index e676dd6024..3fbd2b9784 100644 --- a/cmd/cnbBuild.go +++ b/cmd/cnbBuild.go @@ -6,6 +6,7 @@ import ( "os" "path" "path/filepath" + "syscall" "github.com/SAP/jenkins-library/pkg/buildpacks" "github.com/SAP/jenkins-library/pkg/buildsettings" @@ -31,7 +32,7 @@ import ( const ( creatorPath = "/cnb/lifecycle/creator" platformPath = "/tmp/platform" - platformAPIVersion = "0.11" + platformAPIVersion = "0.12" ) type cnbBuildUtilsBundle struct { @@ -285,8 +286,14 @@ func (config *cnbBuildOptions) resolvePath(utils cnbutils.BuildUtils) (buildpack func callCnbBuild(config *cnbBuildOptions, telemetryData *telemetry.CustomData, utils cnbutils.BuildUtils, commonPipelineEnvironment *cnbBuildCommonPipelineEnvironment, httpClient piperhttp.Sender) error { stepName := "cnbBuild" - telemetry := buildpacks.NewTelemetry(telemetryData) + err := isBuilder(utils) + if err != nil { + log.SetErrorCategory(log.ErrorConfiguration) + return errors.Wrap(err, "the provided dockerImage is not a valid builder") + } + + telemetry := buildpacks.NewTelemetry(telemetryData) dockerImage, err := GetDockerImageValue(stepName) if err != nil { log.Entry().Warnf("failed to retrieve dockerImage configuration: '%v'", err) @@ -362,11 +369,22 @@ func runCnbBuild(config *cnbBuildOptions, telemetry *buildpacks.Telemetry, image return errors.Wrap(err, fmt.Sprintf("failed to clean up platform folder %s", platformPath)) } - tempdir, err := os.MkdirTemp("", "cnbBuild-") + tempdir, err := utils.TempDir("", "cnbBuild-") if err != nil { return errors.Wrap(err, "failed to create tempdir") } - defer os.RemoveAll(tempdir) + defer utils.RemoveAll(tempdir) + + uid, gid, err := cnbutils.CnbUserInfo() + if err != nil { + return errors.Wrap(err, "failed to get user information") + } + + err = utils.Chown(tempdir, uid, gid) + if err != nil { + return errors.Wrap(err, "failed to change tempdir ownership") + } + if config.BuildEnvVars == nil { config.BuildEnvVars = map[string]interface{}{} } @@ -374,12 +392,6 @@ func runCnbBuild(config *cnbBuildOptions, telemetry *buildpacks.Telemetry, image telemetrySegment := createInitialTelemetrySegment(config, utils) - err = isBuilder(utils) - if err != nil { - log.SetErrorCategory(log.ErrorConfiguration) - return errors.Wrap(err, "the provided dockerImage is not a valid builder") - } - include := ignore.CompileIgnoreLines("**/*") exclude := ignore.CompileIgnoreLines("piper", ".pipeline", ".git") @@ -486,6 +498,10 @@ func runCnbBuild(config *cnbBuildOptions, telemetry *buildpacks.Telemetry, 
image } } + if err := utils.Chown(target, uid, gid); err != nil { + return err + } + if ok, _ := utils.FileExists(filepath.Join(target, "pom.xml")); ok { err = linkTargetFolder(utils, source, target) if err != nil { @@ -565,7 +581,15 @@ func runCnbBuild(config *cnbBuildOptions, telemetry *buildpacks.Telemetry, image } creatorArgs = append(creatorArgs, fmt.Sprintf("%s:%s", containerImage, targetImage.ContainerImageTag)) - err = utils.RunExecutable(creatorPath, creatorArgs...) + attr := &syscall.SysProcAttr{ + Credential: &syscall.Credential{ + Uid: uint32(uid), + Gid: uint32(gid), + NoSetGroups: true, + }, + } + + err = utils.RunExecutableWithAttrs(creatorPath, attr, creatorArgs...) if err != nil { log.SetErrorCategory(log.ErrorBuild) return errors.Wrapf(err, "execution of '%s' failed", creatorArgs) diff --git a/cmd/cnbBuild_generated.go b/cmd/cnbBuild_generated.go index d2d22c488a..5ff450c700 100644 --- a/cmd/cnbBuild_generated.go +++ b/cmd/cnbBuild_generated.go @@ -166,7 +166,7 @@ func CnbBuildCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) @@ -519,7 +519,7 @@ func cnbBuildMetadata() config.StepData { }, }, Containers: []config.Container{ - {Image: "paketobuildpacks/builder:base"}, + {Image: "paketobuildpacks/builder-jammy-base:latest", Options: []config.Option{{Name: "-u", Value: "0"}}}, }, Outputs: config.StepOutputs{ Resources: []config.StepResources{ diff --git a/cmd/cnbBuild_test.go b/cmd/cnbBuild_test.go index 75ba12e4a0..cf62cc5bc7 100644 --- a/cmd/cnbBuild_test.go +++ b/cmd/cnbBuild_test.go @@ -105,7 +105,10 @@ func assetBuildEnv(t *testing.T, utils cnbutils.MockUtils, key, value string) bo } func TestRunCnbBuild(t *testing.T) { - configOptions.openFile = piperconf.OpenPiperFile + configOptions.OpenFile = piperconf.OpenPiperFile + + t.Setenv("CNB_USER_ID", "1000") + t.Setenv("CNB_GROUP_ID", "1000") t.Run("prefers direct configuration", func(t *testing.T) { t.Parallel() @@ -141,7 +144,7 @@ func TestRunCnbBuild(t *testing.T) { assert.Contains(t, runner.Calls[1].Params, "my-process") assert.Equal(t, config.ContainerRegistryURL, commonPipelineEnvironment.container.registryURL) assert.Equal(t, "my-image:0.0.1", commonPipelineEnvironment.container.imageNameTag) - assert.Equal(t, `{"cnbBuild":[{"dockerImage":"paketobuildpacks/builder:base"}]}`, commonPipelineEnvironment.custom.buildSettingsInfo) + assert.Equal(t, `{"cnbBuild":[{"dockerImage":"paketobuildpacks/builder-jammy-base:latest"}]}`, commonPipelineEnvironment.custom.buildSettingsInfo) }) t.Run("prefers project descriptor", func(t *testing.T) { @@ -617,7 +620,7 @@ uri = "some-buildpack"`)) assert.Equal(t, "folder", string(customData.Data[0].Path)) assert.Contains(t, customData.Data[0].AdditionalTags, "latest") assert.Contains(t, customData.Data[0].BindingKeys, "SECRET") - assert.Equal(t, "paketobuildpacks/builder:base", customData.Data[0].Builder) + assert.Equal(t, "paketobuildpacks/builder-jammy-base:latest", customData.Data[0].Builder) assert.Contains(t, customData.Data[0].Buildpacks.FromConfig, "paketobuildpacks/java") assert.NotContains(t, customData.Data[0].Buildpacks.FromProjectDescriptor, "paketobuildpacks/java") diff --git a/cmd/codeqlExecuteScan.go b/cmd/codeqlExecuteScan.go index 
84525c2822..d9ac049512 100644 --- a/cmd/codeqlExecuteScan.go +++ b/cmd/codeqlExecuteScan.go @@ -3,6 +3,7 @@ package cmd import ( "bytes" "fmt" + "net/http" "os" "path/filepath" "regexp" @@ -11,11 +12,12 @@ import ( "github.com/SAP/jenkins-library/pkg/codeql" "github.com/SAP/jenkins-library/pkg/command" + piperhttp "github.com/SAP/jenkins-library/pkg/http" "github.com/SAP/jenkins-library/pkg/log" + "github.com/SAP/jenkins-library/pkg/maven" "github.com/SAP/jenkins-library/pkg/orchestrator" "github.com/SAP/jenkins-library/pkg/piperutils" "github.com/SAP/jenkins-library/pkg/telemetry" - "github.com/SAP/jenkins-library/pkg/toolrecord" "github.com/pkg/errors" ) @@ -23,28 +25,26 @@ type codeqlExecuteScanUtils interface { command.ExecRunner piperutils.FileUtils -} -type RepoInfo struct { - serverUrl string - repo string - commitId string - ref string - owner string + DownloadFile(url, filename string, header http.Header, cookies []*http.Cookie) error } type codeqlExecuteScanUtilsBundle struct { *command.Command *piperutils.Files + *piperhttp.Client } -const sarifUploadComplete = "complete" -const sarifUploadFailed = "failed" +const ( + sarifUploadComplete = "complete" + sarifUploadFailed = "failed" +) func newCodeqlExecuteScanUtils() codeqlExecuteScanUtils { utils := codeqlExecuteScanUtilsBundle{ Command: &command.Command{}, Files: &piperutils.Files{}, + Client: &piperhttp.Client{}, } utils.Stdout(log.Writer()) @@ -97,7 +97,7 @@ func getLangFromBuildTool(buildTool string) string { } } -func getGitRepoInfo(repoUri string, repoInfo *RepoInfo) error { +func getGitRepoInfo(repoUri string, repoInfo *codeql.RepoInfo) error { if repoUri == "" { return errors.New("repository param is not set or it cannot be auto populated") } @@ -106,50 +106,68 @@ func getGitRepoInfo(repoUri string, repoInfo *RepoInfo) error { matches := pat.FindAllStringSubmatch(repoUri, -1) if len(matches) > 0 { match := matches[0] - repoInfo.serverUrl = "https://" + match[3] + repoInfo.ServerUrl = "https://" + match[3] repoData := strings.Split(strings.TrimSuffix(match[4], ".git"), "/") if len(repoData) != 2 { return fmt.Errorf("Invalid repository %s", repoUri) } - repoInfo.owner = repoData[0] - repoInfo.repo = repoData[1] + repoInfo.Owner = repoData[0] + repoInfo.Repo = repoData[1] return nil } return fmt.Errorf("Invalid repository %s", repoUri) } -func initGitInfo(config *codeqlExecuteScanOptions) RepoInfo { - var repoInfo RepoInfo +func initGitInfo(config *codeqlExecuteScanOptions) (codeql.RepoInfo, error) { + var repoInfo codeql.RepoInfo err := getGitRepoInfo(config.Repository, &repoInfo) if err != nil { log.Entry().Error(err) } - repoInfo.ref = config.AnalyzedRef - repoInfo.commitId = config.CommitID + + repoInfo.Ref = config.AnalyzedRef + repoInfo.CommitId = config.CommitID provider, err := orchestrator.NewOrchestratorSpecificConfigProvider() if err != nil { log.Entry().Warn("No orchestrator found. 
We assume piper is running locally.") } else { - if repoInfo.ref == "" { - repoInfo.ref = provider.GetReference() + if repoInfo.Ref == "" { + repoInfo.Ref = provider.GetReference() } - if repoInfo.commitId == "" || repoInfo.commitId == "NA" { - repoInfo.commitId = provider.GetCommit() + if repoInfo.CommitId == "" || repoInfo.CommitId == "NA" { + repoInfo.CommitId = provider.GetCommit() } - if repoInfo.serverUrl == "" { + if repoInfo.ServerUrl == "" { err = getGitRepoInfo(provider.GetRepoURL(), &repoInfo) if err != nil { log.Entry().Error(err) } } } + if len(config.TargetGithubRepoURL) > 0 { + if strings.Contains(repoInfo.ServerUrl, "github") { + log.Entry().Errorf("TargetGithubRepoURL should not be set as the source repo is on github.") + return repoInfo, errors.New("TargetGithubRepoURL should not be set as the source repo is on github.") + } + err := getGitRepoInfo(config.TargetGithubRepoURL, &repoInfo) + if err != nil { + log.Entry().Error(err) + return repoInfo, err + } + if len(config.TargetGithubBranchName) > 0 { + repoInfo.Ref = config.TargetGithubBranchName + if len(strings.Split(config.TargetGithubBranchName, "/")) < 3 { + repoInfo.Ref = "refs/heads/" + config.TargetGithubBranchName + } + } + } - return repoInfo + return repoInfo, nil } func getToken(config *codeqlExecuteScanOptions) (bool, string) { @@ -165,42 +183,50 @@ func getToken(config *codeqlExecuteScanOptions) (bool, string) { return false, "" } -func uploadResults(config *codeqlExecuteScanOptions, repoInfo RepoInfo, token string, utils codeqlExecuteScanUtils) (string, error) { +func uploadResults(config *codeqlExecuteScanOptions, repoInfo codeql.RepoInfo, token string, utils codeqlExecuteScanUtils) (string, error) { cmd := []string{"github", "upload-results", "--sarif=" + filepath.Join(config.ModulePath, "target", "codeqlReport.sarif")} if config.GithubToken != "" { cmd = append(cmd, "-a="+token) } - if repoInfo.commitId != "" { - cmd = append(cmd, "--commit="+repoInfo.commitId) + if repoInfo.CommitId != "" { + cmd = append(cmd, "--commit="+repoInfo.CommitId) } - if repoInfo.serverUrl != "" { - cmd = append(cmd, "--github-url="+repoInfo.serverUrl) + if repoInfo.ServerUrl != "" { + cmd = append(cmd, "--github-url="+repoInfo.ServerUrl) } - if repoInfo.repo != "" { - cmd = append(cmd, "--repository="+(repoInfo.owner+"/"+repoInfo.repo)) + if repoInfo.Repo != "" { + cmd = append(cmd, "--repository="+(repoInfo.Owner+"/"+repoInfo.Repo)) } - if repoInfo.ref != "" { - cmd = append(cmd, "--ref="+repoInfo.ref) + if repoInfo.Ref != "" { + cmd = append(cmd, "--ref="+repoInfo.Ref) } - //if no git pramas are passed(commitId, reference, serverUrl, repository), then codeql tries to auto populate it based on git information of the checkout repository. + //if no git params are passed(commitId, reference, serverUrl, repository), then codeql tries to auto populate it based on git information of the checkout repository. //It also depends on the orchestrator. Some orchestrator keep git information and some not. 
- var buffer bytes.Buffer - utils.Stdout(&buffer) + var bufferOut, bufferErr bytes.Buffer + utils.Stdout(&bufferOut) + defer utils.Stdout(log.Writer()) + utils.Stderr(&bufferErr) + defer utils.Stderr(log.Writer()) + err := execute(utils, cmd, GeneralConfig.Verbose) if err != nil { + e := bufferErr.String() + log.Entry().Error(e) + if strings.Contains(e, "Unauthorized") { + log.Entry().Error("Either your GitHub token is invalid, or you are using both Vault and Jenkins credentials and your Vault credentials are invalid. To use your Jenkins credentials, try setting 'skipVault:true'") + } log.Entry().Error("failed to upload sarif results") return "", err } - utils.Stdout(log.Writer()) - url := buffer.String() + url := bufferOut.String() return strings.TrimSpace(url), nil } @@ -263,9 +289,10 @@ func runCodeqlExecuteScan(config *codeqlExecuteScanOptions, telemetryData *telem cmd = append(cmd, getRamAndThreadsFromConfig(config)...) - //codeql has an autobuilder which tries to build the project based on specified programming language if len(config.BuildCommand) > 0 { - cmd = append(cmd, "--command="+config.BuildCommand) + buildCmd := config.BuildCommand + buildCmd = buildCmd + getMavenSettings(config, utils) + cmd = append(cmd, "--command="+buildCmd) } err = execute(utils, cmd, GeneralConfig.Verbose) @@ -303,10 +330,36 @@ func runCodeqlExecuteScan(config *codeqlExecuteScanOptions, telemetryData *telem reports = append(reports, piperutils.Path{Target: filepath.Join(config.ModulePath, "target", "codeqlReport.csv")}) - repoInfo := initGitInfo(config) - repoUrl := fmt.Sprintf("%s/%s/%s", repoInfo.serverUrl, repoInfo.owner, repoInfo.repo) - repoReference, err := buildRepoReference(repoUrl, repoInfo.ref) - repoCodeqlScanUrl := fmt.Sprintf("%s/security/code-scanning?query=is:open+ref:%s", repoUrl, repoInfo.ref) + repoInfo, err := initGitInfo(config) + if err != nil { + return reports, err + } + repoUrl := fmt.Sprintf("%s/%s/%s", repoInfo.ServerUrl, repoInfo.Owner, repoInfo.Repo) + repoReference, err := codeql.BuildRepoReference(repoUrl, repoInfo.Ref) + repoCodeqlScanUrl := fmt.Sprintf("%s/security/code-scanning?query=is:open+ref:%s", repoUrl, repoInfo.Ref) + + if len(config.TargetGithubRepoURL) > 0 { + hasToken, token := getToken(config) + if !hasToken { + return reports, errors.New("failed running upload db sources to GitHub as githubToken was not specified") + } + repoUploader, err := codeql.NewGitUploaderInstance( + token, + repoInfo.Ref, + config.Database, + repoInfo.CommitId, + config.Repository, + config.TargetGithubRepoURL, + ) + if err != nil { + return reports, err + } + targetCommitId, err := repoUploader.UploadProjectToGithub() + if err != nil { + return reports, errors.Wrap(err, "failed uploading db sources from non-GitHub SCM to GitHub") + } + repoInfo.CommitId = targetCommitId + } if !config.UploadResults { log.Entry().Warn("The sarif results will not be uploaded to the repository and compliance report will not be generated as uploadResults is set to false.") @@ -326,8 +379,8 @@ func runCodeqlExecuteScan(config *codeqlExecuteScanOptions, telemetryData *telem return reports, errors.Wrap(err, "failed to upload sarif") } - codeqlScanAuditInstance := codeql.NewCodeqlScanAuditInstance(repoInfo.serverUrl, repoInfo.owner, repoInfo.repo, token, []string{}) - scanResults, err := codeqlScanAuditInstance.GetVulnerabilities(repoInfo.ref) + codeqlScanAuditInstance := codeql.NewCodeqlScanAuditInstance(repoInfo.ServerUrl, repoInfo.Owner, repoInfo.Repo, token, []string{}) + scanResults, err :=
codeqlScanAuditInstance.GetVulnerabilities(repoInfo.Ref) if err != nil { return reports, errors.Wrap(err, "failed to get scan results") } @@ -343,14 +396,14 @@ func runCodeqlExecuteScan(config *codeqlExecuteScanOptions, telemetryData *telem for _, scanResult := range scanResults { unaudited := scanResult.Total - scanResult.Audited if unaudited > config.VulnerabilityThresholdTotal { - msg := fmt.Sprintf("Your repository %v with ref %v is not compliant. Total unaudited issues are %v which is greater than the VulnerabilityThresholdTotal count %v", repoUrl, repoInfo.ref, unaudited, config.VulnerabilityThresholdTotal) + msg := fmt.Sprintf("Your repository %v with ref %v is not compliant. Total unaudited issues are %v which is greater than the VulnerabilityThresholdTotal count %v", repoUrl, repoInfo.Ref, unaudited, config.VulnerabilityThresholdTotal) return reports, errors.Errorf(msg) } } } } - toolRecordFileName, err := createAndPersistToolRecord(utils, repoInfo, repoReference, repoUrl, repoCodeqlScanUrl) + toolRecordFileName, err := codeql.CreateAndPersistToolRecord(utils, repoInfo, repoReference, repoUrl, config.ModulePath) if err != nil { log.Entry().Warning("TR_CODEQL: Failed to create toolrecord file ...", err) } else { @@ -360,87 +413,6 @@ func runCodeqlExecuteScan(config *codeqlExecuteScanOptions, telemetryData *telem return reports, nil } -func createAndPersistToolRecord(utils codeqlExecuteScanUtils, repoInfo RepoInfo, repoReference string, repoUrl string, repoCodeqlScanUrl string) (string, error) { - toolRecord, err := createToolRecordCodeql(utils, repoInfo, repoReference, repoUrl, repoCodeqlScanUrl) - if err != nil { - return "", err - } - - toolRecordFileName, err := persistToolRecord(toolRecord) - if err != nil { - return "", err - } - - return toolRecordFileName, nil -} - -func createToolRecordCodeql(utils codeqlExecuteScanUtils, repoInfo RepoInfo, repoUrl string, repoReference string, repoCodeqlScanUrl string) (*toolrecord.Toolrecord, error) { - record := toolrecord.New(utils, "./", "codeql", repoInfo.serverUrl) - - if repoInfo.serverUrl == "" { - return record, errors.New("Repository not set") - } - - if repoInfo.commitId == "" || repoInfo.commitId == "NA" { - return record, errors.New("CommitId not set") - } - - if repoInfo.ref == "" { - return record, errors.New("Analyzed Reference not set") - } - - record.DisplayName = fmt.Sprintf("%s %s - %s %s", repoInfo.owner, repoInfo.repo, repoInfo.ref, repoInfo.commitId) - record.DisplayURL = fmt.Sprintf("%s/security/code-scanning?query=is:open+ref:%s", repoUrl, repoInfo.ref) - - err := record.AddKeyData("repository", - fmt.Sprintf("%s/%s", repoInfo.owner, repoInfo.repo), - fmt.Sprintf("%s %s", repoInfo.owner, repoInfo.repo), - repoUrl) - if err != nil { - return record, err - } - - err = record.AddKeyData("repositoryReference", - repoInfo.ref, - fmt.Sprintf("%s - %s", repoInfo.repo, repoInfo.ref), - repoReference) - if err != nil { - return record, err - } - - err = record.AddKeyData("scanResult", - fmt.Sprintf("%s/%s", repoInfo.ref, repoInfo.commitId), - fmt.Sprintf("%s %s - %s %s", repoInfo.owner, repoInfo.repo, repoInfo.ref, repoInfo.commitId), - fmt.Sprintf("%s/security/code-scanning?query=is:open+ref:%s", repoUrl, repoInfo.ref)) - if err != nil { - return record, err - } - - return record, nil -} - -func buildRepoReference(repository, analyzedRef string) (string, error) { - ref := strings.Split(analyzedRef, "/") - if len(ref) < 3 { - return "", errors.New(fmt.Sprintf("Wrong analyzedRef format: %s", analyzedRef)) - } - if 
strings.Contains(analyzedRef, "pull") { - if len(ref) < 4 { - return "", errors.New(fmt.Sprintf("Wrong analyzedRef format: %s", analyzedRef)) - } - return fmt.Sprintf("%s/pull/%s", repository, ref[2]), nil - } - return fmt.Sprintf("%s/tree/%s", repository, ref[2]), nil -} - -func persistToolRecord(toolRecord *toolrecord.Toolrecord) (string, error) { - err := toolRecord.Persist() - if err != nil { - return "", err - } - return toolRecord.GetFileName(), nil -} - func getRamAndThreadsFromConfig(config *codeqlExecuteScanOptions) []string { params := make([]string, 0, 2) if len(config.Threads) > 0 { @@ -451,3 +423,18 @@ func getRamAndThreadsFromConfig(config *codeqlExecuteScanOptions) []string { } return params } + +func getMavenSettings(config *codeqlExecuteScanOptions, utils codeqlExecuteScanUtils) string { + params := "" + if len(config.BuildCommand) > 0 && config.BuildTool == "maven" && !strings.Contains(config.BuildCommand, "--global-settings") && !strings.Contains(config.BuildCommand, "--settings") { + mvnParams, err := maven.DownloadAndGetMavenParameters(config.GlobalSettingsFile, config.ProjectSettingsFile, utils) + if err != nil { + log.Entry().Error("failed to download and get maven parameters: ", err) + return params + } + for i := 1; i < len(mvnParams); i += 2 { + params = fmt.Sprintf("%s %s=%s", params, mvnParams[i-1], mvnParams[i]) + } + } + return params +} diff --git a/cmd/codeqlExecuteScan_generated.go b/cmd/codeqlExecuteScan_generated.go index afbb1c9a14..d842486ceb 100644 --- a/cmd/codeqlExecuteScan_generated.go +++ b/cmd/codeqlExecuteScan_generated.go @@ -30,6 +30,8 @@ type codeqlExecuteScanOptions struct { UploadResults bool `json:"uploadResults,omitempty"` SarifCheckMaxRetries int `json:"sarifCheckMaxRetries,omitempty"` SarifCheckRetryInterval int `json:"sarifCheckRetryInterval,omitempty"` + TargetGithubRepoURL string `json:"targetGithubRepoURL,omitempty"` + TargetGithubBranchName string `json:"targetGithubBranchName,omitempty"` Threads string `json:"threads,omitempty"` Ram string `json:"ram,omitempty"` AnalyzedRef string `json:"analyzedRef,omitempty"` @@ -37,6 +39,8 @@ type codeqlExecuteScanOptions struct { CommitID string `json:"commitId,omitempty"` VulnerabilityThresholdTotal int `json:"vulnerabilityThresholdTotal,omitempty"` CheckForCompliance bool `json:"checkForCompliance,omitempty"` + ProjectSettingsFile string `json:"projectSettingsFile,omitempty"` + GlobalSettingsFile string `json:"globalSettingsFile,omitempty"` } type codeqlExecuteScanReports struct { @@ -120,7 +124,7 @@ and Java plus Maven.`, log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) @@ -193,6 +197,8 @@ func addCodeqlExecuteScanFlags(cmd *cobra.Command, stepConfig *codeqlExecuteScan cmd.Flags().BoolVar(&stepConfig.UploadResults, "uploadResults", false, "Allows you to upload codeql SARIF results to your github project. 
You will need to set githubToken for this.") cmd.Flags().IntVar(&stepConfig.SarifCheckMaxRetries, "sarifCheckMaxRetries", 10, "Maximum number of retries when waiting for the server to finish processing the SARIF upload.") cmd.Flags().IntVar(&stepConfig.SarifCheckRetryInterval, "sarifCheckRetryInterval", 30, "Interval in seconds between retries when waiting for the server to finish processing the SARIF upload.") + cmd.Flags().StringVar(&stepConfig.TargetGithubRepoURL, "targetGithubRepoURL", os.Getenv("PIPER_targetGithubRepoURL"), "") + cmd.Flags().StringVar(&stepConfig.TargetGithubBranchName, "targetGithubBranchName", os.Getenv("PIPER_targetGithubBranchName"), "") cmd.Flags().StringVar(&stepConfig.Threads, "threads", `0`, "Use this many threads for the codeql operations.") cmd.Flags().StringVar(&stepConfig.Ram, "ram", os.Getenv("PIPER_ram"), "Use this much ram (MB) for the codeql operations.") cmd.Flags().StringVar(&stepConfig.AnalyzedRef, "analyzedRef", os.Getenv("PIPER_analyzedRef"), "Name of the ref that was analyzed.") @@ -200,6 +206,8 @@ func addCodeqlExecuteScanFlags(cmd *cobra.Command, stepConfig *codeqlExecuteScan cmd.Flags().StringVar(&stepConfig.CommitID, "commitId", os.Getenv("PIPER_commitId"), "SHA of commit that was analyzed.") cmd.Flags().IntVar(&stepConfig.VulnerabilityThresholdTotal, "vulnerabilityThresholdTotal", 0, "Threashold for maximum number of allowed vulnerabilities.") cmd.Flags().BoolVar(&stepConfig.CheckForCompliance, "checkForCompliance", false, "If set to true, the piper step checks for compliance based on vulnerability threadholds. Example - If total vulnerabilites are 10 and vulnerabilityThresholdTotal is set as 0, then the steps throws an compliance error.") + cmd.Flags().StringVar(&stepConfig.ProjectSettingsFile, "projectSettingsFile", os.Getenv("PIPER_projectSettingsFile"), "Path to the mvn settings file that should be used as project settings file.") + cmd.Flags().StringVar(&stepConfig.GlobalSettingsFile, "globalSettingsFile", os.Getenv("PIPER_globalSettingsFile"), "Path to the mvn settings file that should be used as global settings file.") cmd.MarkFlagRequired("buildTool") } @@ -324,6 +332,24 @@ func codeqlExecuteScanMetadata() config.StepData { Aliases: []config.Alias{}, Default: 30, }, + { + Name: "targetGithubRepoURL", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_targetGithubRepoURL"), + }, + { + Name: "targetGithubBranchName", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_targetGithubBranchName"), + }, { Name: "threads", ResourceRef: []config.ResourceReference{}, @@ -402,6 +428,24 @@ func codeqlExecuteScanMetadata() config.StepData { Aliases: []config.Alias{}, Default: false, }, + { + Name: "projectSettingsFile", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"GENERAL", "STEPS", "STAGES", "PARAMETERS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{{Name: "maven/projectSettingsFile"}}, + Default: os.Getenv("PIPER_projectSettingsFile"), + }, + { + Name: "globalSettingsFile", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"GENERAL", "STEPS", "STAGES", "PARAMETERS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{{Name: "maven/globalSettingsFile"}}, + Default: os.Getenv("PIPER_globalSettingsFile"), + 
}, }, }, Containers: []config.Container{ diff --git a/cmd/codeqlExecuteScan_test.go b/cmd/codeqlExecuteScan_test.go index b0784a5158..28f056b0c7 100644 --- a/cmd/codeqlExecuteScan_test.go +++ b/cmd/codeqlExecuteScan_test.go @@ -4,7 +4,6 @@ package cmd import ( - "fmt" "testing" "time" @@ -18,12 +17,14 @@ import ( type codeqlExecuteScanMockUtils struct { *mock.ExecMockRunner *mock.FilesMock + *mock.HttpClientMock } func newCodeqlExecuteScanTestsUtils() codeqlExecuteScanMockUtils { utils := codeqlExecuteScanMockUtils{ ExecMockRunner: &mock.ExecMockRunner{}, FilesMock: &mock.FilesMock{}, + HttpClientMock: &mock.HttpClientMock{}, } return utils } @@ -75,104 +76,104 @@ func TestRunCodeqlExecuteScan(t *testing.T) { func TestGetGitRepoInfo(t *testing.T) { t.Run("Valid https URL1", func(t *testing.T) { - var repoInfo RepoInfo + var repoInfo codeql.RepoInfo err := getGitRepoInfo("https://github.hello.test/Testing/fortify.git", &repoInfo) assert.NoError(t, err) - assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) - assert.Equal(t, "fortify", repoInfo.repo) - assert.Equal(t, "Testing", repoInfo.owner) + assert.Equal(t, "https://github.hello.test", repoInfo.ServerUrl) + assert.Equal(t, "fortify", repoInfo.Repo) + assert.Equal(t, "Testing", repoInfo.Owner) }) t.Run("Valid https URL2", func(t *testing.T) { - var repoInfo RepoInfo + var repoInfo codeql.RepoInfo err := getGitRepoInfo("https://github.hello.test/Testing/fortify", &repoInfo) assert.NoError(t, err) - assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) - assert.Equal(t, "fortify", repoInfo.repo) - assert.Equal(t, "Testing", repoInfo.owner) + assert.Equal(t, "https://github.hello.test", repoInfo.ServerUrl) + assert.Equal(t, "fortify", repoInfo.Repo) + assert.Equal(t, "Testing", repoInfo.Owner) }) t.Run("Valid https URL1 with dots", func(t *testing.T) { - var repoInfo RepoInfo + var repoInfo codeql.RepoInfo err := getGitRepoInfo("https://github.hello.test/Testing/com.sap.fortify.git", &repoInfo) assert.NoError(t, err) - assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) - assert.Equal(t, "com.sap.fortify", repoInfo.repo) - assert.Equal(t, "Testing", repoInfo.owner) + assert.Equal(t, "https://github.hello.test", repoInfo.ServerUrl) + assert.Equal(t, "com.sap.fortify", repoInfo.Repo) + assert.Equal(t, "Testing", repoInfo.Owner) }) t.Run("Valid https URL2 with dots", func(t *testing.T) { - var repoInfo RepoInfo + var repoInfo codeql.RepoInfo err := getGitRepoInfo("https://github.hello.test/Testing/com.sap.fortify", &repoInfo) assert.NoError(t, err) - assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) - assert.Equal(t, "com.sap.fortify", repoInfo.repo) - assert.Equal(t, "Testing", repoInfo.owner) + assert.Equal(t, "https://github.hello.test", repoInfo.ServerUrl) + assert.Equal(t, "com.sap.fortify", repoInfo.Repo) + assert.Equal(t, "Testing", repoInfo.Owner) }) t.Run("Valid https URL1 with username and token", func(t *testing.T) { - var repoInfo RepoInfo + var repoInfo codeql.RepoInfo err := getGitRepoInfo("https://username:token@github.hello.test/Testing/fortify.git", &repoInfo) assert.NoError(t, err) - assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) - assert.Equal(t, "fortify", repoInfo.repo) - assert.Equal(t, "Testing", repoInfo.owner) + assert.Equal(t, "https://github.hello.test", repoInfo.ServerUrl) + assert.Equal(t, "fortify", repoInfo.Repo) + assert.Equal(t, "Testing", repoInfo.Owner) }) t.Run("Valid https URL2 with username and token", func(t *testing.T) { - var repoInfo 
RepoInfo + var repoInfo codeql.RepoInfo err := getGitRepoInfo("https://username:token@github.hello.test/Testing/fortify", &repoInfo) assert.NoError(t, err) - assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) - assert.Equal(t, "fortify", repoInfo.repo) - assert.Equal(t, "Testing", repoInfo.owner) + assert.Equal(t, "https://github.hello.test", repoInfo.ServerUrl) + assert.Equal(t, "fortify", repoInfo.Repo) + assert.Equal(t, "Testing", repoInfo.Owner) }) - t.Run("Invalid https URL as no org/owner passed", func(t *testing.T) { - var repoInfo RepoInfo + t.Run("Invalid https URL as no org/Owner passed", func(t *testing.T) { + var repoInfo codeql.RepoInfo assert.Error(t, getGitRepoInfo("https://github.com/fortify", &repoInfo)) }) t.Run("Invalid URL as no protocol passed", func(t *testing.T) { - var repoInfo RepoInfo + var repoInfo codeql.RepoInfo assert.Error(t, getGitRepoInfo("github.hello.test/Testing/fortify", &repoInfo)) }) t.Run("Valid ssh URL1", func(t *testing.T) { - var repoInfo RepoInfo + var repoInfo codeql.RepoInfo err := getGitRepoInfo("git@github.hello.test/Testing/fortify.git", &repoInfo) assert.NoError(t, err) - assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) - assert.Equal(t, "fortify", repoInfo.repo) - assert.Equal(t, "Testing", repoInfo.owner) + assert.Equal(t, "https://github.hello.test", repoInfo.ServerUrl) + assert.Equal(t, "fortify", repoInfo.Repo) + assert.Equal(t, "Testing", repoInfo.Owner) }) t.Run("Valid ssh URL2", func(t *testing.T) { - var repoInfo RepoInfo + var repoInfo codeql.RepoInfo err := getGitRepoInfo("git@github.hello.test/Testing/fortify", &repoInfo) assert.NoError(t, err) - assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) - assert.Equal(t, "fortify", repoInfo.repo) - assert.Equal(t, "Testing", repoInfo.owner) + assert.Equal(t, "https://github.hello.test", repoInfo.ServerUrl) + assert.Equal(t, "fortify", repoInfo.Repo) + assert.Equal(t, "Testing", repoInfo.Owner) }) t.Run("Valid ssh URL1 with dots", func(t *testing.T) { - var repoInfo RepoInfo + var repoInfo codeql.RepoInfo err := getGitRepoInfo("git@github.hello.test/Testing/com.sap.fortify.git", &repoInfo) assert.NoError(t, err) - assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) - assert.Equal(t, "com.sap.fortify", repoInfo.repo) - assert.Equal(t, "Testing", repoInfo.owner) + assert.Equal(t, "https://github.hello.test", repoInfo.ServerUrl) + assert.Equal(t, "com.sap.fortify", repoInfo.Repo) + assert.Equal(t, "Testing", repoInfo.Owner) }) t.Run("Valid ssh URL2 with dots", func(t *testing.T) { - var repoInfo RepoInfo + var repoInfo codeql.RepoInfo err := getGitRepoInfo("git@github.hello.test/Testing/com.sap.fortify", &repoInfo) assert.NoError(t, err) - assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) - assert.Equal(t, "com.sap.fortify", repoInfo.repo) - assert.Equal(t, "Testing", repoInfo.owner) + assert.Equal(t, "https://github.hello.test", repoInfo.ServerUrl) + assert.Equal(t, "com.sap.fortify", repoInfo.Repo) + assert.Equal(t, "Testing", repoInfo.Owner) }) - t.Run("Invalid ssh URL as no org/owner passed", func(t *testing.T) { - var repoInfo RepoInfo + t.Run("Invalid ssh URL as no org/Owner passed", func(t *testing.T) { + var repoInfo codeql.RepoInfo assert.Error(t, getGitRepoInfo("git@github.com/fortify", &repoInfo)) }) } @@ -180,163 +181,85 @@ func TestGetGitRepoInfo(t *testing.T) { func TestInitGitInfo(t *testing.T) { t.Run("Valid URL1", func(t *testing.T) { config := codeqlExecuteScanOptions{Repository: 
"https://github.hello.test/Testing/codeql.git", AnalyzedRef: "refs/head/branch", CommitID: "abcd1234"} - repoInfo := initGitInfo(&config) - assert.Equal(t, "abcd1234", repoInfo.commitId) - assert.Equal(t, "Testing", repoInfo.owner) - assert.Equal(t, "codeql", repoInfo.repo) - assert.Equal(t, "refs/head/branch", repoInfo.ref) - assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) + repoInfo, err := initGitInfo(&config) + assert.NoError(t, err) + assert.Equal(t, "abcd1234", repoInfo.CommitId) + assert.Equal(t, "Testing", repoInfo.Owner) + assert.Equal(t, "codeql", repoInfo.Repo) + assert.Equal(t, "refs/head/branch", repoInfo.Ref) + assert.Equal(t, "https://github.hello.test", repoInfo.ServerUrl) }) t.Run("Valid URL2", func(t *testing.T) { config := codeqlExecuteScanOptions{Repository: "https://github.hello.test/Testing/codeql", AnalyzedRef: "refs/head/branch", CommitID: "abcd1234"} - repoInfo := initGitInfo(&config) - assert.Equal(t, "abcd1234", repoInfo.commitId) - assert.Equal(t, "Testing", repoInfo.owner) - assert.Equal(t, "codeql", repoInfo.repo) - assert.Equal(t, "refs/head/branch", repoInfo.ref) - assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) + repoInfo, err := initGitInfo(&config) + assert.NoError(t, err) + assert.Equal(t, "abcd1234", repoInfo.CommitId) + assert.Equal(t, "Testing", repoInfo.Owner) + assert.Equal(t, "codeql", repoInfo.Repo) + assert.Equal(t, "refs/head/branch", repoInfo.Ref) + assert.Equal(t, "https://github.hello.test", repoInfo.ServerUrl) }) t.Run("Valid url with dots URL1", func(t *testing.T) { config := codeqlExecuteScanOptions{Repository: "https://github.hello.test/Testing/com.sap.codeql.git", AnalyzedRef: "refs/head/branch", CommitID: "abcd1234"} - repoInfo := initGitInfo(&config) - assert.Equal(t, "abcd1234", repoInfo.commitId) - assert.Equal(t, "Testing", repoInfo.owner) - assert.Equal(t, "com.sap.codeql", repoInfo.repo) - assert.Equal(t, "refs/head/branch", repoInfo.ref) - assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) + repoInfo, err := initGitInfo(&config) + assert.NoError(t, err) + assert.Equal(t, "abcd1234", repoInfo.CommitId) + assert.Equal(t, "Testing", repoInfo.Owner) + assert.Equal(t, "com.sap.codeql", repoInfo.Repo) + assert.Equal(t, "refs/head/branch", repoInfo.Ref) + assert.Equal(t, "https://github.hello.test", repoInfo.ServerUrl) }) t.Run("Valid url with dots URL2", func(t *testing.T) { config := codeqlExecuteScanOptions{Repository: "https://github.hello.test/Testing/com.sap.codeql", AnalyzedRef: "refs/head/branch", CommitID: "abcd1234"} - repoInfo := initGitInfo(&config) - assert.Equal(t, "abcd1234", repoInfo.commitId) - assert.Equal(t, "Testing", repoInfo.owner) - assert.Equal(t, "com.sap.codeql", repoInfo.repo) - assert.Equal(t, "refs/head/branch", repoInfo.ref) - assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) + repoInfo, err := initGitInfo(&config) + assert.NoError(t, err) + assert.Equal(t, "abcd1234", repoInfo.CommitId) + assert.Equal(t, "Testing", repoInfo.Owner) + assert.Equal(t, "com.sap.codeql", repoInfo.Repo) + assert.Equal(t, "refs/head/branch", repoInfo.Ref) + assert.Equal(t, "https://github.hello.test", repoInfo.ServerUrl) }) t.Run("Valid url with username and token URL1", func(t *testing.T) { config := codeqlExecuteScanOptions{Repository: "https://username:token@github.hello.test/Testing/codeql.git", AnalyzedRef: "refs/head/branch", CommitID: "abcd1234"} - repoInfo := initGitInfo(&config) - assert.Equal(t, "abcd1234", repoInfo.commitId) - assert.Equal(t, "Testing", 
repoInfo.owner) - assert.Equal(t, "codeql", repoInfo.repo) - assert.Equal(t, "refs/head/branch", repoInfo.ref) - assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) + repoInfo, err := initGitInfo(&config) + assert.NoError(t, err) + assert.Equal(t, "abcd1234", repoInfo.CommitId) + assert.Equal(t, "Testing", repoInfo.Owner) + assert.Equal(t, "codeql", repoInfo.Repo) + assert.Equal(t, "refs/head/branch", repoInfo.Ref) + assert.Equal(t, "https://github.hello.test", repoInfo.ServerUrl) }) t.Run("Valid url with username and token URL2", func(t *testing.T) { config := codeqlExecuteScanOptions{Repository: "https://username:token@github.hello.test/Testing/codeql", AnalyzedRef: "refs/head/branch", CommitID: "abcd1234"} - repoInfo := initGitInfo(&config) - assert.Equal(t, "abcd1234", repoInfo.commitId) - assert.Equal(t, "Testing", repoInfo.owner) - assert.Equal(t, "codeql", repoInfo.repo) - assert.Equal(t, "refs/head/branch", repoInfo.ref) - assert.Equal(t, "https://github.hello.test", repoInfo.serverUrl) + repoInfo, err := initGitInfo(&config) + assert.NoError(t, err) + assert.Equal(t, "abcd1234", repoInfo.CommitId) + assert.Equal(t, "Testing", repoInfo.Owner) + assert.Equal(t, "codeql", repoInfo.Repo) + assert.Equal(t, "refs/head/branch", repoInfo.Ref) + assert.Equal(t, "https://github.hello.test", repoInfo.ServerUrl) }) t.Run("Invalid URL with no org/reponame", func(t *testing.T) { config := codeqlExecuteScanOptions{Repository: "https://github.hello.test", AnalyzedRef: "refs/head/branch", CommitID: "abcd1234"} - repoInfo := initGitInfo(&config) - _, err := orchestrator.NewOrchestratorSpecificConfigProvider() - assert.Equal(t, "abcd1234", repoInfo.commitId) - assert.Equal(t, "refs/head/branch", repoInfo.ref) + repoInfo, err := initGitInfo(&config) + assert.NoError(t, err) + _, err = orchestrator.NewOrchestratorSpecificConfigProvider() + assert.Equal(t, "abcd1234", repoInfo.CommitId) + assert.Equal(t, "refs/head/branch", repoInfo.Ref) if err != nil { - assert.Equal(t, "", repoInfo.owner) - assert.Equal(t, "", repoInfo.repo) - assert.Equal(t, "", repoInfo.serverUrl) + assert.Equal(t, "", repoInfo.Owner) + assert.Equal(t, "", repoInfo.Repo) + assert.Equal(t, "", repoInfo.ServerUrl) } }) } -func TestBuildRepoReference(t *testing.T) { - t.Run("Valid ref with branch", func(t *testing.T) { - repository := "https://github.hello.test/Testing/fortify" - analyzedRef := "refs/head/branch" - ref, err := buildRepoReference(repository, analyzedRef) - assert.NoError(t, err) - assert.Equal(t, "https://github.hello.test/Testing/fortify/tree/branch", ref) - }) - t.Run("Valid ref with PR", func(t *testing.T) { - repository := "https://github.hello.test/Testing/fortify" - analyzedRef := "refs/pull/1/merge" - ref, err := buildRepoReference(repository, analyzedRef) - assert.NoError(t, err) - assert.Equal(t, "https://github.hello.test/Testing/fortify/pull/1", ref) - }) - t.Run("Invalid ref without branch name", func(t *testing.T) { - repository := "https://github.hello.test/Testing/fortify" - analyzedRef := "refs/head" - ref, err := buildRepoReference(repository, analyzedRef) - assert.Error(t, err) - assert.ErrorContains(t, err, "Wrong analyzedRef format") - assert.Equal(t, "", ref) - }) - t.Run("Invalid ref without PR id", func(t *testing.T) { - repository := "https://github.hello.test/Testing/fortify" - analyzedRef := "refs/pull/merge" - ref, err := buildRepoReference(repository, analyzedRef) - assert.Error(t, err) - assert.ErrorContains(t, err, "Wrong analyzedRef format") - assert.Equal(t, "", ref) - }) -} 
- -func getRepoReferences(repoInfo RepoInfo) (string, string, string) { - repoUrl := fmt.Sprintf("%s/%s/%s", repoInfo.serverUrl, repoInfo.owner, repoInfo.repo) - repoReference, _ := buildRepoReference(repoUrl, repoInfo.ref) - repoCodeqlScanUrl := fmt.Sprintf("%s/security/code-scanning?query=is:open+ref:%s", repoUrl, repoInfo.ref) - return repoUrl, repoReference, repoCodeqlScanUrl -} -func TestCreateToolRecordCodeql(t *testing.T) { - t.Run("Valid toolrun file", func(t *testing.T) { - repoInfo := RepoInfo{serverUrl: "https://github.hello.test", commitId: "test", ref: "refs/head/branch", owner: "Testing", repo: "fortify"} - repoUrl, repoReference, repoCodeqlScanUrl := getRepoReferences(repoInfo) - toolRecord, err := createToolRecordCodeql(newCodeqlExecuteScanTestsUtils(), repoInfo, repoUrl, repoReference, repoCodeqlScanUrl) - assert.NoError(t, err) - assert.Equal(t, toolRecord.ToolName, "codeql") - assert.Equal(t, toolRecord.ToolInstance, "https://github.hello.test") - assert.Equal(t, toolRecord.DisplayName, "Testing fortify - refs/head/branch test") - assert.Equal(t, toolRecord.DisplayURL, "https://github.hello.test/Testing/fortify/security/code-scanning?query=is:open+ref:refs/head/branch") - }) - t.Run("Empty repository URL", func(t *testing.T) { - repoInfo := RepoInfo{serverUrl: "", commitId: "test", ref: "refs/head/branch", owner: "Testing", repo: "fortify"} - repoUrl, repoReference, repoCodeqlScanUrl := getRepoReferences(repoInfo) - _, err := createToolRecordCodeql(newCodeqlExecuteScanTestsUtils(), repoInfo, repoUrl, repoReference, repoCodeqlScanUrl) - - assert.Error(t, err) - assert.ErrorContains(t, err, "Repository not set") - }) - - t.Run("Empty analyzedRef", func(t *testing.T) { - repoInfo := RepoInfo{serverUrl: "https://github.hello.test", commitId: "test", ref: "", owner: "Testing", repo: "fortify"} - repoUrl, repoReference, repoCodeqlScanUrl := getRepoReferences(repoInfo) - _, err := createToolRecordCodeql(newCodeqlExecuteScanTestsUtils(), repoInfo, repoUrl, repoReference, repoCodeqlScanUrl) - - assert.Error(t, err) - assert.ErrorContains(t, err, "Analyzed Reference not set") - }) - - t.Run("Empty CommitId", func(t *testing.T) { - repoInfo := RepoInfo{serverUrl: "https://github.hello.test", commitId: "", ref: "refs/head/branch", owner: "Testing", repo: "fortify"} - repoUrl, repoReference, repoCodeqlScanUrl := getRepoReferences(repoInfo) - _, err := createToolRecordCodeql(newCodeqlExecuteScanTestsUtils(), repoInfo, repoUrl, repoReference, repoCodeqlScanUrl) - - assert.Error(t, err) - assert.ErrorContains(t, err, "CommitId not set") - }) - t.Run("Invalid analyzedRef", func(t *testing.T) { - repoInfo := RepoInfo{serverUrl: "https://github.hello.test", commitId: "", ref: "refs/branch", owner: "Testing", repo: "fortify"} - repoUrl, repoReference, repoCodeqlScanUrl := getRepoReferences(repoInfo) - _, err := createToolRecordCodeql(newCodeqlExecuteScanTestsUtils(), repoInfo, repoUrl, repoReference, repoCodeqlScanUrl) - - assert.Error(t, err) - }) -} - func TestWaitSarifUploaded(t *testing.T) { t.Parallel() config := codeqlExecuteScanOptions{SarifCheckRetryInterval: 1, SarifCheckMaxRetries: 5} @@ -379,6 +302,105 @@ func TestWaitSarifUploaded(t *testing.T) { }) } +func TestGetMavenSettings(t *testing.T) { + t.Parallel() + t.Run("No maven", func(t *testing.T) { + config := codeqlExecuteScanOptions{BuildTool: "npm"} + params := getMavenSettings(&config, newCodeqlExecuteScanTestsUtils()) + assert.Equal(t, "", params) + }) + + t.Run("No build command", func(t *testing.T) { + config := 
codeqlExecuteScanOptions{BuildTool: "maven"} + params := getMavenSettings(&config, newCodeqlExecuteScanTestsUtils()) + assert.Equal(t, "", params) + }) + + t.Run("Project Settings file", func(t *testing.T) { + config := codeqlExecuteScanOptions{BuildTool: "maven", BuildCommand: "mvn clean install", ProjectSettingsFile: "test.xml"} + params := getMavenSettings(&config, newCodeqlExecuteScanTestsUtils()) + assert.Equal(t, " --settings=test.xml", params) + }) + + t.Run("Skip Project Settings file incase already used", func(t *testing.T) { + config := codeqlExecuteScanOptions{BuildTool: "maven", BuildCommand: "mvn clean install --settings=project.xml", ProjectSettingsFile: "test.xml"} + params := getMavenSettings(&config, newCodeqlExecuteScanTestsUtils()) + assert.Equal(t, "", params) + }) + + t.Run("Global Settings file", func(t *testing.T) { + config := codeqlExecuteScanOptions{BuildTool: "maven", BuildCommand: "mvn clean install", GlobalSettingsFile: "gloabl.xml"} + params := getMavenSettings(&config, newCodeqlExecuteScanTestsUtils()) + assert.Equal(t, " --global-settings=gloabl.xml", params) + }) + + t.Run("Project and Global Settings file", func(t *testing.T) { + config := codeqlExecuteScanOptions{BuildTool: "maven", BuildCommand: "mvn clean install", ProjectSettingsFile: "test.xml", GlobalSettingsFile: "global.xml"} + params := getMavenSettings(&config, newCodeqlExecuteScanTestsUtils()) + assert.Equal(t, " --global-settings=global.xml --settings=test.xml", params) + }) + + t.Run("ProjectSettingsFile https url", func(t *testing.T) { + config := codeqlExecuteScanOptions{BuildTool: "maven", BuildCommand: "mvn clean install", ProjectSettingsFile: "https://jenkins-sap-test.com/test.xml"} + params := getMavenSettings(&config, newCodeqlExecuteScanTestsUtils()) + assert.Equal(t, " --settings=.pipeline/mavenProjectSettings.xml", params) + }) + + t.Run("ProjectSettingsFile http url", func(t *testing.T) { + config := codeqlExecuteScanOptions{BuildTool: "maven", BuildCommand: "mvn clean install", ProjectSettingsFile: "http://jenkins-sap-test.com/test.xml"} + params := getMavenSettings(&config, newCodeqlExecuteScanTestsUtils()) + assert.Equal(t, " --settings=.pipeline/mavenProjectSettings.xml", params) + }) + + t.Run("GlobalSettingsFile https url", func(t *testing.T) { + config := codeqlExecuteScanOptions{BuildTool: "maven", BuildCommand: "mvn clean install", GlobalSettingsFile: "https://jenkins-sap-test.com/test.xml"} + params := getMavenSettings(&config, newCodeqlExecuteScanTestsUtils()) + assert.Equal(t, " --global-settings=.pipeline/mavenGlobalSettings.xml", params) + }) + + t.Run("GlobalSettingsFile http url", func(t *testing.T) { + config := codeqlExecuteScanOptions{BuildTool: "maven", BuildCommand: "mvn clean install", GlobalSettingsFile: "http://jenkins-sap-test.com/test.xml"} + params := getMavenSettings(&config, newCodeqlExecuteScanTestsUtils()) + assert.Equal(t, " --global-settings=.pipeline/mavenGlobalSettings.xml", params) + }) + + t.Run("ProjectSettingsFile and GlobalSettingsFile https url", func(t *testing.T) { + config := codeqlExecuteScanOptions{BuildTool: "maven", BuildCommand: "mvn clean install", GlobalSettingsFile: "https://jenkins-sap-test.com/test.xml", ProjectSettingsFile: "http://jenkins-sap-test.com/test.xml"} + params := getMavenSettings(&config, newCodeqlExecuteScanTestsUtils()) + assert.Equal(t, " --global-settings=.pipeline/mavenGlobalSettings.xml --settings=.pipeline/mavenProjectSettings.xml", params) + }) + + t.Run("ProjectSettingsFile and GlobalSettingsFile http url", 
func(t *testing.T) { + config := codeqlExecuteScanOptions{BuildTool: "maven", BuildCommand: "mvn clean install", GlobalSettingsFile: "http://jenkins-sap-test.com/test.xml", ProjectSettingsFile: "http://jenkins-sap-test.com/test.xml"} + params := getMavenSettings(&config, newCodeqlExecuteScanTestsUtils()) + assert.Equal(t, " --global-settings=.pipeline/mavenGlobalSettings.xml --settings=.pipeline/mavenProjectSettings.xml", params) + }) + + t.Run("ProjectSettingsFile file and GlobalSettingsFile https url", func(t *testing.T) { + config := codeqlExecuteScanOptions{BuildTool: "maven", BuildCommand: "mvn clean install", GlobalSettingsFile: "https://jenkins-sap-test.com/test.xml", ProjectSettingsFile: "test.xml"} + params := getMavenSettings(&config, newCodeqlExecuteScanTestsUtils()) + assert.Equal(t, " --global-settings=.pipeline/mavenGlobalSettings.xml --settings=test.xml", params) + }) + + t.Run("ProjectSettingsFile file and GlobalSettingsFile https url", func(t *testing.T) { + config := codeqlExecuteScanOptions{BuildTool: "maven", BuildCommand: "mvn clean install", GlobalSettingsFile: "http://jenkins-sap-test.com/test.xml", ProjectSettingsFile: "test.xml"} + params := getMavenSettings(&config, newCodeqlExecuteScanTestsUtils()) + assert.Equal(t, " --global-settings=.pipeline/mavenGlobalSettings.xml --settings=test.xml", params) + }) + + t.Run("ProjectSettingsFile https url and GlobalSettingsFile file", func(t *testing.T) { + config := codeqlExecuteScanOptions{BuildTool: "maven", BuildCommand: "mvn clean install", GlobalSettingsFile: "global.xml", ProjectSettingsFile: "http://jenkins-sap-test.com/test.xml"} + params := getMavenSettings(&config, newCodeqlExecuteScanTestsUtils()) + assert.Equal(t, " --global-settings=global.xml --settings=.pipeline/mavenProjectSettings.xml", params) + }) + + t.Run("ProjectSettingsFile http url and GlobalSettingsFile file", func(t *testing.T) { + config := codeqlExecuteScanOptions{BuildTool: "maven", BuildCommand: "mvn clean install", GlobalSettingsFile: "global.xml", ProjectSettingsFile: "http://jenkins-sap-test.com/test.xml"} + params := getMavenSettings(&config, newCodeqlExecuteScanTestsUtils()) + assert.Equal(t, " --global-settings=global.xml --settings=.pipeline/mavenProjectSettings.xml", params) + }) +} + type CodeqlSarifUploaderMock struct { counter int } diff --git a/cmd/containerExecuteStructureTests_generated.go b/cmd/containerExecuteStructureTests_generated.go index 1db69d787d..d43dfa36f2 100644 --- a/cmd/containerExecuteStructureTests_generated.go +++ b/cmd/containerExecuteStructureTests_generated.go @@ -64,7 +64,7 @@ func ContainerExecuteStructureTestsCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/containerSaveImage_generated.go b/cmd/containerSaveImage_generated.go index 43d58d50df..010cd5bdc5 100644 --- a/cmd/containerSaveImage_generated.go +++ b/cmd/containerSaveImage_generated.go @@ -67,7 +67,7 @@ It can be used no matter if a Docker daemon is available or not. 
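// Editor's note (illustrative sketch, not part of the diff): TestGetMavenSettings above
// fully specifies the flag-building behaviour — global settings come first, project
// settings are skipped when the build command already carries --settings, and remote
// (http/https) settings files are replaced by their expected download targets under
// .pipeline/. A minimal, dependency-free sketch of that logic (the real getMavenSettings
// additionally downloads the remote files via the passed utils):
package codeqlsketch

import "strings"

func getMavenSettingsSketch(buildTool, buildCommand, projectSettingsFile, globalSettingsFile string) string {
	params := ""
	if buildTool != "maven" || buildCommand == "" {
		return params
	}
	if globalSettingsFile != "" {
		if strings.HasPrefix(globalSettingsFile, "http://") || strings.HasPrefix(globalSettingsFile, "https://") {
			globalSettingsFile = ".pipeline/mavenGlobalSettings.xml" // assumed download target
		}
		params += " --global-settings=" + globalSettingsFile
	}
	if projectSettingsFile != "" && !strings.Contains(buildCommand, "--settings") {
		if strings.HasPrefix(projectSettingsFile, "http://") || strings.HasPrefix(projectSettingsFile, "https://") {
			projectSettingsFile = ".pipeline/mavenProjectSettings.xml" // assumed download target
		}
		params += " --settings=" + projectSettingsFile
	}
	return params
}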
It will also wo log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/credentialdiggerScan_generated.go b/cmd/credentialdiggerScan_generated.go index a4667e8336..518eb8b657 100644 --- a/cmd/credentialdiggerScan_generated.go +++ b/cmd/credentialdiggerScan_generated.go @@ -69,7 +69,7 @@ It supports several scan flavors, i.e., full scans of a repo, scan of a snapshot log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/detectExecuteScan.go b/cmd/detectExecuteScan.go index 5d04842924..7c77192df3 100644 --- a/cmd/detectExecuteScan.go +++ b/cmd/detectExecuteScan.go @@ -15,6 +15,7 @@ import ( bd "github.com/SAP/jenkins-library/pkg/blackduck" "github.com/SAP/jenkins-library/pkg/command" piperGithub "github.com/SAP/jenkins-library/pkg/github" + "github.com/SAP/jenkins-library/pkg/golang" piperhttp "github.com/SAP/jenkins-library/pkg/http" "github.com/SAP/jenkins-library/pkg/log" "github.com/SAP/jenkins-library/pkg/maven" @@ -138,6 +139,14 @@ func detectExecuteScan(config detectExecuteScanOptions, _ *telemetry.CustomData, if err != nil { log.Entry().WithError(err).Warning("Failed to get GitHub client") } + + if config.PrivateModules != "" && config.PrivateModulesGitToken != "" { + //configuring go private packages + if err := golang.PrepareGolangPrivatePackages("detectExecuteStep", config.PrivateModules, config.PrivateModulesGitToken); err != nil { + log.Entry().Warningf("couldn't set private packages for golang, error: %s", err.Error()) + } + } + utils := newDetectUtils(client) if err := runDetect(ctx, config, utils, influx); err != nil { log.Entry(). 
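// Editor's note (illustrative sketch, not part of the diff): golang.PrepareGolangPrivatePackages,
// called above when privateModules and privateModulesGitToken are both configured, boils down to
// making private module paths resolvable: export GOPRIVATE and let git use the token for HTTPS
// fetches. The sketch below shows the general idea only; the real helper in pkg/golang may use a
// different mechanism (e.g. .netrc) and different host handling:
package golangsketch

import (
	"fmt"
	"os"
	"os/exec"
)

func preparePrivatePackagesSketch(privateModules, gitToken string) error {
	// Tell the go tool not to use the public proxy/checksum DB for these module paths.
	if err := os.Setenv("GOPRIVATE", privateModules); err != nil {
		return err
	}
	// Assumed GitHub-style token auth: rewrite plain HTTPS URLs to token-authenticated ones.
	key := fmt.Sprintf("url.https://%s@github.com/.insteadOf", gitToken)
	cmd := exec.Command("git", "config", "--global", key, "https://github.com/")
	cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr
	return cmd.Run()
}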
diff --git a/cmd/detectExecuteScan_generated.go b/cmd/detectExecuteScan_generated.go index f4cb28a62f..3f70196b7d 100644 --- a/cmd/detectExecuteScan_generated.go +++ b/cmd/detectExecuteScan_generated.go @@ -62,6 +62,8 @@ type detectExecuteScanOptions struct { ExcludedDirectories []string `json:"excludedDirectories,omitempty"` NpmDependencyTypesExcluded []string `json:"npmDependencyTypesExcluded,omitempty" validate:"possible-values=NONE DEV PEER"` NpmArguments []string `json:"npmArguments,omitempty"` + PrivateModules string `json:"privateModules,omitempty"` + PrivateModulesGitToken string `json:"privateModulesGitToken,omitempty"` } type detectExecuteScanInflux struct { @@ -195,13 +197,14 @@ Please configure your BlackDuck server Url using the serverUrl parameter and the } log.RegisterSecret(stepConfig.Token) log.RegisterSecret(stepConfig.GithubToken) + log.RegisterSecret(stepConfig.PrivateModulesGitToken) if len(GeneralConfig.HookConfig.SentryConfig.Dsn) > 0 { sentryHook := log.NewSentryHook(GeneralConfig.HookConfig.SentryConfig.Dsn, GeneralConfig.CorrelationID) log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) @@ -305,6 +308,8 @@ func addDetectExecuteScanFlags(cmd *cobra.Command, stepConfig *detectExecuteScan cmd.Flags().StringSliceVar(&stepConfig.ExcludedDirectories, "excludedDirectories", []string{}, "List of directories which should be excluded from the scan.") cmd.Flags().StringSliceVar(&stepConfig.NpmDependencyTypesExcluded, "npmDependencyTypesExcluded", []string{}, "List of npm dependency types which Detect should exclude from the BOM.") cmd.Flags().StringSliceVar(&stepConfig.NpmArguments, "npmArguments", []string{}, "List of additional arguments that Detect will add at then end of the npm ls command line when Detect executes the NPM CLI Detector on an NPM project.") + cmd.Flags().StringVar(&stepConfig.PrivateModules, "privateModules", os.Getenv("PIPER_privateModules"), "Tells go which modules shall be considered to be private (by setting [GOPRIVATE](https://pkg.go.dev/cmd/go#hdr-Configuration_for_downloading_non_public_code)).") + cmd.Flags().StringVar(&stepConfig.PrivateModulesGitToken, "privateModulesGitToken", os.Getenv("PIPER_privateModulesGitToken"), "GitHub personal access token as per https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line.") cmd.MarkFlagRequired("token") cmd.MarkFlagRequired("projectName") @@ -324,6 +329,7 @@ func detectExecuteScanMetadata() config.StepData { Secrets: []config.StepSecrets{ {Name: "detectTokenCredentialsId", Description: "Jenkins 'Secret text' credentials ID containing the API token used to authenticate with the Synopsis Detect (formerly BlackDuck) Server.", Type: "jenkins", Aliases: []config.Alias{{Name: "apiTokenCredentialsId", Deprecated: false}}}, {Name: "githubTokenCredentialsId", Description: "Jenkins 'Secret text' credentials ID containing token to authenticate to GitHub.", Type: "jenkins"}, + {Name: "golangPrivateModulesGitTokenCredentialsId", Description: "Jenkins 'Username with password' credentials ID containing username/password for http access to your git repos where your go private modules are stored.", Type: "jenkins"}, }, Resources: []config.StepResources{ {Name: 
"buildDescriptor", Type: "stash"}, @@ -419,7 +425,7 @@ func detectExecuteScanMetadata() config.StepData { Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, Type: "string", Mandatory: true, - Aliases: []config.Alias{{Name: "detect/serverUrl"}}, + Aliases: []config.Alias{{Name: "detect/serverUrl"}, {Name: "detectServerUrl"}}, Default: os.Getenv("PIPER_serverUrl"), }, { @@ -737,6 +743,36 @@ func detectExecuteScanMetadata() config.StepData { Aliases: []config.Alias{{Name: "detect/npmArguments"}}, Default: []string{}, }, + { + Name: "privateModules", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"GENERAL", "STEPS", "STAGES", "PARAMETERS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_privateModules"), + }, + { + Name: "privateModulesGitToken", + ResourceRef: []config.ResourceReference{ + { + Name: "golangPrivateModulesGitTokenCredentialsId", + Param: "password", + Type: "secret", + }, + + { + Name: "golangPrivateModulesGitTokenVaultSecret", + Type: "vaultSecret", + Default: "golang", + }, + }, + Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_privateModulesGitToken"), + }, }, }, Containers: []config.Container{ diff --git a/cmd/fortifyExecuteScan_generated.go b/cmd/fortifyExecuteScan_generated.go index 0057bbaf52..4488e5e844 100644 --- a/cmd/fortifyExecuteScan_generated.go +++ b/cmd/fortifyExecuteScan_generated.go @@ -247,7 +247,7 @@ Besides triggering a scan the step verifies the results after they have been upl log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/gaugeExecuteTests_generated.go b/cmd/gaugeExecuteTests_generated.go index 91d93faffa..492d9f2254 100644 --- a/cmd/gaugeExecuteTests_generated.go +++ b/cmd/gaugeExecuteTests_generated.go @@ -148,7 +148,7 @@ You can use the [sample projects](https://github.com/getgauge/gauge-mvn-archetyp log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/gctsCloneRepository_generated.go b/cmd/gctsCloneRepository_generated.go index dc5f34540c..6105a6203a 100644 --- a/cmd/gctsCloneRepository_generated.go +++ b/cmd/gctsCloneRepository_generated.go @@ -64,7 +64,7 @@ func GctsCloneRepositoryCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/gctsCreateRepository_generated.go b/cmd/gctsCreateRepository_generated.go index 4130ef9984..ec858932b0 100644 --- a/cmd/gctsCreateRepository_generated.go +++ b/cmd/gctsCreateRepository_generated.go @@ -68,7 +68,7 @@ func GctsCreateRepositoryCommand() *cobra.Command { 
log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/gctsDeploy_generated.go b/cmd/gctsDeploy_generated.go index 9dec729a55..1f0dac8132 100644 --- a/cmd/gctsDeploy_generated.go +++ b/cmd/gctsDeploy_generated.go @@ -77,7 +77,7 @@ You can use this step for gCTS as of SAP S/4HANA 2020.`, log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/gctsExecuteABAPQualityChecks.go b/cmd/gctsExecuteABAPQualityChecks.go index 5f7e3581b2..30d190655b 100644 --- a/cmd/gctsExecuteABAPQualityChecks.go +++ b/cmd/gctsExecuteABAPQualityChecks.go @@ -518,9 +518,9 @@ func executeAUnitTest(config *gctsExecuteABAPQualityChecksOptions, client piperh switch object.Type { case "CLAS": - innerXml = innerXml + `` + innerXml = innerXml + `` case "DEVC": - innerXml = innerXml + `` + innerXml = innerXml + `` } @@ -757,7 +757,7 @@ func executeATCCheck(config *gctsExecuteABAPQualityChecksOptions, client piperht switch object.Type { case "CLAS": - innerXml = innerXml + `` + innerXml = innerXml + `` case "INTF": innerXml = innerXml + `` case "DEVC": diff --git a/cmd/gctsExecuteABAPQualityChecks_generated.go b/cmd/gctsExecuteABAPQualityChecks_generated.go index 75f6853f16..d3cc0b0c71 100644 --- a/cmd/gctsExecuteABAPQualityChecks_generated.go +++ b/cmd/gctsExecuteABAPQualityChecks_generated.go @@ -79,7 +79,7 @@ You can use this step as of SAP S/4HANA 2020 with SAP Note [3159798](https://lau log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/gctsExecuteABAPUnitTests_generated.go b/cmd/gctsExecuteABAPUnitTests_generated.go index 5b8a299f6a..5f9a9c37b0 100644 --- a/cmd/gctsExecuteABAPUnitTests_generated.go +++ b/cmd/gctsExecuteABAPUnitTests_generated.go @@ -72,7 +72,7 @@ func GctsExecuteABAPUnitTestsCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/gctsRollback_generated.go b/cmd/gctsRollback_generated.go index 43f5c3ccb7..4392548450 100644 --- a/cmd/gctsRollback_generated.go +++ b/cmd/gctsRollback_generated.go @@ -69,7 +69,7 @@ If no ` + "`" + `commit` + "`" + ` parameter is specified and the remote reposit log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = 
&splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/getConfig.go b/cmd/getConfig.go index 6ea45de881..aa4734a75c 100644 --- a/cmd/getConfig.go +++ b/cmd/getConfig.go @@ -6,6 +6,7 @@ import ( "os" "path" "path/filepath" + "strings" "github.com/SAP/jenkins-library/pkg/config" "github.com/SAP/jenkins-library/pkg/log" @@ -16,19 +17,31 @@ import ( "github.com/spf13/cobra" ) -type configCommandOptions struct { - output string // output format, so far only JSON - outputFile string // if set: path to file where the output should be written to - parametersJSON string // parameters to be considered in JSON format - stageConfig bool - stageConfigAcceptedParameters []string - stepMetadata string // metadata to be considered, can be filePath or ENV containing JSON in format 'ENV:MY_ENV_VAR' - stepName string - contextConfig bool - openFile func(s string, t map[string]string) (io.ReadCloser, error) +type ConfigCommandOptions struct { + Output string // output format, so far only JSON, YAML + OutputFile string // if set: path to file where the output should be written to + ParametersJSON string // parameters to be considered in JSON format + StageConfig bool + StageConfigAcceptedParameters []string + StepMetadata string // metadata to be considered, can be filePath or ENV containing JSON in format 'ENV:MY_ENV_VAR' + StepName string + ContextConfig bool + OpenFile func(s string, t map[string]string) (io.ReadCloser, error) } -var configOptions configCommandOptions +var configOptions ConfigCommandOptions + +func SetConfigOptions(c ConfigCommandOptions) { + configOptions.ContextConfig = c.ContextConfig + configOptions.OpenFile = c.OpenFile + configOptions.Output = c.Output + configOptions.OutputFile = c.OutputFile + configOptions.ParametersJSON = c.ParametersJSON + configOptions.StageConfig = c.StageConfig + configOptions.StageConfigAcceptedParameters = c.StageConfigAcceptedParameters + configOptions.StepMetadata = c.StepMetadata + configOptions.StepName = c.StepName +} type getConfigUtils interface { FileExists(filename string) (bool, error) @@ -41,16 +54,17 @@ type getConfigUtilsBundle struct { } func newGetConfigUtilsUtils() getConfigUtils { - utils := getConfigUtilsBundle{ + return &getConfigUtilsBundle{ Files: &piperutils.Files{}, } - return &utils } // ConfigCommand is the entry command for loading the configuration of a pipeline step func ConfigCommand() *cobra.Command { + SetConfigOptions(ConfigCommandOptions{ + OpenFile: config.OpenPiperFile, + }) - configOptions.openFile = config.OpenPiperFile var createConfigCmd = &cobra.Command{ Use: "getConfig", Short: "Loads the project 'Piper' configuration respecting defaults and parameters.", @@ -62,9 +76,7 @@ func ConfigCommand() *cobra.Command { GeneralConfig.GitHubAccessTokens = ResolveAccessTokens(GeneralConfig.GitHubTokens) }, Run: func(cmd *cobra.Command, _ []string) { - utils := newGetConfigUtilsUtils() - err := generateConfig(utils) - if err != nil { + if err := generateConfigWrapper(); err != nil { log.SetErrorCategory(log.ErrorConfiguration) log.Entry().WithError(err).Fatal("failed to retrieve configuration") } @@ -77,8 +89,8 @@ func ConfigCommand() *cobra.Command { // GetDockerImageValue provides Piper commands additional access to configuration of step execution image if required func GetDockerImageValue(stepName string) (string, error) { - configOptions.contextConfig = true - configOptions.stepName = stepName + configOptions.ContextConfig = true + 
configOptions.StepName = stepName stepConfig, err := getConfig() if err != nil { return "", err @@ -94,8 +106,8 @@ func GetDockerImageValue(stepName string) (string, error) { } func getBuildToolFromStageConfig(stepName string) (string, error) { - configOptions.contextConfig = true - configOptions.stepName = stepName + configOptions.ContextConfig = true + configOptions.StepName = stepName stageConfig, err := GetStageConfig() if err != nil { return "", err @@ -116,7 +128,7 @@ func GetStageConfig() (config.StepConfig, error) { stepConfig := config.StepConfig{} projectConfigFile := getProjectConfigFile(GeneralConfig.CustomConfig) - customConfig, err := configOptions.openFile(projectConfigFile, GeneralConfig.GitHubAccessTokens) + customConfig, err := configOptions.OpenFile(projectConfigFile, GeneralConfig.GitHubAccessTokens) if err != nil { if !errors.Is(err, os.ErrNotExist) { return stepConfig, errors.Wrapf(err, "config: open configuration file '%v' failed", projectConfigFile) @@ -126,7 +138,7 @@ func GetStageConfig() (config.StepConfig, error) { defaultConfig := []io.ReadCloser{} for _, f := range GeneralConfig.DefaultConfig { - fc, err := configOptions.openFile(f, GeneralConfig.GitHubAccessTokens) + fc, err := configOptions.OpenFile(f, GeneralConfig.GitHubAccessTokens) // only create error for non-default values if err != nil && f != ".pipeline/defaults.yaml" { return stepConfig, errors.Wrapf(err, "config: getting defaults failed: '%v'", f) @@ -136,7 +148,7 @@ func GetStageConfig() (config.StepConfig, error) { } } - return myConfig.GetStageConfig(GeneralConfig.ParametersJSON, customConfig, defaultConfig, GeneralConfig.IgnoreCustomDefaults, configOptions.stageConfigAcceptedParameters, GeneralConfig.StageName) + return myConfig.GetStageConfig(GeneralConfig.ParametersJSON, customConfig, defaultConfig, GeneralConfig.IgnoreCustomDefaults, configOptions.StageConfigAcceptedParameters, GeneralConfig.StageName) } func getConfig() (config.StepConfig, error) { @@ -144,17 +156,17 @@ func getConfig() (config.StepConfig, error) { var stepConfig config.StepConfig var err error - if configOptions.stageConfig { + if configOptions.StageConfig { stepConfig, err = GetStageConfig() if err != nil { return stepConfig, errors.Wrap(err, "getting stage config failed") } } else { - log.Entry().Infof("Printing stepName %s", configOptions.stepName) + log.Entry().Infof("Printing stepName %s", configOptions.StepName) if GeneralConfig.MetaDataResolver == nil { GeneralConfig.MetaDataResolver = GetAllStepMetadata } - metadata, err := config.ResolveMetadata(GeneralConfig.GitHubAccessTokens, GeneralConfig.MetaDataResolver, configOptions.stepMetadata, configOptions.stepName) + metadata, err := config.ResolveMetadata(GeneralConfig.GitHubAccessTokens, GeneralConfig.MetaDataResolver, configOptions.StepMetadata, configOptions.StepName) if err != nil { return stepConfig, errors.Wrapf(err, "failed to resolve metadata") } @@ -172,7 +184,7 @@ func getConfig() (config.StepConfig, error) { projectConfigFile := getProjectConfigFile(GeneralConfig.CustomConfig) - customConfig, err := configOptions.openFile(projectConfigFile, GeneralConfig.GitHubAccessTokens) + customConfig, err := configOptions.OpenFile(projectConfigFile, GeneralConfig.GitHubAccessTokens) if err != nil { if !errors.Is(err, os.ErrNotExist) { return stepConfig, errors.Wrapf(err, "config: open configuration file '%v' failed", projectConfigFile) @@ -186,7 +198,7 @@ func getConfig() (config.StepConfig, error) { } for _, f := range GeneralConfig.DefaultConfig { - fc, err := 
configOptions.openFile(f, GeneralConfig.GitHubAccessTokens) + fc, err := configOptions.OpenFile(f, GeneralConfig.GitHubAccessTokens) // only create error for non-default values if err != nil && f != ".pipeline/defaults.yaml" { return stepConfig, errors.Wrapf(err, "config: getting defaults failed: '%v'", f) @@ -198,7 +210,7 @@ func getConfig() (config.StepConfig, error) { var flags map[string]interface{} - if configOptions.contextConfig { + if configOptions.ContextConfig { metadata.Spec.Inputs.Parameters = []config.StepParameters{} } @@ -208,33 +220,46 @@ func getConfig() (config.StepConfig, error) { } // apply context conditions if context configuration is requested - if configOptions.contextConfig { + if configOptions.ContextConfig { applyContextConditions(metadata, &stepConfig) } } return stepConfig, nil } -func generateConfig(utils getConfigUtils) error { +func generateConfigWrapper() error { + var formatter func(interface{}) (string, error) + switch strings.ToLower(configOptions.Output) { + case "yaml", "yml": + formatter = config.GetYAML + case "json": + formatter = config.GetJSON + default: + formatter = config.GetJSON + } + return GenerateConfig(formatter) +} + +func GenerateConfig(formatter func(interface{}) (string, error)) error { + utils := newGetConfigUtilsUtils() stepConfig, err := getConfig() if err != nil { return err } - myConfigJSON, err := config.GetJSON(stepConfig.Config) + myConfig, err := formatter(stepConfig.Config) if err != nil { - return fmt.Errorf("failed to get JSON from config: %w", err) + return fmt.Errorf("failed to marshal config: %w", err) } - if len(configOptions.outputFile) > 0 { - err := utils.FileWrite(configOptions.outputFile, []byte(myConfigJSON), 0666) - if err != nil { - return fmt.Errorf("failed to write output file %v: %w", configOptions.outputFile, err) + if len(configOptions.OutputFile) > 0 { + if err := utils.FileWrite(configOptions.OutputFile, []byte(myConfig), 0666); err != nil { + return fmt.Errorf("failed to write output file %v: %w", configOptions.OutputFile, err) } return nil } - fmt.Println(myConfigJSON) + fmt.Println(myConfig) return nil } @@ -242,20 +267,20 @@ func generateConfig(utils getConfigUtils) error { func addConfigFlags(cmd *cobra.Command) { // ToDo: support more output options, like https://kubernetes.io/docs/reference/kubectl/overview/#formatting-output - cmd.Flags().StringVar(&configOptions.output, "output", "json", "Defines the output format") - cmd.Flags().StringVar(&configOptions.outputFile, "outputFile", "", "Defines a file path. f set, the output will be written to the defines file") + cmd.Flags().StringVar(&configOptions.Output, "output", "json", "Defines the output format") + cmd.Flags().StringVar(&configOptions.OutputFile, "outputFile", "", "Defines a file path. 
f set, the output will be written to the defines file") - cmd.Flags().StringVar(&configOptions.parametersJSON, "parametersJSON", os.Getenv("PIPER_parametersJSON"), "Parameters to be considered in JSON format") - cmd.Flags().BoolVar(&configOptions.stageConfig, "stageConfig", false, "Defines if step stage configuration should be loaded and no step-specific config") - cmd.Flags().StringArrayVar(&configOptions.stageConfigAcceptedParameters, "stageConfigAcceptedParams", []string{}, "Defines the parameters used for filtering stage/general configuration when accessing stage config") - cmd.Flags().StringVar(&configOptions.stepMetadata, "stepMetadata", "", "Step metadata, passed as path to yaml") - cmd.Flags().StringVar(&configOptions.stepName, "stepName", "", "Step name, used to get step metadata if yaml path is not set") - cmd.Flags().BoolVar(&configOptions.contextConfig, "contextConfig", false, "Defines if step context configuration should be loaded instead of step config") + cmd.Flags().StringVar(&configOptions.ParametersJSON, "parametersJSON", os.Getenv("PIPER_parametersJSON"), "Parameters to be considered in JSON format") + cmd.Flags().BoolVar(&configOptions.StageConfig, "stageConfig", false, "Defines if step stage configuration should be loaded and no step-specific config") + cmd.Flags().StringArrayVar(&configOptions.StageConfigAcceptedParameters, "stageConfigAcceptedParams", []string{}, "Defines the parameters used for filtering stage/general configuration when accessing stage config") + cmd.Flags().StringVar(&configOptions.StepMetadata, "stepMetadata", "", "Step metadata, passed as path to yaml") + cmd.Flags().StringVar(&configOptions.StepName, "stepName", "", "Step name, used to get step metadata if yaml path is not set") + cmd.Flags().BoolVar(&configOptions.ContextConfig, "contextConfig", false, "Defines if step context configuration should be loaded instead of step config") } func defaultsAndFilters(metadata *config.StepData, stepName string) ([]io.ReadCloser, config.StepFilters, error) { - if configOptions.contextConfig { + if configOptions.ContextConfig { defaults, err := metadata.GetContextDefaults(stepName) if err != nil { return nil, config.StepFilters{}, errors.Wrap(err, "metadata: getting context defaults failed") diff --git a/cmd/getConfig_test.go b/cmd/getConfig_test.go index 327a5dc57d..f2acdbf561 100644 --- a/cmd/getConfig_test.go +++ b/cmd/getConfig_test.go @@ -56,8 +56,8 @@ func TestConfigCommand(t *testing.T) { t.Run("Run", func(t *testing.T) { t.Run("Success case", func(t *testing.T) { - configOptions.openFile = configOpenFileMock - configOptions.stepName = "githubCreateIssue" + configOptions.OpenFile = configOpenFileMock + configOptions.StepName = "githubCreateIssue" cmd.Run(cmd, []string{}) }) }) @@ -75,8 +75,8 @@ func TestDefaultsAndFilters(t *testing.T) { } t.Run("Context config", func(t *testing.T) { - configOptions.contextConfig = true - defer func() { configOptions.contextConfig = false }() + configOptions.ContextConfig = true + defer func() { configOptions.ContextConfig = false }() defaults, filters, err := defaultsAndFilters(&metadata, "stepName") assert.Equal(t, 1, len(defaults), "getting defaults failed") diff --git a/cmd/getDefaults.go b/cmd/getDefaults.go index 9930764fee..c69ddd056e 100644 --- a/cmd/getDefaults.go +++ b/cmd/getDefaults.go @@ -81,6 +81,9 @@ func getDefaults() ([]map[string]string, error) { var yamlContent string if !defaultsOptions.useV1 { + log.Entry().Warning("This step is using deprecated format of stage conditions which will be removed in 
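// Editor's note (illustrative usage sketch, not part of the diff): with SetConfigOptions and
// the formatter parameter of GenerateConfig exported above, other commands can render the
// resolved step configuration as YAML instead of JSON. A hedged sketch of such a caller,
// assuming only identifiers visible in this hunk (cmd.SetConfigOptions, cmd.GenerateConfig,
// config.GetYAML, config.OpenPiperFile); the step name is a placeholder:
package main

import (
	"github.com/SAP/jenkins-library/cmd"
	"github.com/SAP/jenkins-library/pkg/config"
	"github.com/SAP/jenkins-library/pkg/log"
)

func main() {
	cmd.SetConfigOptions(cmd.ConfigCommandOptions{
		StepName: "golangBuild",
		Output:   "yaml", // only consulted when going through generateConfigWrapper / the CLI flag
		OpenFile: config.OpenPiperFile,
	})
	if err := cmd.GenerateConfig(config.GetYAML); err != nil {
		log.Entry().WithError(err).Fatal("rendering step configuration as YAML failed")
	}
}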
Jan 2024. " + + "To avoid pipeline breakage, please call getDefaults command with --useV1 flag.", + ) var c config.Config c.ReadConfig(fc) @@ -128,7 +131,7 @@ func generateDefaults(utils getDefaultsUtils) ([]byte, error) { if len(defaultsOptions.outputFile) > 0 { err := utils.FileWrite(defaultsOptions.outputFile, []byte(jsonOutput), 0666) if err != nil { - return jsonOutput, fmt.Errorf("failed to write output file %v: %w", configOptions.outputFile, err) + return jsonOutput, fmt.Errorf("failed to write output file %v: %w", defaultsOptions.outputFile, err) } return jsonOutput, nil } diff --git a/cmd/githubCheckBranchProtection_generated.go b/cmd/githubCheckBranchProtection_generated.go index 98ea594c5a..9151dc49d5 100644 --- a/cmd/githubCheckBranchProtection_generated.go +++ b/cmd/githubCheckBranchProtection_generated.go @@ -66,7 +66,7 @@ It can for example be used to verify if certain status checks are mandatory. Thi log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/githubCommentIssue_generated.go b/cmd/githubCommentIssue_generated.go index 3cc492fa31..f425248664 100644 --- a/cmd/githubCommentIssue_generated.go +++ b/cmd/githubCommentIssue_generated.go @@ -65,7 +65,7 @@ This comes in very handy when you want to make developers aware of certain thing log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/githubCreateIssue_generated.go b/cmd/githubCreateIssue_generated.go index 99f44e78c3..ff7a08da10 100644 --- a/cmd/githubCreateIssue_generated.go +++ b/cmd/githubCreateIssue_generated.go @@ -68,7 +68,7 @@ You will be able to use this step for example for regular jobs to report into yo log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/githubCreatePullRequest_generated.go b/cmd/githubCreatePullRequest_generated.go index 7d138cf5fc..6ebb5d7a3e 100644 --- a/cmd/githubCreatePullRequest_generated.go +++ b/cmd/githubCreatePullRequest_generated.go @@ -69,7 +69,7 @@ It can for example be used for GitOps scenarios or for scenarios where you want log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/githubPublishRelease_generated.go b/cmd/githubPublishRelease_generated.go index f87f2c6936..ea9445ca10 100644 --- a/cmd/githubPublishRelease_generated.go +++ b/cmd/githubPublishRelease_generated.go @@ -82,7 +82,7 @@ The result looks like 
log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/githubSetCommitStatus_generated.go b/cmd/githubSetCommitStatus_generated.go index c6481f650a..0d16036960 100644 --- a/cmd/githubSetCommitStatus_generated.go +++ b/cmd/githubSetCommitStatus_generated.go @@ -74,7 +74,7 @@ It can for example be used to create additional check indicators for a pull requ log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/gitopsUpdateDeployment.go b/cmd/gitopsUpdateDeployment.go index 6695019418..7220025de8 100644 --- a/cmd/gitopsUpdateDeployment.go +++ b/cmd/gitopsUpdateDeployment.go @@ -3,9 +3,19 @@ package cmd import ( "bytes" "fmt" + "io" + "net/http" + "os" + "path" + "path/filepath" + "regexp" + "strings" + "time" + "github.com/SAP/jenkins-library/pkg/command" "github.com/SAP/jenkins-library/pkg/docker" gitUtil "github.com/SAP/jenkins-library/pkg/git" + piperhttp "github.com/SAP/jenkins-library/pkg/http" "github.com/SAP/jenkins-library/pkg/log" "github.com/SAP/jenkins-library/pkg/piperutils" "github.com/SAP/jenkins-library/pkg/telemetry" @@ -13,12 +23,6 @@ import ( "github.com/go-git/go-git/v5/plumbing" "github.com/go-git/go-git/v5/plumbing/object" "github.com/pkg/errors" - "io" - "os" - "path/filepath" - "regexp" - "strings" - "time" ) const toolKubectl = "kubectl" @@ -27,8 +31,8 @@ const toolKustomize = "kustomize" type iGitopsUpdateDeploymentGitUtils interface { CommitFiles(filePaths []string, commitMessage, author string) (plumbing.Hash, error) - PushChangesToRepository(username, password string, force *bool) error - PlainClone(username, password, serverURL, directory string) error + PushChangesToRepository(username, password string, force *bool, caCerts []byte) error + PlainClone(username, password, serverURL, directory string, caCerts []byte) error ChangeBranch(branchName string) error } @@ -36,6 +40,7 @@ type gitopsUpdateDeploymentFileUtils interface { TempDir(dir, pattern string) (name string, err error) RemoveAll(path string) error FileWrite(path string, content []byte, perm os.FileMode) error + FileRead(path string) ([]byte, error) Glob(pattern string) ([]string, error) } @@ -51,6 +56,25 @@ type gitopsUpdateDeploymentGitUtils struct { repository *git.Repository } +type gitopsUpdateDeploymentUtilsBundle struct { + *piperhttp.Client +} + +type gitopsUpdateDeploymentUtils interface { + DownloadFile(url, filename string, header http.Header, cookies []*http.Cookie) error +} + +func newGitopsUpdateDeploymentUtilsBundle() gitopsUpdateDeploymentUtils { + utils := gitopsUpdateDeploymentUtilsBundle{ + Client: &piperhttp.Client{}, + } + return &utils +} + +func (g *gitopsUpdateDeploymentUtilsBundle) DownloadFile(url, filename string, header http.Header, cookies []*http.Cookie) error { + return g.Client.DownloadFile(url, filename, header, cookies) +} + func (g *gitopsUpdateDeploymentGitUtils) CommitFiles(filePaths []string, commitMessage, author string) (plumbing.Hash, 
error) { for _, path := range filePaths { _, err := g.worktree.Add(path) @@ -71,13 +95,13 @@ func (g *gitopsUpdateDeploymentGitUtils) CommitFiles(filePaths []string, commitM return commit, nil } -func (g *gitopsUpdateDeploymentGitUtils) PushChangesToRepository(username, password string, force *bool) error { - return gitUtil.PushChangesToRepository(username, password, force, g.repository) +func (g *gitopsUpdateDeploymentGitUtils) PushChangesToRepository(username, password string, force *bool, caCerts []byte) error { + return gitUtil.PushChangesToRepository(username, password, force, g.repository, caCerts) } -func (g *gitopsUpdateDeploymentGitUtils) PlainClone(username, password, serverURL, directory string) error { +func (g *gitopsUpdateDeploymentGitUtils) PlainClone(username, password, serverURL, directory string, caCerts []byte) error { var err error - g.repository, err = gitUtil.PlainClone(username, password, serverURL, directory) + g.repository, err = gitUtil.PlainClone(username, password, serverURL, directory, caCerts) if err != nil { return errors.Wrapf(err, "plain clone failed '%s'", serverURL) } @@ -126,7 +150,12 @@ func runGitopsUpdateDeployment(config *gitopsUpdateDeploymentOptions, command gi } }() - err = cloneRepositoryAndChangeBranch(config, gitUtils, temporaryFolder) + certs, err := downloadCACertbunde(config.CustomTLSCertificateLinks, gitUtils, fileUtils) + if err != nil { + return err + } + + err = cloneRepositoryAndChangeBranch(config, gitUtils, fileUtils, temporaryFolder, certs) if err != nil { return errors.Wrap(err, "repository could not get prepared") } @@ -190,7 +219,7 @@ func runGitopsUpdateDeployment(config *gitopsUpdateDeploymentOptions, command gi } } - commit, err := commitAndPushChanges(config, gitUtils, allFiles) + commit, err := commitAndPushChanges(config, gitUtils, allFiles, certs) if err != nil { return errors.Wrap(err, "failed to commit and push changes") } @@ -292,8 +321,9 @@ func logNotRequiredButFilledFieldForKustomize(config *gitopsUpdateDeploymentOpti } } -func cloneRepositoryAndChangeBranch(config *gitopsUpdateDeploymentOptions, gitUtils iGitopsUpdateDeploymentGitUtils, temporaryFolder string) error { - err := gitUtils.PlainClone(config.Username, config.Password, config.ServerURL, temporaryFolder) +func cloneRepositoryAndChangeBranch(config *gitopsUpdateDeploymentOptions, gitUtils iGitopsUpdateDeploymentGitUtils, fileUtils gitopsUpdateDeploymentFileUtils, temporaryFolder string, certs []byte) error { + + err := gitUtils.PlainClone(config.Username, config.Password, config.ServerURL, temporaryFolder, certs) if err != nil { return errors.Wrap(err, "failed to plain clone repository") } @@ -305,6 +335,30 @@ func cloneRepositoryAndChangeBranch(config *gitopsUpdateDeploymentOptions, gitUt return nil } +func downloadCACertbunde(customTlsCertificateLinks []string, gitUtils iGitopsUpdateDeploymentGitUtils, fileUtils gitopsUpdateDeploymentFileUtils) ([]byte, error) { + certs := []byte{} + utils := newGitopsUpdateDeploymentUtilsBundle() + if len(customTlsCertificateLinks) > 0 { + for _, customTlsCertificateLink := range customTlsCertificateLinks { + log.Entry().Infof("Downloading CA certs %s into file '%s'", customTlsCertificateLink, path.Base(customTlsCertificateLink)) + err := utils.DownloadFile(customTlsCertificateLink, path.Base(customTlsCertificateLink), nil, nil) + if err != nil { + return certs, nil + } + + content, err := fileUtils.FileRead(path.Base(customTlsCertificateLink)) + if err != nil { + return certs, nil + } + log.Entry().Infof("CA certs added 
successfully to cert pool") + + certs = append(certs, content...) + } + } + + return certs, nil +} + func executeKubectl(config *gitopsUpdateDeploymentOptions, command gitopsUpdateDeploymentExecRunner, filePath string) ([]byte, error) { var outputBytes []byte registryImage, err := buildRegistryPlusImage(config) @@ -444,7 +498,7 @@ func buildRegistryPlusImageAndTagSeparately(config *gitopsUpdateDeploymentOption } -func commitAndPushChanges(config *gitopsUpdateDeploymentOptions, gitUtils iGitopsUpdateDeploymentGitUtils, filePaths []string) (plumbing.Hash, error) { +func commitAndPushChanges(config *gitopsUpdateDeploymentOptions, gitUtils iGitopsUpdateDeploymentGitUtils, filePaths []string, certs []byte) (plumbing.Hash, error) { commitMessage := config.CommitMessage if commitMessage == "" { @@ -456,7 +510,7 @@ func commitAndPushChanges(config *gitopsUpdateDeploymentOptions, gitUtils iGitop return [20]byte{}, errors.Wrap(err, "committing changes failed") } - err = gitUtils.PushChangesToRepository(config.Username, config.Password, &config.ForcePush) + err = gitUtils.PushChangesToRepository(config.Username, config.Password, &config.ForcePush, certs) if err != nil { return [20]byte{}, errors.Wrap(err, "pushing changes failed") } diff --git a/cmd/gitopsUpdateDeployment_generated.go b/cmd/gitopsUpdateDeployment_generated.go index 97eb254291..d02e07abe6 100644 --- a/cmd/gitopsUpdateDeployment_generated.go +++ b/cmd/gitopsUpdateDeployment_generated.go @@ -16,20 +16,21 @@ import ( ) type gitopsUpdateDeploymentOptions struct { - BranchName string `json:"branchName,omitempty"` - CommitMessage string `json:"commitMessage,omitempty"` - ServerURL string `json:"serverUrl,omitempty"` - ForcePush bool `json:"forcePush,omitempty"` - Username string `json:"username,omitempty"` - Password string `json:"password,omitempty"` - FilePath string `json:"filePath,omitempty"` - ContainerName string `json:"containerName,omitempty"` - ContainerRegistryURL string `json:"containerRegistryUrl,omitempty"` - ContainerImageNameTag string `json:"containerImageNameTag,omitempty"` - ChartPath string `json:"chartPath,omitempty"` - HelmValues []string `json:"helmValues,omitempty"` - DeploymentName string `json:"deploymentName,omitempty"` - Tool string `json:"tool,omitempty" validate:"possible-values=kubectl helm kustomize"` + BranchName string `json:"branchName,omitempty"` + CommitMessage string `json:"commitMessage,omitempty"` + ServerURL string `json:"serverUrl,omitempty"` + ForcePush bool `json:"forcePush,omitempty"` + Username string `json:"username,omitempty"` + Password string `json:"password,omitempty"` + FilePath string `json:"filePath,omitempty"` + ContainerName string `json:"containerName,omitempty"` + ContainerRegistryURL string `json:"containerRegistryUrl,omitempty"` + ContainerImageNameTag string `json:"containerImageNameTag,omitempty"` + ChartPath string `json:"chartPath,omitempty"` + HelmValues []string `json:"helmValues,omitempty"` + DeploymentName string `json:"deploymentName,omitempty"` + Tool string `json:"tool,omitempty" validate:"possible-values=kubectl helm kustomize"` + CustomTLSCertificateLinks []string `json:"customTlsCertificateLinks,omitempty"` } // GitopsUpdateDeploymentCommand Updates Kubernetes Deployment Manifest in an Infrastructure Git Repository @@ -79,7 +80,7 @@ For *kustomize* the ` + "`" + `images` + "`" + ` section will be update with the log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || 
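// Editor's note (illustrative sketch, not part of the diff): the caCerts parameter threaded
// through PlainClone and PushChangesToRepository above corresponds, in go-git v5, to the
// CABundle option on clone/push, which adds the downloaded certificates to the trusted pool.
// A minimal sketch under that assumption (pkg/git in this repository wraps this and may
// differ in detail):
package gitsketch

import (
	git "github.com/go-git/go-git/v5"
	githttp "github.com/go-git/go-git/v5/plumbing/transport/http"
)

func plainCloneWithCustomCA(username, password, serverURL, directory string, caCerts []byte) (*git.Repository, error) {
	return git.PlainClone(directory, false, &git.CloneOptions{
		URL:      serverURL,
		Auth:     &githttp.BasicAuth{Username: username, Password: password},
		CABundle: caCerts, // custom TLS certificates downloaded via customTlsCertificateLinks
	})
}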
len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) @@ -155,6 +156,7 @@ func addGitopsUpdateDeploymentFlags(cmd *cobra.Command, stepConfig *gitopsUpdate cmd.Flags().StringSliceVar(&stepConfig.HelmValues, "helmValues", []string{}, "List of helm values as YAML file reference or URL (as per helm parameter description for `-f` / `--values`)") cmd.Flags().StringVar(&stepConfig.DeploymentName, "deploymentName", os.Getenv("PIPER_deploymentName"), "Defines the name of the deployment. In case of `kustomize` this is the name or alias of the image in the `kustomization.yaml`") cmd.Flags().StringVar(&stepConfig.Tool, "tool", `kubectl`, "Defines the tool which should be used to update the deployment description.") + cmd.Flags().StringSliceVar(&stepConfig.CustomTLSCertificateLinks, "customTlsCertificateLinks", []string{}, "List containing download links of custom TLS certificates. This is required to ensure trusted connections to registries with custom certificates.") cmd.MarkFlagRequired("branchName") cmd.MarkFlagRequired("serverUrl") @@ -343,6 +345,15 @@ func gitopsUpdateDeploymentMetadata() config.StepData { Aliases: []config.Alias{}, Default: `kubectl`, }, + { + Name: "customTlsCertificateLinks", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "[]string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: []string{}, + }, }, }, Containers: []config.Container{ diff --git a/cmd/gitopsUpdateDeployment_test.go b/cmd/gitopsUpdateDeployment_test.go index 2d05f1ce39..a776fdb83c 100644 --- a/cmd/gitopsUpdateDeployment_test.go +++ b/cmd/gitopsUpdateDeployment_test.go @@ -772,6 +772,7 @@ type filesMock struct { failOnCreation bool failOnDeletion bool failOnWrite bool + failOnRead bool failOnGlob bool path string } @@ -783,6 +784,13 @@ func (f filesMock) FileWrite(path string, content []byte, perm os.FileMode) erro return piperutils.Files{}.FileWrite(path, content, perm) } +func (f filesMock) FileRead(path string) ([]byte, error) { + if f.failOnRead { + return []byte{}, errors.New("error appeared") + } + return piperutils.Files{}.FileRead(path) +} + func (f filesMock) TempDir(dir string, pattern string) (name string, err error) { if f.failOnCreation { return "", errors.New("error appeared") @@ -848,7 +856,7 @@ func (v *gitUtilsMock) CommitFiles(newFiles []string, commitMessage string, _ st return [20]byte{123}, nil } -func (v gitUtilsMock) PushChangesToRepository(_ string, _ string, force *bool) error { +func (v gitUtilsMock) PushChangesToRepository(_ string, _ string, force *bool, caCerts []byte) error { if v.failOnPush { return errors.New("error on push") } @@ -858,7 +866,7 @@ func (v gitUtilsMock) PushChangesToRepository(_ string, _ string, force *bool) e return nil } -func (v *gitUtilsMock) PlainClone(_, _, _, directory string) error { +func (v *gitUtilsMock) PlainClone(_, _, _, directory string, caCerts []byte) error { if v.skipClone { return nil } diff --git a/cmd/golangBuild_generated.go b/cmd/golangBuild_generated.go index 676a0539ea..0c0df4ff51 100644 --- a/cmd/golangBuild_generated.go +++ b/cmd/golangBuild_generated.go @@ -164,7 +164,7 @@ If the build is successful the resulting artifact can be uploaded to e.g. 
a bina log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) @@ -498,7 +498,7 @@ func golangBuildMetadata() config.StepData { { Name: "privateModules", ResourceRef: []config.ResourceReference{}, - Scope: []string{"STEPS", "STAGES", "PARAMETERS"}, + Scope: []string{"GENERAL", "STEPS", "STAGES", "PARAMETERS"}, Type: "string", Mandatory: false, Aliases: []config.Alias{}, diff --git a/cmd/gradleExecuteBuild_generated.go b/cmd/gradleExecuteBuild_generated.go index 79ea263a93..c239f98e2e 100644 --- a/cmd/gradleExecuteBuild_generated.go +++ b/cmd/gradleExecuteBuild_generated.go @@ -144,7 +144,7 @@ func GradleExecuteBuildCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/hadolintExecute_generated.go b/cmd/hadolintExecute_generated.go index 717c746400..4d05bae15a 100644 --- a/cmd/hadolintExecute_generated.go +++ b/cmd/hadolintExecute_generated.go @@ -65,7 +65,7 @@ The linter is parsing the Dockerfile into an abstract syntax tree (AST) and perf log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/helmExecute_generated.go b/cmd/helmExecute_generated.go index 516b471cbc..58fb70fd61 100644 --- a/cmd/helmExecute_generated.go +++ b/cmd/helmExecute_generated.go @@ -144,7 +144,7 @@ Note: piper supports only helm3 version, since helm2 is deprecated.`, log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) @@ -255,7 +255,8 @@ func helmExecuteMetadata() config.StepData { Secrets: []config.StepSecrets{ {Name: "kubeConfigFileCredentialsId", Description: "Jenkins 'Secret file' credentials ID containing kubeconfig file. 
Details can be found in the [Kubernetes documentation](https://kubernetes.io/docs/concepts/configuration/organize-cluster-access-kubeconfig/).", Type: "jenkins", Aliases: []config.Alias{{Name: "kubeCredentialsId", Deprecated: true}}}, {Name: "dockerConfigJsonCredentialsId", Description: "Jenkins 'Secret file' credentials ID containing Docker config.json (with registry credential(s)).", Type: "jenkins"}, - {Name: "targetRepositoryCredentialsId", Description: "Jenkins 'Username Password' credentials ID containing username and password for the Helm Repository authentication", Type: "jenkins"}, + {Name: "sourceRepositoryCredentialsId", Description: "Jenkins 'Username Password' credentials ID containing username and password for the Helm Repository authentication (source repo)", Type: "jenkins"}, + {Name: "targetRepositoryCredentialsId", Description: "Jenkins 'Username Password' credentials ID containing username and password for the Helm Repository authentication (target repo)", Type: "jenkins"}, }, Resources: []config.StepResources{ {Name: "deployDescriptor", Type: "stash"}, diff --git a/cmd/imagePushToRegistry.go b/cmd/imagePushToRegistry.go new file mode 100644 index 0000000000..75940b043b --- /dev/null +++ b/cmd/imagePushToRegistry.go @@ -0,0 +1,248 @@ +package cmd + +import ( + "context" + "fmt" + "regexp" + + v1 "github.com/google/go-containerregistry/pkg/v1" + "github.com/pkg/errors" + "golang.org/x/sync/errgroup" + + "github.com/SAP/jenkins-library/pkg/command" + "github.com/SAP/jenkins-library/pkg/docker" + "github.com/SAP/jenkins-library/pkg/log" + "github.com/SAP/jenkins-library/pkg/piperutils" + "github.com/SAP/jenkins-library/pkg/telemetry" +) + +const ( + targetDockerConfigPath = "/root/.docker/config.json" +) + +type dockerImageUtils interface { + LoadImage(ctx context.Context, src string) (v1.Image, error) + PushImage(ctx context.Context, im v1.Image, dest, platform string) error + CopyImage(ctx context.Context, src, dest, platform string) error +} + +type imagePushToRegistryUtils interface { + command.ExecRunner + piperutils.FileUtils + dockerImageUtils + + // Add more methods here, or embed additional interfaces, or remove/replace as required. + // The imagePushToRegistryUtils interface should be descriptive of your runtime dependencies, + // i.e. include everything you need to be able to mock in tests. + // Unit tests shall be executable in parallel (not depend on global state), and don't (re-)test dependencies. +} + +type imagePushToRegistryUtilsBundle struct { + *command.Command + *piperutils.Files + dockerImageUtils + + // Embed more structs as necessary to implement methods or interfaces you add to imagePushToRegistryUtils. + // Structs embedded in this way must each have a unique set of methods attached. + // If there is no struct which implements the method you need, attach the method to + // imagePushToRegistryUtilsBundle and forward to the implementation of the dependency. +} + +func newImagePushToRegistryUtils() imagePushToRegistryUtils { + utils := imagePushToRegistryUtilsBundle{ + Command: &command.Command{ + StepName: "imagePushToRegistry", + }, + Files: &piperutils.Files{}, + dockerImageUtils: &docker.CraneUtilsBundle{}, + } + // Reroute command output to logging framework + utils.Stdout(log.Writer()) + utils.Stderr(log.Writer()) + return &utils +} + +func imagePushToRegistry(config imagePushToRegistryOptions, telemetryData *telemetry.CustomData) { + // Utils can be used wherever the command.ExecRunner interface is expected. 
+ // It can also be used for example as a mavenExecRunner. + utils := newImagePushToRegistryUtils() + + // For HTTP calls import piperhttp "github.com/SAP/jenkins-library/pkg/http" + // and use a &piperhttp.Client{} in a custom system + // Example: step checkmarxExecuteScan.go + + // Error situations should be bubbled up until they reach the line below which will then stop execution + // through the log.Entry().Fatal() call leading to an os.Exit(1) in the end. + err := runImagePushToRegistry(&config, telemetryData, utils) + if err != nil { + log.Entry().WithError(err).Fatal("step execution failed") + } +} + +func runImagePushToRegistry(config *imagePushToRegistryOptions, telemetryData *telemetry.CustomData, utils imagePushToRegistryUtils) error { + if !config.PushLocalDockerImage { + if len(config.TargetImages) == 0 { + config.TargetImages = mapSourceTargetImages(config.SourceImages) + } + if len(config.TargetImages) != len(config.SourceImages) { + log.SetErrorCategory(log.ErrorConfiguration) + return errors.New("configuration error: please configure targetImage and sourceImage properly") + } + } + + re := regexp.MustCompile(`^https?://`) + config.SourceRegistryURL = re.ReplaceAllString(config.SourceRegistryURL, "") + config.TargetRegistryURL = re.ReplaceAllString(config.TargetRegistryURL, "") + + log.Entry().Debug("Handling destination registry credentials") + if err := handleCredentialsForPrivateRegistry(config.DockerConfigJSON, config.TargetRegistryURL, config.TargetRegistryUser, config.TargetRegistryPassword, utils); err != nil { + return errors.Wrap(err, "failed to handle credentials for target registry") + } + + if config.PushLocalDockerImage { + if err := pushLocalImageToTargetRegistry(config, utils); err != nil { + return errors.Wrapf(err, "failed to push local image to %q", config.TargetRegistryURL) + } + return nil + } + + log.Entry().Debug("Handling source registry credentials") + if err := handleCredentialsForPrivateRegistry(config.DockerConfigJSON, config.SourceRegistryURL, config.SourceRegistryUser, config.SourceRegistryPassword, utils); err != nil { + return errors.Wrap(err, "failed to handle credentials for source registry") + } + + if err := copyImages(config, utils); err != nil { + return errors.Wrap(err, "failed to copy images") + } + + return nil +} + +func handleCredentialsForPrivateRegistry(dockerConfigJsonPath, registry, username, password string, utils imagePushToRegistryUtils) error { + if len(dockerConfigJsonPath) == 0 { + if len(registry) == 0 || len(username) == 0 || len(password) == 0 { + return errors.New("docker credentials not provided") + } + + if _, err := docker.CreateDockerConfigJSON(registry, username, password, "", targetDockerConfigPath, utils); err != nil { + return errors.Wrap(err, "failed to create new docker config") + } + return nil + } + + if _, err := docker.CreateDockerConfigJSON(registry, username, password, targetDockerConfigPath, dockerConfigJsonPath, utils); err != nil { + return errors.Wrapf(err, "failed to update docker config %q", dockerConfigJsonPath) + } + + if err := docker.MergeDockerConfigJSON(targetDockerConfigPath, dockerConfigJsonPath, utils); err != nil { + return errors.Wrapf(err, "failed to merge docker config files") + } + + return nil +} + +func copyImages(config *imagePushToRegistryOptions, utils imagePushToRegistryUtils) error { + g, ctx := errgroup.WithContext(context.Background()) + g.SetLimit(10) + platform := config.TargetArchitecture + + for _, sourceImage := range config.SourceImages { + sourceImage := sourceImage + 
src := fmt.Sprintf("%s/%s:%s", config.SourceRegistryURL, sourceImage, config.SourceImageTag) + + targetImage, ok := config.TargetImages[sourceImage].(string) + if !ok { + return fmt.Errorf("incorrect name of target image: %v", config.TargetImages[sourceImage]) + } + + if config.TargetImageTag != "" { + g.Go(func() error { + dst := fmt.Sprintf("%s/%s:%s", config.TargetRegistryURL, targetImage, config.TargetImageTag) + log.Entry().Infof("Copying %s to %s...", src, dst) + if err := utils.CopyImage(ctx, src, dst, platform); err != nil { + return err + } + log.Entry().Infof("Copying %s to %s... Done", src, dst) + return nil + }) + } + + if config.TagLatest { + g.Go(func() error { + dst := fmt.Sprintf("%s/%s", config.TargetRegistryURL, config.TargetImages[sourceImage]) + log.Entry().Infof("Copying %s to %s...", src, dst) + if err := utils.CopyImage(ctx, src, dst, platform); err != nil { + return err + } + log.Entry().Infof("Copying %s to %s... Done", src, dst) + return nil + }) + } + } + + if err := g.Wait(); err != nil { + return err + } + + return nil +} + +func pushLocalImageToTargetRegistry(config *imagePushToRegistryOptions, utils imagePushToRegistryUtils) error { + g, ctx := errgroup.WithContext(context.Background()) + g.SetLimit(10) + platform := config.TargetArchitecture + + log.Entry().Infof("Loading local image...") + img, err := utils.LoadImage(ctx, config.LocalDockerImagePath) + if err != nil { + return err + } + log.Entry().Infof("Loading local image... Done") + + for _, trgImage := range config.TargetImages { + trgImage := trgImage + targetImage, ok := trgImage.(string) + if !ok { + return fmt.Errorf("incorrect name of target image: %v", trgImage) + } + + if config.TargetImageTag != "" { + g.Go(func() error { + dst := fmt.Sprintf("%s/%s:%s", config.TargetRegistryURL, targetImage, config.TargetImageTag) + log.Entry().Infof("Pushing %s...", dst) + if err := utils.PushImage(ctx, img, dst, platform); err != nil { + return err + } + log.Entry().Infof("Pushing %s... Done", dst) + return nil + }) + } + + if config.TagLatest { + g.Go(func() error { + dst := fmt.Sprintf("%s/%s", config.TargetRegistryURL, targetImage) + log.Entry().Infof("Pushing %s...", dst) + if err := utils.PushImage(ctx, img, dst, platform); err != nil { + return err + } + log.Entry().Infof("Pushing %s... Done", dst) + return nil + }) + } + } + + if err := g.Wait(); err != nil { + return err + } + + return nil +} + +func mapSourceTargetImages(sourceImages []string) map[string]any { + targetImages := make(map[string]any, len(sourceImages)) + for _, sourceImage := range sourceImages { + targetImages[sourceImage] = sourceImage + } + + return targetImages +} diff --git a/cmd/imagePushToRegistry_generated.go b/cmd/imagePushToRegistry_generated.go new file mode 100644 index 0000000000..fd50801225 --- /dev/null +++ b/cmd/imagePushToRegistry_generated.go @@ -0,0 +1,381 @@ +// Code generated by piper's step-generator. DO NOT EDIT. 
+ +package cmd + +import ( + "fmt" + "os" + "time" + + "github.com/SAP/jenkins-library/pkg/config" + "github.com/SAP/jenkins-library/pkg/log" + "github.com/SAP/jenkins-library/pkg/splunk" + "github.com/SAP/jenkins-library/pkg/telemetry" + "github.com/SAP/jenkins-library/pkg/validation" + "github.com/spf13/cobra" +) + +type imagePushToRegistryOptions struct { + TargetImages map[string]interface{} `json:"targetImages,omitempty"` + SourceImages []string `json:"sourceImages,omitempty" validate:"required_if=PushLocalDockerImage false"` + SourceImageTag string `json:"sourceImageTag,omitempty" validate:"required_if=PushLocalDockerImage false"` + SourceRegistryURL string `json:"sourceRegistryUrl,omitempty" validate:"required_if=PushLocalDockerImage false"` + SourceRegistryUser string `json:"sourceRegistryUser,omitempty" validate:"required_if=PushLocalDockerImage false"` + SourceRegistryPassword string `json:"sourceRegistryPassword,omitempty" validate:"required_if=PushLocalDockerImage false"` + TargetRegistryURL string `json:"targetRegistryUrl,omitempty"` + TargetRegistryUser string `json:"targetRegistryUser,omitempty"` + TargetRegistryPassword string `json:"targetRegistryPassword,omitempty"` + TargetImageTag string `json:"targetImageTag,omitempty" validate:"required_if=TagLatest false"` + TagLatest bool `json:"tagLatest,omitempty"` + DockerConfigJSON string `json:"dockerConfigJSON,omitempty"` + PushLocalDockerImage bool `json:"pushLocalDockerImage,omitempty"` + LocalDockerImagePath string `json:"localDockerImagePath,omitempty" validate:"required_if=PushLocalDockerImage true"` + TargetArchitecture string `json:"targetArchitecture,omitempty"` +} + +// ImagePushToRegistryCommand Allows you to copy a Docker image from a source container registry to a destination container registry. +func ImagePushToRegistryCommand() *cobra.Command { + const STEP_NAME = "imagePushToRegistry" + + metadata := imagePushToRegistryMetadata() + var stepConfig imagePushToRegistryOptions + var startTime time.Time + var logCollector *log.CollectorHook + var splunkClient *splunk.Splunk + telemetryClient := &telemetry.Telemetry{} + + var createImagePushToRegistryCmd = &cobra.Command{ + Use: STEP_NAME, + Short: "Allows you to copy a Docker image from a source container registry to a destination container registry.", + Long: `In case you want to pull an existing image from a remote container registry, a source image and source registry needs to be specified.
+This makes it possible to move an image from one registry to another. + +The imagePushToRegistry is not similar in functionality to containerPushToRegistry (which is currently a groovy based step and only be used in jenkins). +Currently the imagePushToRegistry only supports copying a local image or image from source remote registry to destination registry.`, + PreRunE: func(cmd *cobra.Command, _ []string) error { + startTime = time.Now() + log.SetStepName(STEP_NAME) + log.SetVerbose(GeneralConfig.Verbose) + + GeneralConfig.GitHubAccessTokens = ResolveAccessTokens(GeneralConfig.GitHubTokens) + + path, _ := os.Getwd() + fatalHook := &log.FatalHook{CorrelationID: GeneralConfig.CorrelationID, Path: path} + log.RegisterHook(fatalHook) + + err := PrepareConfig(cmd, &metadata, STEP_NAME, &stepConfig, config.OpenPiperFile) + if err != nil { + log.SetErrorCategory(log.ErrorConfiguration) + return err + } + log.RegisterSecret(stepConfig.SourceRegistryUser) + log.RegisterSecret(stepConfig.SourceRegistryPassword) + log.RegisterSecret(stepConfig.TargetRegistryUser) + log.RegisterSecret(stepConfig.TargetRegistryPassword) + log.RegisterSecret(stepConfig.DockerConfigJSON) + + if len(GeneralConfig.HookConfig.SentryConfig.Dsn) > 0 { + sentryHook := log.NewSentryHook(GeneralConfig.HookConfig.SentryConfig.Dsn, GeneralConfig.CorrelationID) + log.RegisterHook(&sentryHook) + } + + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { + splunkClient = &splunk.Splunk{} + logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} + log.RegisterHook(logCollector) + } + + if err = log.RegisterANSHookIfConfigured(GeneralConfig.CorrelationID); err != nil { + log.Entry().WithError(err).Warn("failed to set up SAP Alert Notification Service log hook") + } + + validation, err := validation.New(validation.WithJSONNamesForStructFields(), validation.WithPredefinedErrorMessages()) + if err != nil { + return err + } + if err = validation.ValidateStruct(stepConfig); err != nil { + log.SetErrorCategory(log.ErrorConfiguration) + return err + } + + return nil + }, + Run: func(_ *cobra.Command, _ []string) { + stepTelemetryData := telemetry.CustomData{} + stepTelemetryData.ErrorCode = "1" + handler := func() { + config.RemoveVaultSecretFiles() + stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds()) + stepTelemetryData.ErrorCategory = log.GetErrorCategory().String() + stepTelemetryData.PiperCommitHash = GitCommit + telemetryClient.SetData(&stepTelemetryData) + telemetryClient.Send() + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + splunkClient.Initialize(GeneralConfig.CorrelationID, + GeneralConfig.HookConfig.SplunkConfig.Dsn, + GeneralConfig.HookConfig.SplunkConfig.Token, + GeneralConfig.HookConfig.SplunkConfig.Index, + GeneralConfig.HookConfig.SplunkConfig.SendLogs) + splunkClient.Send(telemetryClient.GetData(), logCollector) + } + if len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { + splunkClient.Initialize(GeneralConfig.CorrelationID, + GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint, + GeneralConfig.HookConfig.SplunkConfig.ProdCriblToken, + GeneralConfig.HookConfig.SplunkConfig.ProdCriblIndex, + GeneralConfig.HookConfig.SplunkConfig.SendLogs) + splunkClient.Send(telemetryClient.GetData(), logCollector) + } + } + log.DeferExitHandler(handler) + defer handler() + telemetryClient.Initialize(GeneralConfig.NoTelemetry, STEP_NAME) + imagePushToRegistry(stepConfig, 
&stepTelemetryData) + stepTelemetryData.ErrorCode = "0" + log.Entry().Info("SUCCESS") + }, + } + + addImagePushToRegistryFlags(createImagePushToRegistryCmd, &stepConfig) + return createImagePushToRegistryCmd +} + +func addImagePushToRegistryFlags(cmd *cobra.Command, stepConfig *imagePushToRegistryOptions) { + + cmd.Flags().StringSliceVar(&stepConfig.SourceImages, "sourceImages", []string{}, "Defines the names of the images that will be pulled from source registry. This is helpful for moving images from one location to another.\nPlease ensure that targetImages and sourceImages correspond to each other: the first image in sourceImages should be mapped to the first image in the targetImages parameter.\n\n```yaml\n sourceImages:\n - image-1\n - image-2\n targetImages:\n image-1: target-image-1\n image-2: target-image-2\n```\n") + cmd.Flags().StringVar(&stepConfig.SourceImageTag, "sourceImageTag", os.Getenv("PIPER_sourceImageTag"), "Tag of the sourceImages") + cmd.Flags().StringVar(&stepConfig.SourceRegistryURL, "sourceRegistryUrl", os.Getenv("PIPER_sourceRegistryUrl"), "Defines a registry url from where the image should optionally be pulled from, incl. the protocol like `https://my.registry.com`*\"") + cmd.Flags().StringVar(&stepConfig.SourceRegistryUser, "sourceRegistryUser", os.Getenv("PIPER_sourceRegistryUser"), "Username of the source registry where the image should be pulled from.") + cmd.Flags().StringVar(&stepConfig.SourceRegistryPassword, "sourceRegistryPassword", os.Getenv("PIPER_sourceRegistryPassword"), "Password of the source registry where the image should be pulled from.") + cmd.Flags().StringVar(&stepConfig.TargetRegistryURL, "targetRegistryUrl", os.Getenv("PIPER_targetRegistryUrl"), "Defines a registry url from where the image should optionally be pushed to, incl. the protocol like `https://my.registry.com`*\"") + cmd.Flags().StringVar(&stepConfig.TargetRegistryUser, "targetRegistryUser", os.Getenv("PIPER_targetRegistryUser"), "Username of the target registry where the image should be pushed to.") + cmd.Flags().StringVar(&stepConfig.TargetRegistryPassword, "targetRegistryPassword", os.Getenv("PIPER_targetRegistryPassword"), "Password of the target registry where the image should be pushed to.") + cmd.Flags().StringVar(&stepConfig.TargetImageTag, "targetImageTag", os.Getenv("PIPER_targetImageTag"), "Tag of the targetImages") + cmd.Flags().BoolVar(&stepConfig.TagLatest, "tagLatest", false, "Defines if the image should be tagged as `latest`. The parameter is true if targetImageTag is not specified.") + cmd.Flags().StringVar(&stepConfig.DockerConfigJSON, "dockerConfigJSON", os.Getenv("PIPER_dockerConfigJSON"), "Path to the file `.docker/config.json` - this is typically provided by your CI/CD system. You can find more details about the Docker credentials in the [Docker documentation](https://docs.docker.com/engine/reference/commandline/login/).") + cmd.Flags().BoolVar(&stepConfig.PushLocalDockerImage, "pushLocalDockerImage", false, "Defines if the local image should be pushed to registry") + cmd.Flags().StringVar(&stepConfig.LocalDockerImagePath, "localDockerImagePath", os.Getenv("PIPER_localDockerImagePath"), "If the `localDockerImagePath` is a directory, it will be read as an OCI image layout. Otherwise, `localDockerImagePath` is assumed to be a docker-style tarball.") + cmd.Flags().StringVar(&stepConfig.TargetArchitecture, "targetArchitecture", os.Getenv("PIPER_targetArchitecture"), "Specifies the targetArchitecture in the form os/arch[/variant][:osversion] (e.g. linux/amd64). 
All OS and architectures of the specified image will be copied if it is a multi-platform image. To only push a single platform to the target registry use this parameter") + + cmd.MarkFlagRequired("targetRegistryUrl") + cmd.MarkFlagRequired("targetRegistryUser") + cmd.MarkFlagRequired("targetRegistryPassword") +} + +// retrieve step metadata +func imagePushToRegistryMetadata() config.StepData { + var theMetaData = config.StepData{ + Metadata: config.StepMetadata{ + Name: "imagePushToRegistry", + Aliases: []config.Alias{}, + Description: "Allows you to copy a Docker image from a source container registry to a destination container registry.", + }, + Spec: config.StepSpec{ + Inputs: config.StepInputs{ + Resources: []config.StepResources{ + {Name: "source", Type: "stash"}, + }, + Parameters: []config.StepParameters{ + { + Name: "targetImages", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "map[string]interface{}", + Mandatory: false, + Aliases: []config.Alias{}, + }, + { + Name: "sourceImages", + ResourceRef: []config.ResourceReference{ + { + Name: "commonPipelineEnvironment", + Param: "container/imageNames", + }, + }, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "[]string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: []string{}, + }, + { + Name: "sourceImageTag", + ResourceRef: []config.ResourceReference{ + { + Name: "commonPipelineEnvironment", + Param: "artifactVersion", + }, + }, + Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{{Name: "artifactVersion"}, {Name: "containerImageTag"}}, + Default: os.Getenv("PIPER_sourceImageTag"), + }, + { + Name: "sourceRegistryUrl", + ResourceRef: []config.ResourceReference{ + { + Name: "commonPipelineEnvironment", + Param: "container/registryUrl", + }, + }, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_sourceRegistryUrl"), + }, + { + Name: "sourceRegistryUser", + ResourceRef: []config.ResourceReference{ + { + Name: "commonPipelineEnvironment", + Param: "container/repositoryUsername", + }, + + { + Name: "registryCredentialsVaultSecretName", + Type: "vaultSecret", + Default: "docker-registry", + }, + }, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_sourceRegistryUser"), + }, + { + Name: "sourceRegistryPassword", + ResourceRef: []config.ResourceReference{ + { + Name: "commonPipelineEnvironment", + Param: "container/repositoryPassword", + }, + + { + Name: "registryCredentialsVaultSecretName", + Type: "vaultSecret", + Default: "docker-registry", + }, + }, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_sourceRegistryPassword"), + }, + { + Name: "targetRegistryUrl", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: true, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_targetRegistryUrl"), + }, + { + Name: "targetRegistryUser", + ResourceRef: []config.ResourceReference{ + { + Name: "registryCredentialsVaultSecretName", + Type: "vaultSecret", + Default: "docker-registry", + }, + }, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: true, + Aliases: []config.Alias{}, + Default: 
os.Getenv("PIPER_targetRegistryUser"), + }, + { + Name: "targetRegistryPassword", + ResourceRef: []config.ResourceReference{ + { + Name: "registryCredentialsVaultSecretName", + Type: "vaultSecret", + Default: "docker-registry", + }, + }, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: true, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_targetRegistryPassword"), + }, + { + Name: "targetImageTag", + ResourceRef: []config.ResourceReference{ + { + Name: "commonPipelineEnvironment", + Param: "artifactVersion", + }, + }, + Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{{Name: "artifactVersion"}, {Name: "containerImageTag"}}, + Default: os.Getenv("PIPER_targetImageTag"), + }, + { + Name: "tagLatest", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "bool", + Mandatory: false, + Aliases: []config.Alias{}, + Default: false, + }, + { + Name: "dockerConfigJSON", + ResourceRef: []config.ResourceReference{ + { + Name: "dockerConfigFileVaultSecretName", + Type: "vaultSecretFile", + Default: "docker-config", + }, + }, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_dockerConfigJSON"), + }, + { + Name: "pushLocalDockerImage", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "bool", + Mandatory: false, + Aliases: []config.Alias{}, + Default: false, + }, + { + Name: "localDockerImagePath", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_localDockerImagePath"), + }, + { + Name: "targetArchitecture", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"STEPS", "PARAMETERS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_targetArchitecture"), + }, + }, + }, + Containers: []config.Container{ + {Image: "gcr.io/go-containerregistry/crane:debug", EnvVars: []config.EnvVar{{Name: "container", Value: "docker"}}, Options: []config.Option{{Name: "-u", Value: "0"}, {Name: "--entrypoint", Value: ""}}}, + }, + }, + } + return theMetaData +} diff --git a/cmd/imagePushToRegistry_generated_test.go b/cmd/imagePushToRegistry_generated_test.go new file mode 100644 index 0000000000..9d434cc7f1 --- /dev/null +++ b/cmd/imagePushToRegistry_generated_test.go @@ -0,0 +1,20 @@ +//go:build unit +// +build unit + +package cmd + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestImagePushToRegistryCommand(t *testing.T) { + t.Parallel() + + testCmd := ImagePushToRegistryCommand() + + // only high level testing performed - details are tested in step generation procedure + assert.Equal(t, "imagePushToRegistry", testCmd.Use, "command name incorrect") + +} diff --git a/cmd/imagePushToRegistry_test.go b/cmd/imagePushToRegistry_test.go new file mode 100644 index 0000000000..db26057e4f --- /dev/null +++ b/cmd/imagePushToRegistry_test.go @@ -0,0 +1,210 @@ +package cmd + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + dockermock "github.com/SAP/jenkins-library/pkg/docker/mock" + "github.com/SAP/jenkins-library/pkg/mock" +) + +const ( + customDockerConfig = 
`{"auths":{"source.registry":{"auth":"c291cmNldXNlcjpzb3VyY2VwYXNzd29yZA=="},"target.registry":{"auth":"dGFyZ2V0dXNlcjp0YXJnZXRwYXNzd29yZA=="}}}` + dockerConfig = `{ + "auths": { + "source.registry": { + "auth": "c291cmNldXNlcjpzb3VyY2VwYXNzd29yZA==" + }, + "target.registry": { + "auth": "dGFyZ2V0dXNlcjp0YXJnZXRwYXNzd29yZA==" + }, + "test.registry": { + "auth": "dGVzdHVzZXI6dGVzdHBhc3N3b3Jk" + } + } +}` +) + +type imagePushToRegistryMockUtils struct { + *mock.ExecMockRunner + *mock.FilesMock + *dockermock.CraneMockUtils +} + +func newImagePushToRegistryMockUtils(craneUtils *dockermock.CraneMockUtils) *imagePushToRegistryMockUtils { + utils := &imagePushToRegistryMockUtils{ + ExecMockRunner: &mock.ExecMockRunner{}, + FilesMock: &mock.FilesMock{}, + CraneMockUtils: craneUtils, + } + + return utils +} + +func TestRunImagePushToRegistry(t *testing.T) { + t.Parallel() + + t.Run("good case", func(t *testing.T) { + t.Parallel() + + config := imagePushToRegistryOptions{ + SourceRegistryURL: "https://source.registry", + SourceImages: []string{"source-image"}, + SourceRegistryUser: "sourceuser", + SourceRegistryPassword: "sourcepassword", + TargetRegistryURL: "https://target.registry", + TargetImages: map[string]any{"source-image": "target-image"}, + TargetRegistryUser: "targetuser", + TargetRegistryPassword: "targetpassword", + } + craneMockUtils := &dockermock.CraneMockUtils{} + utils := newImagePushToRegistryMockUtils(craneMockUtils) + err := runImagePushToRegistry(&config, nil, utils) + assert.NoError(t, err) + createdConfig, err := utils.FileRead(targetDockerConfigPath) + assert.NoError(t, err) + assert.Equal(t, customDockerConfig, string(createdConfig)) + }) + + t.Run("failed to copy image", func(t *testing.T) { + t.Parallel() + + config := imagePushToRegistryOptions{ + SourceRegistryURL: "https://source.registry", + SourceRegistryUser: "sourceuser", + SourceRegistryPassword: "sourcepassword", + SourceImages: []string{"source-image"}, + TargetRegistryURL: "https://target.registry", + TargetRegistryUser: "targetuser", + TargetRegistryPassword: "targetpassword", + TargetImageTag: "0.0.1", + } + craneMockUtils := &dockermock.CraneMockUtils{ + ErrCopyImage: dockermock.ErrCopyImage, + } + utils := newImagePushToRegistryMockUtils(craneMockUtils) + err := runImagePushToRegistry(&config, nil, utils) + assert.EqualError(t, err, "failed to copy images: copy image err") + }) + + t.Run("failed to push local image", func(t *testing.T) { + t.Parallel() + + config := imagePushToRegistryOptions{ + TargetImages: map[string]any{"img": "source-image"}, + TargetImageTag: "0.0.1", + TargetRegistryURL: "https://target.registry", + TargetRegistryUser: "targetuser", + TargetRegistryPassword: "targetpassword", + LocalDockerImagePath: "/local/path", + PushLocalDockerImage: true, + } + craneMockUtils := &dockermock.CraneMockUtils{ + ErrLoadImage: dockermock.ErrLoadImage, + } + utils := newImagePushToRegistryMockUtils(craneMockUtils) + err := runImagePushToRegistry(&config, nil, utils) + assert.EqualError(t, err, "failed to push local image to \"target.registry\": load image err") + }) +} + +func TestHandleCredentialsForPrivateRegistry(t *testing.T) { + t.Parallel() + + craneMockUtils := &dockermock.CraneMockUtils{} + t.Run("no custom docker config provided", func(t *testing.T) { + t.Parallel() + + utils := newImagePushToRegistryMockUtils(craneMockUtils) + utils.AddFile("targetDockerConfigPath", []byte("abc")) + err := handleCredentialsForPrivateRegistry("", "target.registry", "targetuser", "targetpassword", utils) + 
assert.NoError(t, err) + createdConfigFile, err := utils.FileRead(targetDockerConfigPath) + assert.NoError(t, err) + assert.Equal(t, `{"auths":{"target.registry":{"auth":"dGFyZ2V0dXNlcjp0YXJnZXRwYXNzd29yZA=="}}}`, string(createdConfigFile)) + }) + + t.Run("custom docker config provided", func(t *testing.T) { + t.Parallel() + + utils := newImagePushToRegistryMockUtils(craneMockUtils) + utils.AddFile(targetDockerConfigPath, []byte(customDockerConfig)) + err := handleCredentialsForPrivateRegistry(targetDockerConfigPath, "test.registry", "testuser", "testpassword", utils) + assert.NoError(t, err) + createdConfigFile, err := utils.FileRead(targetDockerConfigPath) + assert.NoError(t, err) + assert.Equal(t, dockerConfig, string(createdConfigFile)) + }) + + t.Run("wrong format of docker config", func(t *testing.T) { + t.Parallel() + + utils := newImagePushToRegistryMockUtils(craneMockUtils) + utils.AddFile(targetDockerConfigPath, []byte(`{auths:}`)) + err := handleCredentialsForPrivateRegistry("", "test.registry", "testuser", "testpassword", utils) + assert.EqualError(t, err, "failed to create new docker config: failed to unmarshal json file '/root/.docker/config.json': invalid character 'a' looking for beginning of object key string") + }) +} + +func TestPushLocalImageToTargetRegistry(t *testing.T) { + t.Parallel() + t.Run("good case", func(t *testing.T) { + t.Parallel() + + craneMockUtils := &dockermock.CraneMockUtils{} + config := &imagePushToRegistryOptions{ + PushLocalDockerImage: true, + LocalDockerImagePath: "/image/path", + TargetRegistryURL: "https://target.registry", + TagLatest: false, + } + utils := newImagePushToRegistryMockUtils(craneMockUtils) + err := pushLocalImageToTargetRegistry(config, utils) + assert.NoError(t, err) + }) + + t.Run("bad case - failed to load image", func(t *testing.T) { + t.Parallel() + + craneMockUtils := &dockermock.CraneMockUtils{ + ErrLoadImage: dockermock.ErrLoadImage, + } + config := &imagePushToRegistryOptions{ + PushLocalDockerImage: true, + LocalDockerImagePath: "/image/path", + TargetRegistryURL: "https://target.registry", + TagLatest: false, + } + utils := newImagePushToRegistryMockUtils(craneMockUtils) + err := pushLocalImageToTargetRegistry(config, utils) + assert.EqualError(t, err, "load image err") + }) + + t.Run("bad case - failed to push image", func(t *testing.T) { + t.Parallel() + + craneMockUtils := &dockermock.CraneMockUtils{ + ErrPushImage: dockermock.ErrPushImage, + } + config := &imagePushToRegistryOptions{ + PushLocalDockerImage: true, + LocalDockerImagePath: "/image/path", + TargetRegistryURL: "https://target.registry", + TargetImages: map[string]any{"image1": "my-image"}, + TagLatest: true, + } + utils := newImagePushToRegistryMockUtils(craneMockUtils) + err := pushLocalImageToTargetRegistry(config, utils) + assert.EqualError(t, err, "push image err") + }) +} + +func TestMapSourceTargetImages(t *testing.T) { + expected := map[string]any{ + "img1": "img1", "img2": "img2", + } + sourceImages := []string{"img1", "img2"} + got := mapSourceTargetImages(sourceImages) + assert.Equal(t, got, expected) +} diff --git a/cmd/influxWriteData_generated.go b/cmd/influxWriteData_generated.go index 3d83f5a33e..803c51b3dc 100644 --- a/cmd/influxWriteData_generated.go +++ b/cmd/influxWriteData_generated.go @@ -62,7 +62,7 @@ func InfluxWriteDataCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || 
len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/integrationArtifactDeploy_generated.go b/cmd/integrationArtifactDeploy_generated.go index f71285ebf1..bd973e31f0 100644 --- a/cmd/integrationArtifactDeploy_generated.go +++ b/cmd/integrationArtifactDeploy_generated.go @@ -58,7 +58,7 @@ func IntegrationArtifactDeployCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/integrationArtifactDownload_generated.go b/cmd/integrationArtifactDownload_generated.go index 776bef048f..7fe0e94995 100644 --- a/cmd/integrationArtifactDownload_generated.go +++ b/cmd/integrationArtifactDownload_generated.go @@ -60,7 +60,7 @@ func IntegrationArtifactDownloadCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/integrationArtifactGetMplStatus_generated.go b/cmd/integrationArtifactGetMplStatus_generated.go index c402abaff8..95b9df203d 100644 --- a/cmd/integrationArtifactGetMplStatus_generated.go +++ b/cmd/integrationArtifactGetMplStatus_generated.go @@ -91,7 +91,7 @@ func IntegrationArtifactGetMplStatusCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/integrationArtifactGetServiceEndpoint_generated.go b/cmd/integrationArtifactGetServiceEndpoint_generated.go index 0c4960d1ac..58450b3d13 100644 --- a/cmd/integrationArtifactGetServiceEndpoint_generated.go +++ b/cmd/integrationArtifactGetServiceEndpoint_generated.go @@ -89,7 +89,7 @@ func IntegrationArtifactGetServiceEndpointCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/integrationArtifactResource_generated.go b/cmd/integrationArtifactResource_generated.go index 99f7f3d008..3694d031cf 100644 --- a/cmd/integrationArtifactResource_generated.go +++ b/cmd/integrationArtifactResource_generated.go @@ -60,7 +60,7 @@ func IntegrationArtifactResourceCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} 
logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/integrationArtifactTransport_generated.go b/cmd/integrationArtifactTransport_generated.go index 4b580af082..4bb67ed73e 100644 --- a/cmd/integrationArtifactTransport_generated.go +++ b/cmd/integrationArtifactTransport_generated.go @@ -61,7 +61,7 @@ func IntegrationArtifactTransportCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/integrationArtifactTriggerIntegrationTest_generated.go b/cmd/integrationArtifactTriggerIntegrationTest_generated.go index 91a8678c43..349672ff37 100644 --- a/cmd/integrationArtifactTriggerIntegrationTest_generated.go +++ b/cmd/integrationArtifactTriggerIntegrationTest_generated.go @@ -94,7 +94,7 @@ func IntegrationArtifactTriggerIntegrationTestCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/integrationArtifactUnDeploy_generated.go b/cmd/integrationArtifactUnDeploy_generated.go index 6268cd3f11..d6372cca5a 100644 --- a/cmd/integrationArtifactUnDeploy_generated.go +++ b/cmd/integrationArtifactUnDeploy_generated.go @@ -58,7 +58,7 @@ func IntegrationArtifactUnDeployCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/integrationArtifactUpdateConfiguration_generated.go b/cmd/integrationArtifactUpdateConfiguration_generated.go index 2c4758325a..c0742a66b9 100644 --- a/cmd/integrationArtifactUpdateConfiguration_generated.go +++ b/cmd/integrationArtifactUpdateConfiguration_generated.go @@ -61,7 +61,7 @@ func IntegrationArtifactUpdateConfigurationCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/integrationArtifactUpload_generated.go b/cmd/integrationArtifactUpload_generated.go index b8ed6bd737..5a5ac1aba7 100644 --- a/cmd/integrationArtifactUpload_generated.go +++ b/cmd/integrationArtifactUpload_generated.go @@ -61,7 +61,7 @@ func IntegrationArtifactUploadCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: 
GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/isChangeInDevelopment_generated.go b/cmd/isChangeInDevelopment_generated.go index 2bbff4c257..d2f83100eb 100644 --- a/cmd/isChangeInDevelopment_generated.go +++ b/cmd/isChangeInDevelopment_generated.go @@ -94,7 +94,7 @@ func IsChangeInDevelopmentCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/jsonApplyPatch_generated.go b/cmd/jsonApplyPatch_generated.go index d28c565272..ec6b0a9e15 100644 --- a/cmd/jsonApplyPatch_generated.go +++ b/cmd/jsonApplyPatch_generated.go @@ -59,7 +59,7 @@ This step can, e.g., be used if there is a json schema which needs to be patched log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/kanikoExecute.go b/cmd/kanikoExecute.go index 91469991ce..d18fa13370 100644 --- a/cmd/kanikoExecute.go +++ b/cmd/kanikoExecute.go @@ -226,7 +226,13 @@ func runKanikoExecute(config *kanikoExecuteOptions, telemetryData *telemetry.Cus "--context-sub-path", entry.ContextSubPath, "--destination", fmt.Sprintf("%v/%v", containerRegistry, containerImageNameAndTag), ) - if err = runKaniko(config.DockerfilePath, buildOptions, config.ReadImageDigest, execRunner, fileUtils, commonPipelineEnvironment); err != nil { + + dockerfilePath := config.DockerfilePath + if entry.DockerfilePath != "" { + dockerfilePath = entry.DockerfilePath + } + + if err = runKaniko(dockerfilePath, buildOptions, config.ReadImageDigest, execRunner, fileUtils, commonPipelineEnvironment); err != nil { return fmt.Errorf("multipleImages: failed to build image '%v' using '%v': %w", entry.ContainerImageName, config.DockerfilePath, err) } @@ -251,7 +257,13 @@ func runKanikoExecute(config *kanikoExecuteOptions, telemetryData *telemetry.Cus "--context-sub-path", entry.ContextSubPath, "--destination", entry.ContainerImage, ) - if err = runKaniko(config.DockerfilePath, buildOptions, config.ReadImageDigest, execRunner, fileUtils, commonPipelineEnvironment); err != nil { + + dockerfilePath := config.DockerfilePath + if entry.DockerfilePath != "" { + dockerfilePath = entry.DockerfilePath + } + + if err = runKaniko(dockerfilePath, buildOptions, config.ReadImageDigest, execRunner, fileUtils, commonPipelineEnvironment); err != nil { return fmt.Errorf("multipleImages: failed to build image '%v' using '%v': %w", containerImageName, config.DockerfilePath, err) } @@ -262,8 +274,11 @@ func runKanikoExecute(config *kanikoExecuteOptions, telemetryData *telemetry.Cus } } + // Docker image tags don't allow plus signs in tags, thus replacing with dash + containerImageTag := strings.ReplaceAll(config.ContainerImageTag, "+", "-") + // for compatibility reasons also fill single imageNameTag field with "root" image in commonPipelineEnvironment - containerImageNameAndTag := fmt.Sprintf("%v:%v", config.ContainerImageName, config.ContainerImageTag) + containerImageNameAndTag := fmt.Sprintf("%v:%v", 
config.ContainerImageName, containerImageTag) commonPipelineEnvironment.container.imageNameTag = containerImageNameAndTag commonPipelineEnvironment.container.registryURL = config.ContainerRegistryURL @@ -405,6 +420,7 @@ func runKaniko(dockerFilepath string, buildOptions []string, readDigest bool, ex type multipleImageConf struct { ContextSubPath string `json:"contextSubPath,omitempty"` + DockerfilePath string `json:"dockerfilePath,omitempty"` ContainerImageName string `json:"containerImageName,omitempty"` ContainerImageTag string `json:"containerImageTag,omitempty"` ContainerImage string `json:"containerImage,omitempty"` diff --git a/cmd/kanikoExecute_generated.go b/cmd/kanikoExecute_generated.go index 28cc101840..aa3c2a3707 100644 --- a/cmd/kanikoExecute_generated.go +++ b/cmd/kanikoExecute_generated.go @@ -230,7 +230,7 @@ Following final image names will be built: log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/kanikoExecute_test.go b/cmd/kanikoExecute_test.go index 0571b4be77..b8aa6483ce 100644 --- a/cmd/kanikoExecute_test.go +++ b/cmd/kanikoExecute_test.go @@ -50,12 +50,12 @@ func TestRunKanikoExecute(t *testing.T) { // required due to config resolution during build settings retrieval // ToDo: proper mocking - openFileBak := configOptions.openFile + openFileBak := configOptions.OpenFile defer func() { - configOptions.openFile = openFileBak + configOptions.OpenFile = openFileBak }() - configOptions.openFile = configOpenFileMock + configOptions.OpenFile = configOpenFileMock t.Run("success case", func(t *testing.T) { config := &kanikoExecuteOptions{ diff --git a/cmd/karmaExecuteTests_generated.go b/cmd/karmaExecuteTests_generated.go index 71a769f369..3ea87312d4 100644 --- a/cmd/karmaExecuteTests_generated.go +++ b/cmd/karmaExecuteTests_generated.go @@ -115,7 +115,7 @@ In the Docker network, the containers can be referenced by the values provided i log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/kubernetesDeploy_generated.go b/cmd/kubernetesDeploy_generated.go index 2dce2d8869..5938aa7678 100644 --- a/cmd/kubernetesDeploy_generated.go +++ b/cmd/kubernetesDeploy_generated.go @@ -116,7 +116,7 @@ helm upgrade --install --force --namespace 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/malwareExecuteScan_generated.go b/cmd/malwareExecuteScan_generated.go index 0f7602c81f..e2dc5df394 100644 --- a/cmd/malwareExecuteScan_generated.go +++ b/cmd/malwareExecuteScan_generated.go @@ -114,7 +114,7 @@ func MalwareExecuteScanCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || 
len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/mavenBuild.go b/cmd/mavenBuild.go index 81fbb24278..c1e61c0a55 100644 --- a/cmd/mavenBuild.go +++ b/cmd/mavenBuild.go @@ -134,7 +134,10 @@ func runMavenBuild(config *mavenBuildOptions, telemetryData *telemetry.CustomDat mavenOptions.ProjectSettingsFile = projectSettingsFilePath } - deployFlags := []string{"-Dmaven.main.skip=true", "-Dmaven.test.skip=true", "-Dmaven.install.skip=true"} + deployFlags := []string{} + if len(config.DeployFlags) > 0 { + deployFlags = append(deployFlags, config.DeployFlags...) + } if (len(config.AltDeploymentRepositoryID) > 0) && (len(config.AltDeploymentRepositoryURL) > 0) { deployFlags = append(deployFlags, "-DaltDeploymentRepository="+config.AltDeploymentRepositoryID+"::default::"+config.AltDeploymentRepositoryURL) } diff --git a/cmd/mavenBuild_generated.go b/cmd/mavenBuild_generated.go index ba17bd1f8b..11bdc1ea28 100644 --- a/cmd/mavenBuild_generated.go +++ b/cmd/mavenBuild_generated.go @@ -39,6 +39,7 @@ type mavenBuildOptions struct { Publish bool `json:"publish,omitempty"` JavaCaCertFilePath string `json:"javaCaCertFilePath,omitempty"` BuildSettingsInfo string `json:"buildSettingsInfo,omitempty"` + DeployFlags []string `json:"deployFlags,omitempty"` } type mavenBuildCommonPipelineEnvironment struct { @@ -186,7 +187,7 @@ general: log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) @@ -267,6 +268,7 @@ func addMavenBuildFlags(cmd *cobra.Command, stepConfig *mavenBuildOptions) { cmd.Flags().BoolVar(&stepConfig.Publish, "publish", false, "Configures maven to run the deploy plugin to publish artifacts to a repository.") cmd.Flags().StringVar(&stepConfig.JavaCaCertFilePath, "javaCaCertFilePath", os.Getenv("PIPER_javaCaCertFilePath"), "path to the cacerts file used by Java. When maven publish is set to True and customTlsCertificateLinks (to deploy the artifact to a repository with a self signed cert) are provided to trust the self signed certs, Piper will extend the existing Java cacerts to include the new self signed certs. if not provided Piper will search for the cacerts in $JAVA_HOME/jre/lib/security/cacerts") cmd.Flags().StringVar(&stepConfig.BuildSettingsInfo, "buildSettingsInfo", os.Getenv("PIPER_buildSettingsInfo"), "build settings info is typically filled by the step automatically to create information about the build settings that were used during the maven build . 
This information is typically used for compliance related processes.") + cmd.Flags().StringSliceVar(&stepConfig.DeployFlags, "deployFlags", []string{`-Dmaven.main.skip=true`, `-Dmaven.test.skip=true`, `-Dmaven.install.skip=true`}, "maven deploy flags that will be used when publish is detected.") } @@ -283,6 +285,9 @@ func mavenBuildMetadata() config.StepData { Secrets: []config.StepSecrets{ {Name: "altDeploymentRepositoryPasswordId", Description: "Jenkins credentials ID containing the artifact deployment repository password.", Type: "jenkins"}, }, + Resources: []config.StepResources{ + {Type: "stash"}, + }, Parameters: []config.StepParameters{ { Name: "pomPath", @@ -493,6 +498,15 @@ func mavenBuildMetadata() config.StepData { Aliases: []config.Alias{}, Default: os.Getenv("PIPER_buildSettingsInfo"), }, + { + Name: "deployFlags", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"STEPS", "STAGES", "PARAMETERS"}, + Type: "[]string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: []string{`-Dmaven.main.skip=true`, `-Dmaven.test.skip=true`, `-Dmaven.install.skip=true`}, + }, }, }, Containers: []config.Container{ diff --git a/cmd/mavenBuild_test.go b/cmd/mavenBuild_test.go index a5f2a3fa33..bb0cd11674 100644 --- a/cmd/mavenBuild_test.go +++ b/cmd/mavenBuild_test.go @@ -101,7 +101,7 @@ func TestMavenBuild(t *testing.T) { t.Run("mavenBuild with deploy must skip build, install and test", func(t *testing.T) { mockedUtils := newMavenMockUtils() - config := mavenBuildOptions{Publish: true, Verify: false} + config := mavenBuildOptions{Publish: true, Verify: false, DeployFlags: []string{"-Dmaven.main.skip=true", "-Dmaven.test.skip=true", "-Dmaven.install.skip=true"}} err := runMavenBuild(&config, nil, &mockedUtils, &cpe) diff --git a/cmd/mavenExecuteIntegration_generated.go b/cmd/mavenExecuteIntegration_generated.go index 0a2a87e8a6..6e4e1c20db 100644 --- a/cmd/mavenExecuteIntegration_generated.go +++ b/cmd/mavenExecuteIntegration_generated.go @@ -108,7 +108,7 @@ the integration tests via the Jacoco Maven-plugin.`, log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/mavenExecuteStaticCodeChecks_generated.go b/cmd/mavenExecuteStaticCodeChecks_generated.go index 727724c731..e82bdb07b8 100644 --- a/cmd/mavenExecuteStaticCodeChecks_generated.go +++ b/cmd/mavenExecuteStaticCodeChecks_generated.go @@ -75,7 +75,7 @@ For PMD the failure priority and the max allowed violations are configurable via log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/mavenExecute_generated.go b/cmd/mavenExecute_generated.go index 18079fad2e..b8afc09f5d 100644 --- a/cmd/mavenExecute_generated.go +++ b/cmd/mavenExecute_generated.go @@ -64,7 +64,7 @@ func MavenExecuteCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || 
len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/metadata_generated.go b/cmd/metadata_generated.go index 05eb2eb6d7..0554696056 100644 --- a/cmd/metadata_generated.go +++ b/cmd/metadata_generated.go @@ -72,6 +72,7 @@ func GetAllStepMetadata() map[string]config.StepData { "gradleExecuteBuild": gradleExecuteBuildMetadata(), "hadolintExecute": hadolintExecuteMetadata(), "helmExecute": helmExecuteMetadata(), + "imagePushToRegistry": imagePushToRegistryMetadata(), "influxWriteData": influxWriteDataMetadata(), "integrationArtifactDeploy": integrationArtifactDeployMetadata(), "integrationArtifactDownload": integrationArtifactDownloadMetadata(), diff --git a/cmd/mtaBuild.go b/cmd/mtaBuild.go index cc3e6e21f6..0982c0cb96 100644 --- a/cmd/mtaBuild.go +++ b/cmd/mtaBuild.go @@ -206,6 +206,10 @@ func runMtaBuild(config mtaBuildOptions, call = append(call, "--source", getSourcePath(config)) call = append(call, "--target", getAbsPath(getMtarFileRoot(config))) + if config.CreateBOM { + call = append(call, "--sbom-file-path", filepath.FromSlash("sbom-gen/bom-mta.xml")) + } + if config.Jobs > 0 { call = append(call, "--mode=verbose") call = append(call, "--jobs="+strconv.Itoa(config.Jobs)) diff --git a/cmd/mtaBuild_generated.go b/cmd/mtaBuild_generated.go index 6d7d665211..76a64e355e 100644 --- a/cmd/mtaBuild_generated.go +++ b/cmd/mtaBuild_generated.go @@ -42,6 +42,7 @@ type mtaBuildOptions struct { Publish bool `json:"publish,omitempty"` Profiles []string `json:"profiles,omitempty"` BuildSettingsInfo string `json:"buildSettingsInfo,omitempty"` + CreateBOM bool `json:"createBOM,omitempty"` } type mtaBuildCommonPipelineEnvironment struct { @@ -159,7 +160,7 @@ func MtaBuildCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) @@ -243,6 +244,7 @@ func addMtaBuildFlags(cmd *cobra.Command, stepConfig *mtaBuildOptions) { cmd.Flags().BoolVar(&stepConfig.Publish, "publish", false, "pushed mtar artifact to altDeploymentRepositoryUrl/altDeploymentRepositoryID when set to true") cmd.Flags().StringSliceVar(&stepConfig.Profiles, "profiles", []string{}, "Defines list of maven build profiles to be used. profiles will overwrite existing values in the global settings xml at $M2_HOME/conf/settings.xml") cmd.Flags().StringVar(&stepConfig.BuildSettingsInfo, "buildSettingsInfo", os.Getenv("PIPER_buildSettingsInfo"), "build settings info is typically filled by the step automatically to create information about the build settings that were used during the mta build . 
This information is typically used for compliance related processes.") + cmd.Flags().BoolVar(&stepConfig.CreateBOM, "createBOM", false, "Creates the bill of materials (BOM) using CycloneDX plugin.") } @@ -488,6 +490,15 @@ func mtaBuildMetadata() config.StepData { Aliases: []config.Alias{}, Default: os.Getenv("PIPER_buildSettingsInfo"), }, + { + Name: "createBOM", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"GENERAL", "STEPS", "STAGES", "PARAMETERS"}, + Type: "bool", + Mandatory: false, + Aliases: []config.Alias{}, + Default: false, + }, }, }, Containers: []config.Container{ diff --git a/cmd/mtaBuild_test.go b/cmd/mtaBuild_test.go index 291fb84569..a3812900d4 100644 --- a/cmd/mtaBuild_test.go +++ b/cmd/mtaBuild_test.go @@ -289,6 +289,7 @@ func TestMtaBuild(t *testing.T) { func TestMtaBuildSourceDir(t *testing.T) { + cpe := mtaBuildCommonPipelineEnvironment{} t.Run("getSourcePath", func(t *testing.T) { t.Parallel() @@ -328,7 +329,6 @@ func TestMtaBuildSourceDir(t *testing.T) { t.Run("find build tool descriptor from configuration", func(t *testing.T) { t.Parallel() - cpe := mtaBuildCommonPipelineEnvironment{} t.Run("default mta.yaml", func(t *testing.T) { utilsMock := newMtaBuildTestUtilsBundle() @@ -358,6 +358,17 @@ func TestMtaBuildSourceDir(t *testing.T) { }) }) + t.Run("MTA build should enable create BOM", func(t *testing.T) { + utilsMock := newMtaBuildTestUtilsBundle() + + options := mtaBuildOptions{ApplicationName: "myApp", Platform: "CF", DefaultNpmRegistry: "https://example.org/npm", MtarName: "myName", Source: "./", Target: "./", CreateBOM: true} + utilsMock.AddFile("package.json", []byte("{\"name\": \"myName\", \"version\": \"1.2.3\"}")) + + err := runMtaBuild(options, &cpe, utilsMock) + assert.Nil(t, err) + assert.Contains(t, utilsMock.Calls[0].Params, "--sbom-file-path") + + }) } func TestMtaBuildMtar(t *testing.T) { diff --git a/cmd/newmanExecute_generated.go b/cmd/newmanExecute_generated.go index f73777c836..eff6f6f7a6 100644 --- a/cmd/newmanExecute_generated.go +++ b/cmd/newmanExecute_generated.go @@ -142,7 +142,7 @@ func NewmanExecuteCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/nexusUpload_generated.go b/cmd/nexusUpload_generated.go index c65709b69e..d94f9ad8c4 100644 --- a/cmd/nexusUpload_generated.go +++ b/cmd/nexusUpload_generated.go @@ -87,7 +87,7 @@ If an image for mavenExecute is configured, and npm packages are to be published log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/npmExecuteLint_generated.go b/cmd/npmExecuteLint_generated.go index 33afc2a7b4..5e730d6e78 100644 --- a/cmd/npmExecuteLint_generated.go +++ b/cmd/npmExecuteLint_generated.go @@ -62,7 +62,7 @@ either use ESLint configurations present in the project or use the provided gene log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 
0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/npmExecuteScripts.go b/cmd/npmExecuteScripts.go index 05a7c8eea5..b3de55b92b 100644 --- a/cmd/npmExecuteScripts.go +++ b/cmd/npmExecuteScripts.go @@ -1,6 +1,8 @@ package cmd import ( + "os" + "github.com/SAP/jenkins-library/pkg/buildsettings" "github.com/SAP/jenkins-library/pkg/log" "github.com/SAP/jenkins-library/pkg/npm" @@ -19,6 +21,11 @@ func npmExecuteScripts(config npmExecuteScriptsOptions, telemetryData *telemetry } func runNpmExecuteScripts(npmExecutor npm.Executor, config *npmExecuteScriptsOptions, commonPipelineEnvironment *npmExecuteScriptsCommonPipelineEnvironment) error { + // setting env. variable to omit installation of dev. dependencies + if config.Production { + os.Setenv("NODE_ENV", "production") + } + if config.Install { if len(config.BuildDescriptorList) > 0 { if err := npmExecutor.InstallAllDependencies(config.BuildDescriptorList); err != nil { @@ -79,14 +86,21 @@ func runNpmExecuteScripts(npmExecutor npm.Executor, config *npmExecuteScriptsOpt commonPipelineEnvironment.custom.buildSettingsInfo = buildSettingsInfo if config.Publish { - packageJSONFiles, err := npmExecutor.FindPackageJSONFilesWithExcludes(config.BuildDescriptorExcludeList) - if err != nil { - return err - } + if len(config.BuildDescriptorList) > 0 { + err = npmExecutor.PublishAllPackages(config.BuildDescriptorList, config.RepositoryURL, config.RepositoryUsername, config.RepositoryPassword, config.PackBeforePublish) + if err != nil { + return err + } + } else { + packageJSONFiles, err := npmExecutor.FindPackageJSONFilesWithExcludes(config.BuildDescriptorExcludeList) + if err != nil { + return err + } - err = npmExecutor.PublishAllPackages(packageJSONFiles, config.RepositoryURL, config.RepositoryUsername, config.RepositoryPassword, config.PackBeforePublish) - if err != nil { - return err + err = npmExecutor.PublishAllPackages(packageJSONFiles, config.RepositoryURL, config.RepositoryUsername, config.RepositoryPassword, config.PackBeforePublish) + if err != nil { + return err + } } } diff --git a/cmd/npmExecuteScripts_generated.go b/cmd/npmExecuteScripts_generated.go index 3cc7b95893..d04256a5cb 100644 --- a/cmd/npmExecuteScripts_generated.go +++ b/cmd/npmExecuteScripts_generated.go @@ -36,6 +36,7 @@ type npmExecuteScriptsOptions struct { RepositoryUsername string `json:"repositoryUsername,omitempty"` BuildSettingsInfo string `json:"buildSettingsInfo,omitempty"` PackBeforePublish bool `json:"packBeforePublish,omitempty"` + Production bool `json:"production,omitempty"` } type npmExecuteScriptsCommonPipelineEnvironment struct { @@ -161,7 +162,7 @@ and are exposed are environment variables that must be present in the environmen log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) @@ -238,7 +239,8 @@ func addNpmExecuteScriptsFlags(cmd *cobra.Command, stepConfig *npmExecuteScripts cmd.Flags().StringVar(&stepConfig.RepositoryPassword, "repositoryPassword", os.Getenv("PIPER_repositoryPassword"), "Password for the repository to which the project artifacts should be published.") 
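
The cmd/npmExecuteScripts.go hunk above introduces two behaviours: when `production` is set, `NODE_ENV=production` is exported so that npm omits devDependencies, and publishing now prefers an explicitly configured `buildDescriptorList` over scanning the workspace for `package.json` files. A minimal sketch of that control flow follows, assuming a stripped-down executor interface; `Executor`, `scriptsOptions`, `runPublish` and `noopExecutor` are illustrative names only, and the real `npm.Executor` in `pkg/npm` has more methods than shown here.

```go
// Sketch of the publish/production flow added in cmd/npmExecuteScripts.go.
// Executor stands in for the real npm.Executor; only the two calls used
// below are assumed to exist with roughly these signatures.
package main

import (
	"fmt"
	"os"
)

type Executor interface {
	FindPackageJSONFilesWithExcludes(excludeList []string) ([]string, error)
	PublishAllPackages(packages []string, repoURL, user, password string, packBeforePublish bool) error
}

type scriptsOptions struct {
	Production                 bool
	Publish                    bool
	BuildDescriptorList        []string
	BuildDescriptorExcludeList []string
	RepositoryURL              string
	RepositoryUsername         string
	RepositoryPassword         string
	PackBeforePublish          bool
}

func runPublish(npmExecutor Executor, cfg scriptsOptions) error {
	// NODE_ENV=production makes npm skip devDependencies during install.
	if cfg.Production {
		os.Setenv("NODE_ENV", "production")
	}
	if !cfg.Publish {
		return nil
	}
	// Prefer an explicitly configured list of build descriptors ...
	packages := cfg.BuildDescriptorList
	if len(packages) == 0 {
		// ... and only fall back to discovering package.json files,
		// honoring the configured exclude list.
		found, err := npmExecutor.FindPackageJSONFilesWithExcludes(cfg.BuildDescriptorExcludeList)
		if err != nil {
			return err
		}
		packages = found
	}
	return npmExecutor.PublishAllPackages(packages, cfg.RepositoryURL, cfg.RepositoryUsername, cfg.RepositoryPassword, cfg.PackBeforePublish)
}

// noopExecutor lets the sketch compile and run without a real npm setup.
type noopExecutor struct{}

func (noopExecutor) FindPackageJSONFilesWithExcludes([]string) ([]string, error) {
	return []string{"package.json"}, nil
}

func (noopExecutor) PublishAllPackages(p []string, _, _, _ string, _ bool) error {
	fmt.Println("would publish:", p)
	return nil
}

func main() {
	if err := runPublish(noopExecutor{}, scriptsOptions{Production: true, Publish: true}); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
```

Setting NODE_ENV process-wide is deliberate in the step: the new unit test below asserts that the variable still reads `production` after the run, so any script executed later in the same step inherits it.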
cmd.Flags().StringVar(&stepConfig.RepositoryUsername, "repositoryUsername", os.Getenv("PIPER_repositoryUsername"), "Username for the repository to which the project artifacts should be published.") cmd.Flags().StringVar(&stepConfig.BuildSettingsInfo, "buildSettingsInfo", os.Getenv("PIPER_buildSettingsInfo"), "build settings info is typically filled by the step automatically to create information about the build settings that were used during the npm build . This information is typically used for compliance related processes.") - cmd.Flags().BoolVar(&stepConfig.PackBeforePublish, "packBeforePublish", false, "used for executing npm pack first, followed by npm publish. This two step maybe required when you are building a scoped packages and have npm dependencies from the same scope") + cmd.Flags().BoolVar(&stepConfig.PackBeforePublish, "packBeforePublish", false, "used for executing npm pack first, followed by npm publish. This two step maybe required in two cases. case 1) When building multiple npm packages (multiple package.json) please keep this parameter true and also see `buildDescriptorList` or `buildDescriptorExcludeList` to choose which package(s) to publish. case 2)when you are building a single npm (single `package.json` in your repo) / multiple npm (multiple package.json) scoped package(s) and have npm dependencies from the same scope.") + cmd.Flags().BoolVar(&stepConfig.Production, "production", false, "used for omitting installation of dev. dependencies if true") } @@ -417,6 +419,15 @@ func npmExecuteScriptsMetadata() config.StepData { Aliases: []config.Alias{}, Default: false, }, + { + Name: "production", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"STEPS", "STAGES", "PARAMETERS"}, + Type: "bool", + Mandatory: false, + Aliases: []config.Alias{}, + Default: false, + }, }, }, Containers: []config.Container{ diff --git a/cmd/npmExecuteScripts_test.go b/cmd/npmExecuteScripts_test.go index e8c8053b94..c98bf70960 100644 --- a/cmd/npmExecuteScripts_test.go +++ b/cmd/npmExecuteScripts_test.go @@ -4,8 +4,10 @@ package cmd import ( + "os" "testing" + "github.com/SAP/jenkins-library/pkg/config" "github.com/SAP/jenkins-library/pkg/mock" "github.com/SAP/jenkins-library/pkg/npm" "github.com/stretchr/testify/assert" @@ -32,81 +34,105 @@ func TestNpmExecuteScripts(t *testing.T) { cpe := npmExecuteScriptsCommonPipelineEnvironment{} t.Run("Call with packagesList", func(t *testing.T) { - config := npmExecuteScriptsOptions{Install: true, RunScripts: []string{"ci-build", "ci-test"}, BuildDescriptorList: []string{"package.json", "src/package.json"}} + cfg := npmExecuteScriptsOptions{Install: true, RunScripts: []string{"ci-build", "ci-test"}, BuildDescriptorList: []string{"package.json", "src/package.json"}} utils := npm.NewNpmMockUtilsBundle() utils.AddFile("package.json", []byte("{\"name\": \"Test\" }")) utils.AddFile("src/package.json", []byte("{\"name\": \"Test\" }")) - npmExecutor := npm.NpmExecutorMock{Utils: utils, Config: npm.NpmConfig{Install: config.Install, RunScripts: config.RunScripts, PackagesList: config.BuildDescriptorList}} - err := runNpmExecuteScripts(&npmExecutor, &config, &cpe) + SetConfigOptions(ConfigCommandOptions{ + OpenFile: config.OpenPiperFile, + }) + + npmExecutor := npm.NpmExecutorMock{Utils: utils, Config: npm.NpmConfig{Install: cfg.Install, RunScripts: cfg.RunScripts, PackagesList: cfg.BuildDescriptorList}} + err := runNpmExecuteScripts(&npmExecutor, &cfg, &cpe) assert.NoError(t, err) }) t.Run("Call with excludeList", func(t *testing.T) { - config := 
npmExecuteScriptsOptions{Install: true, RunScripts: []string{"ci-build", "ci-test"}, BuildDescriptorExcludeList: []string{"**/path/**"}} + cfg := npmExecuteScriptsOptions{Install: true, RunScripts: []string{"ci-build", "ci-test"}, BuildDescriptorExcludeList: []string{"**/path/**"}} utils := npm.NewNpmMockUtilsBundle() utils.AddFile("package.json", []byte("{\"name\": \"Test\" }")) utils.AddFile("src/package.json", []byte("{\"name\": \"Test\" }")) - npmExecutor := npm.NpmExecutorMock{Utils: utils, Config: npm.NpmConfig{Install: config.Install, RunScripts: config.RunScripts, ExcludeList: config.BuildDescriptorExcludeList}} - err := runNpmExecuteScripts(&npmExecutor, &config, &cpe) + SetConfigOptions(ConfigCommandOptions{ + OpenFile: config.OpenPiperFile, + }) + + npmExecutor := npm.NpmExecutorMock{Utils: utils, Config: npm.NpmConfig{Install: cfg.Install, RunScripts: cfg.RunScripts, ExcludeList: cfg.BuildDescriptorExcludeList}} + err := runNpmExecuteScripts(&npmExecutor, &cfg, &cpe) assert.NoError(t, err) }) t.Run("Call with scriptOptions", func(t *testing.T) { - config := npmExecuteScriptsOptions{Install: true, RunScripts: []string{"ci-build", "ci-test"}, ScriptOptions: []string{"--run"}} + cfg := npmExecuteScriptsOptions{Install: true, RunScripts: []string{"ci-build", "ci-test"}, ScriptOptions: []string{"--run"}} utils := npm.NewNpmMockUtilsBundle() utils.AddFile("package.json", []byte("{\"name\": \"Test\" }")) utils.AddFile("src/package.json", []byte("{\"name\": \"Test\" }")) - npmExecutor := npm.NpmExecutorMock{Utils: utils, Config: npm.NpmConfig{Install: config.Install, RunScripts: config.RunScripts, ScriptOptions: config.ScriptOptions}} - err := runNpmExecuteScripts(&npmExecutor, &config, &cpe) + SetConfigOptions(ConfigCommandOptions{ + OpenFile: config.OpenPiperFile, + }) + + npmExecutor := npm.NpmExecutorMock{Utils: utils, Config: npm.NpmConfig{Install: cfg.Install, RunScripts: cfg.RunScripts, ScriptOptions: cfg.ScriptOptions}} + err := runNpmExecuteScripts(&npmExecutor, &cfg, &cpe) assert.NoError(t, err) }) t.Run("Call with install", func(t *testing.T) { - config := npmExecuteScriptsOptions{Install: true, RunScripts: []string{"ci-build", "ci-test"}} + cfg := npmExecuteScriptsOptions{Install: true, RunScripts: []string{"ci-build", "ci-test"}} utils := npm.NewNpmMockUtilsBundle() utils.AddFile("package.json", []byte("{\"name\": \"Test\" }")) utils.AddFile("src/package.json", []byte("{\"name\": \"Test\" }")) - npmExecutor := npm.NpmExecutorMock{Utils: utils, Config: npm.NpmConfig{Install: config.Install, RunScripts: config.RunScripts}} - err := runNpmExecuteScripts(&npmExecutor, &config, &cpe) + SetConfigOptions(ConfigCommandOptions{ + OpenFile: config.OpenPiperFile, + }) + + npmExecutor := npm.NpmExecutorMock{Utils: utils, Config: npm.NpmConfig{Install: cfg.Install, RunScripts: cfg.RunScripts}} + err := runNpmExecuteScripts(&npmExecutor, &cfg, &cpe) assert.NoError(t, err) }) t.Run("Call without install", func(t *testing.T) { - config := npmExecuteScriptsOptions{Install: true, RunScripts: []string{"ci-build", "ci-test"}} + cfg := npmExecuteScriptsOptions{Install: true, RunScripts: []string{"ci-build", "ci-test"}} utils := npm.NewNpmMockUtilsBundle() utils.AddFile("package.json", []byte("{\"name\": \"Test\" }")) utils.AddFile("src/package.json", []byte("{\"name\": \"Test\" }")) - npmExecutor := npm.NpmExecutorMock{Utils: utils, Config: npm.NpmConfig{Install: config.Install, RunScripts: config.RunScripts}} - err := runNpmExecuteScripts(&npmExecutor, &config, &cpe) + 
SetConfigOptions(ConfigCommandOptions{ + OpenFile: config.OpenPiperFile, + }) + + npmExecutor := npm.NpmExecutorMock{Utils: utils, Config: npm.NpmConfig{Install: cfg.Install, RunScripts: cfg.RunScripts}} + err := runNpmExecuteScripts(&npmExecutor, &cfg, &cpe) assert.NoError(t, err) }) t.Run("Call with virtualFrameBuffer", func(t *testing.T) { - config := npmExecuteScriptsOptions{Install: true, RunScripts: []string{"ci-build", "ci-test"}, VirtualFrameBuffer: true} + cfg := npmExecuteScriptsOptions{Install: true, RunScripts: []string{"ci-build", "ci-test"}, VirtualFrameBuffer: true} utils := npm.NewNpmMockUtilsBundle() utils.AddFile("package.json", []byte("{\"name\": \"Test\" }")) utils.AddFile("src/package.json", []byte("{\"name\": \"Test\" }")) - npmExecutor := npm.NpmExecutorMock{Utils: utils, Config: npm.NpmConfig{Install: config.Install, RunScripts: config.RunScripts, VirtualFrameBuffer: config.VirtualFrameBuffer}} - err := runNpmExecuteScripts(&npmExecutor, &config, &cpe) + SetConfigOptions(ConfigCommandOptions{ + OpenFile: config.OpenPiperFile, + }) + + npmExecutor := npm.NpmExecutorMock{Utils: utils, Config: npm.NpmConfig{Install: cfg.Install, RunScripts: cfg.RunScripts, VirtualFrameBuffer: cfg.VirtualFrameBuffer}} + err := runNpmExecuteScripts(&npmExecutor, &cfg, &cpe) assert.NoError(t, err) }) t.Run("Test integration with npm pkg", func(t *testing.T) { - config := npmExecuteScriptsOptions{Install: true, RunScripts: []string{"ci-build"}} + cfg := npmExecuteScriptsOptions{Install: true, RunScripts: []string{"ci-build"}} - options := npm.ExecutorOptions{DefaultNpmRegistry: config.DefaultNpmRegistry} + options := npm.ExecutorOptions{DefaultNpmRegistry: cfg.DefaultNpmRegistry} utils := newNpmMockUtilsBundle() utils.AddFile("package.json", []byte("{\"scripts\": { \"ci-build\": \"\" } }")) @@ -114,7 +140,11 @@ func TestNpmExecuteScripts(t *testing.T) { npmExecutor := npm.Execute{Utils: &utils, Options: options} - err := runNpmExecuteScripts(&npmExecutor, &config, &cpe) + SetConfigOptions(ConfigCommandOptions{ + OpenFile: config.OpenPiperFile, + }) + + err := runNpmExecuteScripts(&npmExecutor, &cfg, &cpe) if assert.NoError(t, err) { if assert.Equal(t, 4, len(utils.execRunner.Calls)) { @@ -126,17 +156,42 @@ func TestNpmExecuteScripts(t *testing.T) { }) t.Run("Call with createBOM", func(t *testing.T) { - config := npmExecuteScriptsOptions{CreateBOM: true, RunScripts: []string{"ci-build", "ci-test"}} + cfg := npmExecuteScriptsOptions{CreateBOM: true, RunScripts: []string{"ci-build", "ci-test"}} - options := npm.ExecutorOptions{DefaultNpmRegistry: config.DefaultNpmRegistry} + options := npm.ExecutorOptions{DefaultNpmRegistry: cfg.DefaultNpmRegistry} utils := newNpmMockUtilsBundle() utils.AddFile("package.json", []byte("{\"name\": \"Test\" }")) utils.AddFile("src/package.json", []byte("{\"name\": \"Test\" }")) + SetConfigOptions(ConfigCommandOptions{ + OpenFile: config.OpenPiperFile, + }) + npmExecutor := npm.Execute{Utils: &utils, Options: options} - err := runNpmExecuteScripts(&npmExecutor, &config, &cpe) + err := runNpmExecuteScripts(&npmExecutor, &cfg, &cpe) + + assert.NoError(t, err) + }) + + t.Run("Call with production", func(t *testing.T) { + cfg := npmExecuteScriptsOptions{Production: true, RunScripts: []string{"ci-build", "ci-test"}} + + options := npm.ExecutorOptions{DefaultNpmRegistry: cfg.DefaultNpmRegistry} + + utils := newNpmMockUtilsBundle() + utils.AddFile("package.json", []byte("{\"name\": \"Test\" }")) + utils.AddFile("src/package.json", []byte("{\"name\": \"Test\" }")) + + 
SetConfigOptions(ConfigCommandOptions{ + OpenFile: config.OpenPiperFile, + }) + npmExecutor := npm.Execute{Utils: &utils, Options: options} + err := runNpmExecuteScripts(&npmExecutor, &cfg, &cpe) assert.NoError(t, err) + + v := os.Getenv("NODE_ENV") + assert.Equal(t, "production", v) }) } diff --git a/cmd/pipelineCreateScanSummary_generated.go b/cmd/pipelineCreateScanSummary_generated.go index 21db7defce..237686530c 100644 --- a/cmd/pipelineCreateScanSummary_generated.go +++ b/cmd/pipelineCreateScanSummary_generated.go @@ -60,7 +60,7 @@ It is for example used to create a markdown file which can be used to create a G log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/piper.go b/cmd/piper.go index 08d0464bf9..9d440be570 100644 --- a/cmd/piper.go +++ b/cmd/piper.go @@ -201,6 +201,7 @@ func Execute() { rootCmd.AddCommand(TmsExportCommand()) rootCmd.AddCommand(IntegrationArtifactTransportCommand()) rootCmd.AddCommand(AscAppUploadCommand()) + rootCmd.AddCommand(ImagePushToRegistryCommand()) addRootFlags(rootCmd) diff --git a/cmd/protecodeExecuteScan_generated.go b/cmd/protecodeExecuteScan_generated.go index 52420897d4..de659bb299 100644 --- a/cmd/protecodeExecuteScan_generated.go +++ b/cmd/protecodeExecuteScan_generated.go @@ -187,7 +187,7 @@ BDBA (Protecode) uses a combination of static binary analysis techniques to X-ra log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/pythonBuild_generated.go b/cmd/pythonBuild_generated.go index d61770135e..66d532ef3f 100644 --- a/cmd/pythonBuild_generated.go +++ b/cmd/pythonBuild_generated.go @@ -96,7 +96,7 @@ func PythonBuildCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/shellExecute_generated.go b/cmd/shellExecute_generated.go index 7ac0a335e8..6a59375810 100644 --- a/cmd/shellExecute_generated.go +++ b/cmd/shellExecute_generated.go @@ -59,7 +59,7 @@ func ShellExecuteCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/sonarExecuteScan_generated.go b/cmd/sonarExecuteScan_generated.go index 7ab6195c6b..83750660d5 100644 --- a/cmd/sonarExecuteScan_generated.go +++ b/cmd/sonarExecuteScan_generated.go @@ -180,7 +180,7 @@ func SonarExecuteScanCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if 
len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/terraformExecute_generated.go b/cmd/terraformExecute_generated.go index e41d9103a7..f72d11a84d 100644 --- a/cmd/terraformExecute_generated.go +++ b/cmd/terraformExecute_generated.go @@ -94,7 +94,7 @@ func TerraformExecuteCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/tmsExport.go b/cmd/tmsExport.go index 1def2705b0..ec9c4ed822 100644 --- a/cmd/tmsExport.go +++ b/cmd/tmsExport.go @@ -28,7 +28,7 @@ func tmsExport(exportConfig tmsExportOptions, telemetryData *telemetry.CustomDat func runTmsExport(exportConfig tmsExportOptions, communicationInstance tms.CommunicationInterface, utils tms.TmsUtils) error { config := convertExportOptions(exportConfig) - fileId, errUploadFile := tms.UploadFile(config, communicationInstance, utils) + fileInfo, errUploadFile := tms.UploadFile(config, communicationInstance, utils) if errUploadFile != nil { return errUploadFile } @@ -38,7 +38,7 @@ func runTmsExport(exportConfig tmsExportOptions, communicationInstance tms.Commu return errUploadDescriptors } - _, errExportFileToNode := communicationInstance.ExportFileToNode(config.NodeName, fileId, config.CustomDescription, config.NamedUser) + _, errExportFileToNode := communicationInstance.ExportFileToNode(fileInfo, config.NodeName, config.CustomDescription, config.NamedUser) if errExportFileToNode != nil { log.SetErrorCategory(log.ErrorService) return fmt.Errorf("failed to export file to node: %w", errExportFileToNode) @@ -49,7 +49,11 @@ func runTmsExport(exportConfig tmsExportOptions, communicationInstance tms.Commu func convertExportOptions(exportConfig tmsExportOptions) tms.Options { var config tms.Options - config.TmsServiceKey = exportConfig.TmsServiceKey + config.ServiceKey = exportConfig.ServiceKey + if exportConfig.ServiceKey == "" && exportConfig.TmsServiceKey != "" { + config.ServiceKey = exportConfig.TmsServiceKey + log.Entry().Warn("DEPRECATION WARNING: The tmsServiceKey parameter has been deprecated, please use the serviceKey parameter instead.") + } config.CustomDescription = exportConfig.CustomDescription if config.CustomDescription == "" { config.CustomDescription = tms.DEFAULT_TR_DESCRIPTION diff --git a/cmd/tmsExport_generated.go b/cmd/tmsExport_generated.go index 33b0855bd1..1379ce1271 100644 --- a/cmd/tmsExport_generated.go +++ b/cmd/tmsExport_generated.go @@ -19,6 +19,7 @@ import ( type tmsExportOptions struct { TmsServiceKey string `json:"tmsServiceKey,omitempty"` + ServiceKey string `json:"serviceKey,omitempty"` CustomDescription string `json:"customDescription,omitempty"` NamedUser string `json:"namedUser,omitempty"` NodeName string `json:"nodeName,omitempty"` @@ -83,7 +84,7 @@ For more information, see [official documentation of SAP Cloud Transport Managem !!! 
note "Prerequisites" * You have subscribed to and set up TMS, as described in [Initial Setup](https://help.sap.com/viewer/7f7160ec0d8546c6b3eab72fb5ad6fd8/Cloud/en-US/66fd7283c62f48adb23c56fb48c84a60.html), which includes the configuration of your transport landscape. -* A corresponding service key has been created, as described in [Set Up the Environment to Transport Content Archives directly in an Application](https://help.sap.com/viewer/7f7160ec0d8546c6b3eab72fb5ad6fd8/Cloud/en-US/8d9490792ed14f1bbf8a6ac08a6bca64.html). This service key (JSON) must be stored as a secret text within the Jenkins secure store or provided as value of tmsServiceKey parameter.`, +* A corresponding service key has been created, as described in [Set Up the Environment to Transport Content Archives directly in an Application](https://help.sap.com/viewer/7f7160ec0d8546c6b3eab72fb5ad6fd8/Cloud/en-US/8d9490792ed14f1bbf8a6ac08a6bca64.html). This service key (JSON) must be stored as a secret text within the Jenkins secure store or provided as value of serviceKey parameter.`, PreRunE: func(cmd *cobra.Command, _ []string) error { startTime = time.Now() log.SetStepName(STEP_NAME) @@ -101,13 +102,14 @@ For more information, see [official documentation of SAP Cloud Transport Managem return err } log.RegisterSecret(stepConfig.TmsServiceKey) + log.RegisterSecret(stepConfig.ServiceKey) if len(GeneralConfig.HookConfig.SentryConfig.Dsn) > 0 { sentryHook := log.NewSentryHook(GeneralConfig.HookConfig.SentryConfig.Dsn, GeneralConfig.CorrelationID) log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) @@ -170,7 +172,8 @@ For more information, see [official documentation of SAP Cloud Transport Managem } func addTmsExportFlags(cmd *cobra.Command, stepConfig *tmsExportOptions) { - cmd.Flags().StringVar(&stepConfig.TmsServiceKey, "tmsServiceKey", os.Getenv("PIPER_tmsServiceKey"), "Service key JSON string to access the SAP Cloud Transport Management service instance APIs. If not specified and if pipeline is running on Jenkins, service key, stored under ID provided with credentialsId parameter, is used.") + cmd.Flags().StringVar(&stepConfig.TmsServiceKey, "tmsServiceKey", os.Getenv("PIPER_tmsServiceKey"), "DEPRECATION WARNING: This parameter has been deprecated, please use the serviceKey parameter instead, which supports both service key for TMS (SAP Cloud Transport Management service), as well as service key for CALM (SAP Cloud Application Lifecycle Management) service.\nService key JSON string to access the SAP Cloud Transport Management service instance APIs.\n") + cmd.Flags().StringVar(&stepConfig.ServiceKey, "serviceKey", os.Getenv("PIPER_serviceKey"), "Service key JSON string to access TMS (SAP Cloud Transport Management service) instance APIs. This can be a service key for TMS, or a service key for CALM (SAP Cloud Application Lifecycle Management) service. If not specified and if pipeline is running on Jenkins, service key, stored under ID provided with credentialsId parameter, is used.\n") cmd.Flags().StringVar(&stepConfig.CustomDescription, "customDescription", os.Getenv("PIPER_customDescription"), "Can be used as the description of a transport request. 
Will overwrite the default, which is corresponding Git commit ID.") cmd.Flags().StringVar(&stepConfig.NamedUser, "namedUser", `Piper-Pipeline`, "Defines the named user to execute transport request with. The default value is 'Piper-Pipeline'. If pipeline is running on Jenkins, the name of the user, who started the job, is tried to be used at first.") cmd.Flags().StringVar(&stepConfig.NodeName, "nodeName", os.Getenv("PIPER_nodeName"), "Defines the name of the export node - starting node in TMS landscape. The transport request is added to the queues of the follow-on nodes of export node.") @@ -179,7 +182,7 @@ func addTmsExportFlags(cmd *cobra.Command, stepConfig *tmsExportOptions) { cmd.Flags().StringVar(&stepConfig.Proxy, "proxy", os.Getenv("PIPER_proxy"), "Proxy URL which should be used for communication with the SAP Cloud Transport Management service backend.") - cmd.MarkFlagRequired("tmsServiceKey") + cmd.MarkFlagRequired("serviceKey") cmd.MarkFlagRequired("nodeName") } @@ -194,18 +197,27 @@ func tmsExportMetadata() config.StepData { Spec: config.StepSpec{ Inputs: config.StepInputs{ Secrets: []config.StepSecrets{ - {Name: "credentialsId", Description: "Jenkins 'Secret text' credentials ID containing service key for SAP Cloud Transport Management service.", Type: "jenkins"}, + {Name: "credentialsId", Description: "Jenkins 'Secret text' credentials ID containing service key for TMS (SAP Cloud Transport Management service) or CALM (SAP Cloud Application Lifecycle Management) service.", Type: "jenkins"}, }, Resources: []config.StepResources{ {Name: "buildResult", Type: "stash"}, }, Parameters: []config.StepParameters{ { - Name: "tmsServiceKey", + Name: "tmsServiceKey", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STEPS", "STAGES"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_tmsServiceKey"), + }, + { + Name: "serviceKey", ResourceRef: []config.ResourceReference{ { Name: "credentialsId", - Param: "tmsServiceKey", + Param: "serviceKey", Type: "secret", }, }, @@ -213,7 +225,7 @@ func tmsExportMetadata() config.StepData { Type: "string", Mandatory: true, Aliases: []config.Alias{}, - Default: os.Getenv("PIPER_tmsServiceKey"), + Default: os.Getenv("PIPER_serviceKey"), }, { Name: "customDescription", diff --git a/cmd/tmsExport_test.go b/cmd/tmsExport_test.go index 76a759637b..72e43967b7 100644 --- a/cmd/tmsExport_test.go +++ b/cmd/tmsExport_test.go @@ -27,7 +27,8 @@ func newTmsExportTestsUtils() tmsExportMockUtils { return utils } -func (cim *communicationInstanceMock) ExportFileToNode(nodeName, fileId, description, namedUser string) (tms.NodeUploadResponseEntity, error) { +func (cim *communicationInstanceMock) ExportFileToNode(fileInfo tms.FileInfo, nodeName, description, namedUser string) (tms.NodeUploadResponseEntity, error) { + fileId := strconv.FormatInt(fileInfo.Id, 10) var nodeUploadResponseEntity tms.NodeUploadResponseEntity if description != CUSTOM_DESCRIPTION || nodeName != NODE_NAME || fileId != strconv.FormatInt(FILE_ID, 10) || namedUser != NAMED_USER { return nodeUploadResponseEntity, errors.New(INVALID_INPUT_MSG) @@ -149,3 +150,50 @@ func TestRunTmsExport(t *testing.T) { assert.EqualError(t, err, "failed to export file to node: Something went wrong on exporting file to node") }) } + +func Test_convertExportOptions(t *testing.T) { + t.Parallel() + mockServiceKey := `no real serviceKey json necessary for these tests` + + t.Run("Use of new serviceKey parameter works", func(t *testing.T) { + t.Parallel() + 
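
The serviceKey/tmsServiceKey handling in convertExportOptions above (and the analogous convertUploadOptions further down) follows a plain deprecation-fallback pattern: prefer the new parameter, fall back to the old one with a warning. A compact sketch using the repository's pkg/log for the warning; `resolveServiceKey` is an illustrative helper name and not part of this change set.

```go
// Sketch of the deprecation fallback used for the TMS service key:
// prefer the new serviceKey parameter and only fall back to the
// deprecated tmsServiceKey, emitting a warning when that happens.
package main

import (
	"fmt"

	"github.com/SAP/jenkins-library/pkg/log"
)

func resolveServiceKey(serviceKey, tmsServiceKey string) string {
	if serviceKey != "" {
		return serviceKey
	}
	if tmsServiceKey != "" {
		log.Entry().Warn("DEPRECATION WARNING: The tmsServiceKey parameter has been deprecated, please use the serviceKey parameter instead.")
		return tmsServiceKey
	}
	return ""
}

func main() {
	// prints "true" and logs the deprecation warning once
	fmt.Println(resolveServiceKey("", "legacy-key") != "")
}
```

The unit tests added for convertExportOptions and convertUploadOptions cover exactly these three cases: new parameter only, old parameter only, and both set (the new one wins).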
+ // init + config := tmsExportOptions{ServiceKey: mockServiceKey} + wantOptions := tms.Options{ServiceKey: mockServiceKey, CustomDescription: "Created by Piper"} + + // test + gotOptions := convertExportOptions(config) + + // assert + assert.Equal(t, wantOptions, gotOptions) + }) + + t.Run("Use of old tmsServiceKey parameter works as well", func(t *testing.T) { + t.Parallel() + + // init + config := tmsExportOptions{TmsServiceKey: mockServiceKey} + wantOptions := tms.Options{ServiceKey: mockServiceKey, CustomDescription: "Created by Piper"} + + // test + gotOptions := convertExportOptions(config) + + // assert + assert.Equal(t, wantOptions, gotOptions) + }) + + t.Run("Use of both tmsServiceKey and serviceKey parameter favors the new serviceKey parameter", func(t *testing.T) { + t.Parallel() + + // init + config := tmsExportOptions{ServiceKey: mockServiceKey, TmsServiceKey: "some other string"} + wantOptions := tms.Options{ServiceKey: mockServiceKey, CustomDescription: "Created by Piper"} + + // test + gotOptions := convertExportOptions(config) + + // assert + assert.Equal(t, wantOptions, gotOptions) + }) +} diff --git a/cmd/tmsUpload.go b/cmd/tmsUpload.go index 3fc053dac3..9fff8f938e 100644 --- a/cmd/tmsUpload.go +++ b/cmd/tmsUpload.go @@ -21,7 +21,7 @@ func tmsUpload(uploadConfig tmsUploadOptions, telemetryData *telemetry.CustomDat func runTmsUpload(uploadConfig tmsUploadOptions, communicationInstance tms.CommunicationInterface, utils tms.TmsUtils) error { config := convertUploadOptions(uploadConfig) - fileId, errUploadFile := tms.UploadFile(config, communicationInstance, utils) + fileInfo, errUploadFile := tms.UploadFile(config, communicationInstance, utils) if errUploadFile != nil { return errUploadFile } @@ -31,7 +31,7 @@ func runTmsUpload(uploadConfig tmsUploadOptions, communicationInstance tms.Commu return errUploadDescriptors } - _, errUploadFileToNode := communicationInstance.UploadFileToNode(config.NodeName, fileId, config.CustomDescription, config.NamedUser) + _, errUploadFileToNode := communicationInstance.UploadFileToNode(fileInfo, config.NodeName, config.CustomDescription, config.NamedUser) if errUploadFileToNode != nil { log.SetErrorCategory(log.ErrorService) return fmt.Errorf("failed to upload file to node: %w", errUploadFileToNode) @@ -42,7 +42,11 @@ func runTmsUpload(uploadConfig tmsUploadOptions, communicationInstance tms.Commu func convertUploadOptions(uploadConfig tmsUploadOptions) tms.Options { var config tms.Options - config.TmsServiceKey = uploadConfig.TmsServiceKey + config.ServiceKey = uploadConfig.ServiceKey + if uploadConfig.ServiceKey == "" && uploadConfig.TmsServiceKey != "" { + config.ServiceKey = uploadConfig.TmsServiceKey + log.Entry().Warn("DEPRECATION WARNING: The tmsServiceKey parameter has been deprecated, please use the serviceKey parameter instead.") + } config.CustomDescription = uploadConfig.CustomDescription if config.CustomDescription == "" { config.CustomDescription = tms.DEFAULT_TR_DESCRIPTION diff --git a/cmd/tmsUpload_generated.go b/cmd/tmsUpload_generated.go index c76a1365f2..547e3bfcf5 100644 --- a/cmd/tmsUpload_generated.go +++ b/cmd/tmsUpload_generated.go @@ -19,6 +19,7 @@ import ( type tmsUploadOptions struct { TmsServiceKey string `json:"tmsServiceKey,omitempty"` + ServiceKey string `json:"serviceKey,omitempty"` CustomDescription string `json:"customDescription,omitempty"` NamedUser string `json:"namedUser,omitempty"` NodeName string `json:"nodeName,omitempty"` @@ -84,7 +85,7 @@ For more information, see [official documentation of SAP 
Cloud Transport Managem !!! note "Prerequisites" * You have subscribed to and set up TMS, as described in [Initial Setup](https://help.sap.com/viewer/7f7160ec0d8546c6b3eab72fb5ad6fd8/Cloud/en-US/66fd7283c62f48adb23c56fb48c84a60.html), which includes the configuration of a node to be used for uploading an MTA file. -* A corresponding service key has been created, as described in [Set Up the Environment to Transport Content Archives directly in an Application](https://help.sap.com/viewer/7f7160ec0d8546c6b3eab72fb5ad6fd8/Cloud/en-US/8d9490792ed14f1bbf8a6ac08a6bca64.html). This service key (JSON) must be stored as a secret text within the Jenkins secure store or provided as value of tmsServiceKey parameter.`, +* A corresponding service key has been created, as described in [Set Up the Environment to Transport Content Archives directly in an Application](https://help.sap.com/viewer/7f7160ec0d8546c6b3eab72fb5ad6fd8/Cloud/en-US/8d9490792ed14f1bbf8a6ac08a6bca64.html). This service key (JSON) must be stored as a secret text within the Jenkins secure store or provided as value of serviceKey parameter.`, PreRunE: func(cmd *cobra.Command, _ []string) error { startTime = time.Now() log.SetStepName(STEP_NAME) @@ -102,13 +103,14 @@ For more information, see [official documentation of SAP Cloud Transport Managem return err } log.RegisterSecret(stepConfig.TmsServiceKey) + log.RegisterSecret(stepConfig.ServiceKey) if len(GeneralConfig.HookConfig.SentryConfig.Dsn) > 0 { sentryHook := log.NewSentryHook(GeneralConfig.HookConfig.SentryConfig.Dsn, GeneralConfig.CorrelationID) log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) @@ -171,7 +173,8 @@ For more information, see [official documentation of SAP Cloud Transport Managem } func addTmsUploadFlags(cmd *cobra.Command, stepConfig *tmsUploadOptions) { - cmd.Flags().StringVar(&stepConfig.TmsServiceKey, "tmsServiceKey", os.Getenv("PIPER_tmsServiceKey"), "Service key JSON string to access the SAP Cloud Transport Management service instance APIs. If not specified and if pipeline is running on Jenkins, service key, stored under ID provided with credentialsId parameter, is used.") + cmd.Flags().StringVar(&stepConfig.TmsServiceKey, "tmsServiceKey", os.Getenv("PIPER_tmsServiceKey"), "DEPRECATION WARNING: This parameter has been deprecated, please use the serviceKey parameter instead, which supports both service key for TMS (SAP Cloud Transport Management service), as well as service key for CALM (SAP Cloud Application Lifecycle Management) service.\nService key JSON string to access the SAP Cloud Transport Management service instance APIs.\n") + cmd.Flags().StringVar(&stepConfig.ServiceKey, "serviceKey", os.Getenv("PIPER_serviceKey"), "Service key JSON string to access TMS (SAP Cloud Transport Management service) instance APIs. This can be a service key for TMS, or a service key for CALM (SAP Cloud Application Lifecycle Management) service. If not specified and if pipeline is running on Jenkins, service key, stored under ID provided with credentialsId parameter, is used.\n") cmd.Flags().StringVar(&stepConfig.CustomDescription, "customDescription", os.Getenv("PIPER_customDescription"), "Can be used as the description of a transport request. 
Will overwrite the default, which is corresponding Git commit ID.") cmd.Flags().StringVar(&stepConfig.NamedUser, "namedUser", `Piper-Pipeline`, "Defines the named user to execute transport request with. The default value is 'Piper-Pipeline'. If pipeline is running on Jenkins, the name of the user, who started the job, is tried to be used at first.") cmd.Flags().StringVar(&stepConfig.NodeName, "nodeName", os.Getenv("PIPER_nodeName"), "Defines the name of the node to which the *.mtar file should be uploaded.") @@ -181,7 +184,7 @@ func addTmsUploadFlags(cmd *cobra.Command, stepConfig *tmsUploadOptions) { cmd.Flags().StringVar(&stepConfig.Proxy, "proxy", os.Getenv("PIPER_proxy"), "Proxy URL which should be used for communication with the SAP Cloud Transport Management service backend.") cmd.Flags().StringSliceVar(&stepConfig.StashContent, "stashContent", []string{`buildResult`}, "If specific stashes should be considered during Jenkins execution, their names need to be passed as a list via this parameter, e.g. stashContent: [\"deployDescriptor\", \"buildResult\"]. By default, the build result is considered.") - cmd.MarkFlagRequired("tmsServiceKey") + cmd.MarkFlagRequired("serviceKey") cmd.MarkFlagRequired("nodeName") } @@ -196,18 +199,27 @@ func tmsUploadMetadata() config.StepData { Spec: config.StepSpec{ Inputs: config.StepInputs{ Secrets: []config.StepSecrets{ - {Name: "credentialsId", Description: "Jenkins 'Secret text' credentials ID containing service key for SAP Cloud Transport Management service.", Type: "jenkins"}, + {Name: "credentialsId", Description: "Jenkins 'Secret text' credentials ID containing service key for TMS (SAP Cloud Transport Management service) or CALM (SAP Cloud Application Lifecycle Management) service.", Type: "jenkins"}, }, Resources: []config.StepResources{ {Name: "buildResult", Type: "stash"}, }, Parameters: []config.StepParameters{ { - Name: "tmsServiceKey", + Name: "tmsServiceKey", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STEPS", "STAGES"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_tmsServiceKey"), + }, + { + Name: "serviceKey", ResourceRef: []config.ResourceReference{ { Name: "credentialsId", - Param: "tmsServiceKey", + Param: "serviceKey", Type: "secret", }, }, @@ -215,7 +227,7 @@ func tmsUploadMetadata() config.StepData { Type: "string", Mandatory: true, Aliases: []config.Alias{}, - Default: os.Getenv("PIPER_tmsServiceKey"), + Default: os.Getenv("PIPER_serviceKey"), }, { Name: "customDescription", diff --git a/cmd/tmsUpload_test.go b/cmd/tmsUpload_test.go index d263db4538..40a1c0f63a 100644 --- a/cmd/tmsUpload_test.go +++ b/cmd/tmsUpload_test.go @@ -134,7 +134,8 @@ func (cim *communicationInstanceMock) UploadFile(file, namedUser string) (tms.Fi } } -func (cim *communicationInstanceMock) UploadFileToNode(nodeName, fileId, description, namedUser string) (tms.NodeUploadResponseEntity, error) { +func (cim *communicationInstanceMock) UploadFileToNode(fileInfo tms.FileInfo, nodeName, description, namedUser string) (tms.NodeUploadResponseEntity, error) { + fileId := strconv.FormatInt(fileInfo.Id, 10) var nodeUploadResponseEntity tms.NodeUploadResponseEntity if description != CUSTOM_DESCRIPTION || nodeName != NODE_NAME || fileId != strconv.FormatInt(FILE_ID, 10) || namedUser != NAMED_USER { return nodeUploadResponseEntity, errors.New(INVALID_INPUT_MSG) @@ -505,3 +506,50 @@ func TestRunTmsUpload(t *testing.T) { assert.EqualError(t, err, "failed to upload file to node: Something 
went wrong on uploading file to node") }) } + +func Test_convertUploadOptions(t *testing.T) { + t.Parallel() + mockServiceKey := `no real serviceKey json necessary for these tests` + + t.Run("Use of new serviceKey parameter works", func(t *testing.T) { + t.Parallel() + + // init + config := tmsUploadOptions{ServiceKey: mockServiceKey} + wantOptions := tms.Options{ServiceKey: mockServiceKey, CustomDescription: "Created by Piper"} + + // test + gotOptions := convertUploadOptions(config) + + // assert + assert.Equal(t, wantOptions, gotOptions) + }) + + t.Run("Use of old tmsServiceKey parameter works as well", func(t *testing.T) { + t.Parallel() + + // init + config := tmsUploadOptions{TmsServiceKey: mockServiceKey} + wantOptions := tms.Options{ServiceKey: mockServiceKey, CustomDescription: "Created by Piper"} + + // test + gotOptions := convertUploadOptions(config) + + // assert + assert.Equal(t, wantOptions, gotOptions) + }) + + t.Run("Use of both tmsServiceKey and serviceKey parameter favors the new serviceKey parameter", func(t *testing.T) { + t.Parallel() + + // init + config := tmsUploadOptions{ServiceKey: mockServiceKey, TmsServiceKey: "some other string"} + wantOptions := tms.Options{ServiceKey: mockServiceKey, CustomDescription: "Created by Piper"} + + // test + gotOptions := convertUploadOptions(config) + + // assert + assert.Equal(t, wantOptions, gotOptions) + }) +} diff --git a/cmd/transportRequestDocIDFromGit_generated.go b/cmd/transportRequestDocIDFromGit_generated.go index 08c3ef6a10..94504474fb 100644 --- a/cmd/transportRequestDocIDFromGit_generated.go +++ b/cmd/transportRequestDocIDFromGit_generated.go @@ -90,7 +90,7 @@ It is primarily made for the transportRequestUploadSOLMAN step to provide the ch log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/transportRequestReqIDFromGit_generated.go b/cmd/transportRequestReqIDFromGit_generated.go index cfd43bb2b5..7b91a5e6d1 100644 --- a/cmd/transportRequestReqIDFromGit_generated.go +++ b/cmd/transportRequestReqIDFromGit_generated.go @@ -90,7 +90,7 @@ It is primarily made for the transport request upload steps to provide the trans log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/transportRequestUploadCTS_generated.go b/cmd/transportRequestUploadCTS_generated.go index 5ba35df1d4..d537ece756 100644 --- a/cmd/transportRequestUploadCTS_generated.go +++ b/cmd/transportRequestUploadCTS_generated.go @@ -101,7 +101,7 @@ It processes the results of the ` + "`" + `ui5 build` + "`" + ` command of the S log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/transportRequestUploadRFC_generated.go 
b/cmd/transportRequestUploadRFC_generated.go index 7ac8f0330b..e9514c38ee 100644 --- a/cmd/transportRequestUploadRFC_generated.go +++ b/cmd/transportRequestUploadRFC_generated.go @@ -101,7 +101,7 @@ func TransportRequestUploadRFCCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/transportRequestUploadSOLMAN_generated.go b/cmd/transportRequestUploadSOLMAN_generated.go index 3e6329046e..4885836890 100644 --- a/cmd/transportRequestUploadSOLMAN_generated.go +++ b/cmd/transportRequestUploadSOLMAN_generated.go @@ -100,7 +100,7 @@ The application ID specifies how the file needs to be handled on server side.`, log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/uiVeri5ExecuteTests_generated.go b/cmd/uiVeri5ExecuteTests_generated.go index 0c49a522d5..b1e173a3fb 100644 --- a/cmd/uiVeri5ExecuteTests_generated.go +++ b/cmd/uiVeri5ExecuteTests_generated.go @@ -103,7 +103,7 @@ func UiVeri5ExecuteTestsCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/vaultRotateSecretId_generated.go b/cmd/vaultRotateSecretId_generated.go index eaadc9fa8f..da9a1407c0 100644 --- a/cmd/vaultRotateSecretId_generated.go +++ b/cmd/vaultRotateSecretId_generated.go @@ -77,7 +77,7 @@ func VaultRotateSecretIdCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/cmd/whitesourceExecuteScan.go b/cmd/whitesourceExecuteScan.go index 11b1c42051..7a195f0d0a 100644 --- a/cmd/whitesourceExecuteScan.go +++ b/cmd/whitesourceExecuteScan.go @@ -18,6 +18,7 @@ import ( "github.com/SAP/jenkins-library/pkg/command" "github.com/SAP/jenkins-library/pkg/format" + "github.com/SAP/jenkins-library/pkg/golang" "github.com/SAP/jenkins-library/pkg/log" "github.com/SAP/jenkins-library/pkg/npm" "github.com/SAP/jenkins-library/pkg/piperutils" @@ -157,6 +158,13 @@ func whitesourceExecuteScan(config ScanOptions, _ *telemetry.CustomData, commonP } func runWhitesourceExecuteScan(ctx context.Context, config *ScanOptions, scan *ws.Scan, utils whitesourceUtils, sys whitesource, commonPipelineEnvironment *whitesourceExecuteScanCommonPipelineEnvironment, influx *whitesourceExecuteScanInflux) error { + if config != nil && config.PrivateModules != "" && config.PrivateModulesGitToken != "" { + //configuring go private packages + if 
err := golang.PrepareGolangPrivatePackages("WhitesourceExecuteStep", config.PrivateModules, config.PrivateModulesGitToken); err != nil { + log.Entry().Warningf("couldn't set private packages for golang, error: %s", err.Error()) + } + } + if err := resolveAggregateProjectName(config, scan, sys); err != nil { return errors.Wrapf(err, "failed to resolve and aggregate project name") } @@ -516,13 +524,6 @@ func checkPolicyViolations(ctx context.Context, config *ScanOptions, scan *ws.Sc return piperutils.Path{}, fmt.Errorf("failed to retrieve project policy alerts from WhiteSource: %w", err) } - // TODO add ignored alerts to list of all alerts - _, err = sys.GetProjectIgnoredAlertsByType(project.Token, "REJECTED_BY_POLICY_RESOURCE") - if err != nil { - return piperutils.Path{}, fmt.Errorf("failed to retrieve project policy ignored alerts from WhiteSource: %w", err) - } - // alerts = append(alerts, ignoredAlerts...) - policyViolationCount += len(alerts) allAlerts = append(allAlerts, alerts...) } @@ -758,7 +759,11 @@ func reportGitHubIssuesAndCreateReports( reportPaths = append(reportPaths, paths...) - sarif := ws.CreateSarifResultFile(scan, &allAlerts) + combinedAlerts := make([]ws.Alert, 0, len(allAlerts)+len(allAssessedAlerts)) + combinedAlerts = append(combinedAlerts, allAlerts...) + combinedAlerts = append(combinedAlerts, allAssessedAlerts...) + + sarif := ws.CreateSarifResultFile(scan, &combinedAlerts) paths, err = ws.WriteSarifFile(sarif, utils) if err != nil { errorsOccured = append(errorsOccured, fmt.Sprint(err)) @@ -808,18 +813,15 @@ func readAssessmentsFromFile(assessmentFilePath string, utils whitesourceUtils) // checkSecurityViolations checks security violations and returns an error if the configured severity limit is crossed. Besides the potential error the list of unassessed and assessed alerts are being returned to allow generating reports and issues from the data. func checkProjectSecurityViolations(config *ScanOptions, cvssSeverityLimit float64, project ws.Project, sys whitesource, assessments *[]format.Assessment, influx *whitesourceExecuteScanInflux) (int, []ws.Alert, []ws.Alert, error) { // get project alerts (vulnerabilities) - assessedAlerts := []ws.Alert{} alerts, err := sys.GetProjectAlertsByType(project.Token, "SECURITY_VULNERABILITY") if err != nil { - return 0, alerts, assessedAlerts, fmt.Errorf("failed to retrieve project alerts from WhiteSource: %w", err) + return 0, alerts, []ws.Alert{}, fmt.Errorf("failed to retrieve project alerts from WhiteSource: %w", err) } - // TODO add ignored alerts to list of all alerts - _, err = sys.GetProjectIgnoredAlertsByType(project.Token, "SECURITY_VULNERABILITY") + assessedAlerts, err := sys.GetProjectIgnoredAlertsByType(project.Token, "SECURITY_VULNERABILITY") if err != nil { - return 0, alerts, assessedAlerts, fmt.Errorf("failed to retrieve project ignored alerts from WhiteSource: %w", err) + return 0, alerts, []ws.Alert{}, fmt.Errorf("failed to retrieve project ignored alerts from WhiteSource: %w", err) } - // alerts = append(alerts, ignoredAlerts...) 
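
whitesourceExecuteScan now calls golang.PrepareGolangPrivatePackages when both `privateModules` and `privateModulesGitToken` are configured. The following is a hedged sketch of what such a preparation step typically involves — exporting GOPRIVATE and rewriting HTTPS git URLs to carry the token. The actual pkg/golang helper may differ; `preparePrivateModules` and the GitHub host/token scheme below are assumptions for illustration only.

```go
// Hedged sketch of preparing Go private modules: bypass the module proxy and
// checksum DB for the listed patterns, and let git authenticate over HTTPS.
package main

import (
	"fmt"
	"os"
	"os/exec"
)

func preparePrivateModules(privateModules, gitToken string) error {
	// e.g. privateModules = "*.example.com,github.com/my-org/*"
	if err := os.Setenv("GOPRIVATE", privateModules); err != nil {
		return err
	}
	// Rewrite plain HTTPS GitHub URLs so `go get` can authenticate.
	// Assumption: GitHub-style token auth; adjust the host for other servers.
	key := fmt.Sprintf("url.https://x-access-token:%s@github.com/.insteadOf", gitToken)
	cmd := exec.Command("git", "config", "--global", key, "https://github.com/")
	cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr
	return cmd.Run()
}

func main() {
	if err := preparePrivateModules(os.Getenv("PIPER_privateModules"), os.Getenv("PIPER_privateModulesGitToken")); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
```

In the step itself a failure of this preparation is only logged via log.Entry().Warningf and the scan continues, so missing private-module access degrades the scan rather than aborting it.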
// filter alerts related to existing assessments filteredAlerts := []ws.Alert{} @@ -907,13 +909,6 @@ func aggregateVersionWideVulnerabilities(config *ScanOptions, utils whitesourceU return errors.Wrapf(err, "failed to get project alerts by type") } - // TODO add ignored alerts to list of all alerts - _, err = sys.GetProjectIgnoredAlertsByType(project.Token, "SECURITY_VULNERABILITY") - if err != nil { - return errors.Wrapf(err, "failed to get project ignored alerts by type") - } - // alerts = append(alerts, ignoredAlerts...) - log.Entry().Infof("Found project: %s with %v vulnerabilities.", project.Name, len(alerts)) versionWideAlerts = append(versionWideAlerts, alerts...) } diff --git a/cmd/whitesourceExecuteScan_generated.go b/cmd/whitesourceExecuteScan_generated.go index 673f7ee6d0..d034bf7f66 100644 --- a/cmd/whitesourceExecuteScan_generated.go +++ b/cmd/whitesourceExecuteScan_generated.go @@ -74,6 +74,8 @@ type whitesourceExecuteScanOptions struct { Repository string `json:"repository,omitempty"` Assignees []string `json:"assignees,omitempty"` CustomTLSCertificateLinks []string `json:"customTlsCertificateLinks,omitempty"` + PrivateModules string `json:"privateModules,omitempty"` + PrivateModulesGitToken string `json:"privateModulesGitToken,omitempty"` } type whitesourceExecuteScanCommonPipelineEnvironment struct { @@ -243,13 +245,14 @@ The step uses the so-called Mend Unified Agent. For details please refer to the log.RegisterSecret(stepConfig.OrgToken) log.RegisterSecret(stepConfig.UserToken) log.RegisterSecret(stepConfig.GithubToken) + log.RegisterSecret(stepConfig.PrivateModulesGitToken) if len(GeneralConfig.HookConfig.SentryConfig.Dsn) > 0 { sentryHook := log.NewSentryHook(GeneralConfig.HookConfig.SentryConfig.Dsn, GeneralConfig.CorrelationID) log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) @@ -366,6 +369,8 @@ func addWhitesourceExecuteScanFlags(cmd *cobra.Command, stepConfig *whitesourceE cmd.Flags().StringVar(&stepConfig.Repository, "repository", os.Getenv("PIPER_repository"), "Set the GitHub repository.") cmd.Flags().StringSliceVar(&stepConfig.Assignees, "assignees", []string{``}, "Defines the assignees for the Github Issue created/updated with the results of the scan as a list of login names.") cmd.Flags().StringSliceVar(&stepConfig.CustomTLSCertificateLinks, "customTlsCertificateLinks", []string{}, "List of download links to custom TLS certificates. 
This is required to ensure trusted connections to instances with repositories (like nexus) when publish flag is set to true.") + cmd.Flags().StringVar(&stepConfig.PrivateModules, "privateModules", os.Getenv("PIPER_privateModules"), "Tells go which modules shall be considered to be private (by setting [GOPRIVATE](https://pkg.go.dev/cmd/go#hdr-Configuration_for_downloading_non_public_code)).") + cmd.Flags().StringVar(&stepConfig.PrivateModulesGitToken, "privateModulesGitToken", os.Getenv("PIPER_privateModulesGitToken"), "GitHub personal access token as per https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line.") cmd.MarkFlagRequired("buildTool") cmd.MarkFlagRequired("orgToken") @@ -387,6 +392,7 @@ func whitesourceExecuteScanMetadata() config.StepData { {Name: "orgAdminUserTokenCredentialsId", Description: "Jenkins 'Secret text' credentials ID containing Whitesource org admin token.", Type: "jenkins", Aliases: []config.Alias{{Name: "whitesourceOrgAdminUserTokenCredentialsId", Deprecated: false}, {Name: "whitesource/orgAdminUserTokenCredentialsId", Deprecated: true}}}, {Name: "dockerConfigJsonCredentialsId", Description: "Jenkins 'Secret file' credentials ID containing Docker config.json (with registry credential(s)). You can find more details about the Docker credentials in the [Docker documentation](https://docs.docker.com/engine/reference/commandline/login/).", Type: "jenkins", Aliases: []config.Alias{{Name: "dockerCredentialsId", Deprecated: true}}}, {Name: "githubTokenCredentialsId", Description: "Jenkins 'Secret text' credentials ID containing token to authenticate to GitHub.", Type: "jenkins"}, + {Name: "golangPrivateModulesGitTokenCredentialsId", Description: "Jenkins 'Username with password' credentials ID containing username/password for http access to your git repos where your go private modules are stored.", Type: "jenkins"}, }, Resources: []config.StepResources{ {Name: "buildDescriptor", Type: "stash"}, @@ -967,6 +973,36 @@ func whitesourceExecuteScanMetadata() config.StepData { Aliases: []config.Alias{}, Default: []string{}, }, + { + Name: "privateModules", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"GENERAL", "STEPS", "STAGES", "PARAMETERS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_privateModules"), + }, + { + Name: "privateModulesGitToken", + ResourceRef: []config.ResourceReference{ + { + Name: "golangPrivateModulesGitTokenCredentialsId", + Param: "password", + Type: "secret", + }, + + { + Name: "golangPrivateModulesGitTokenVaultSecret", + Type: "vaultSecret", + Default: "golang", + }, + }, + Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_privateModulesGitToken"), + }, }, }, Containers: []config.Container{ diff --git a/cmd/xsDeploy_generated.go b/cmd/xsDeploy_generated.go index f36277b0c9..86fd060745 100644 --- a/cmd/xsDeploy_generated.go +++ b/cmd/xsDeploy_generated.go @@ -99,7 +99,7 @@ func XsDeployCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/documentation/docs/steps/gatlingExecuteTests.md 
b/documentation/docs/steps/gatlingExecuteTests.md index 428263a8ab..90dbda2981 100644 --- a/documentation/docs/steps/gatlingExecuteTests.md +++ b/documentation/docs/steps/gatlingExecuteTests.md @@ -19,5 +19,5 @@ We recommend to define values of step parameters via [config.yml file](../config Pipeline step: ```groovy -gatlingExecuteTests script: this, testModule: 'performance-tests/pom.xml' +gatlingExecuteTests script: this, pomPath: 'performance-tests/pom.xml' ``` diff --git a/documentation/docs/steps/imagePushToRegistry.md b/documentation/docs/steps/imagePushToRegistry.md new file mode 100644 index 0000000000..63991c1344 --- /dev/null +++ b/documentation/docs/steps/imagePushToRegistry.md @@ -0,0 +1,7 @@ +# ${docGenStepName} + +## ${docGenDescription} + +## ${docGenParameters} + +## ${docGenConfiguration} diff --git a/documentation/docs/steps/testsPublishResults.md b/documentation/docs/steps/testsPublishResults.md index 32ffdb8c3a..de13825771 100644 --- a/documentation/docs/steps/testsPublishResults.md +++ b/documentation/docs/steps/testsPublishResults.md @@ -58,24 +58,25 @@ testsPublishResults( ### jmeter -| parameter | mandatory | default | possible values | -| ----------|-----------|---------|-----------------| +| parameter | mandatory | default | possible values | +| ----------|-----------|--------------|-----------------| | pattern | no | `'**/*.jtl'` | | -| errorFailedThreshold | no | `20` | | -| errorUnstableThreshold | no | `10` | | -| errorUnstableResponseTimeThreshold | no | `` | | -| relativeFailedThresholdPositive | no | `0` | | -| relativeFailedThresholdNegative | no | `0` | | -| relativeUnstableThresholdPositive | no | `0` | | -| relativeUnstableThresholdNegative | no | `0` | | -| modeOfThreshold | no | `false` | true, false | -| modeThroughput | no | `false` | true, false | -| nthBuildNumber | no | `0` | | -| configType | no | `PRT` | | -| failBuildIfNoResultFile | no | `false` | true, false | -| compareBuildPrevious | no | `true` | true, false | -| archive | no | `false` | true, false | -| allowEmptyResults | no | `true` | true, false | +| errorFailedThreshold | no | `20` | | +| errorUnstableThreshold | no | `10` | | +| errorUnstableResponseTimeThreshold | no | `` | | +| relativeFailedThresholdPositive | no | `0` | | +| relativeFailedThresholdNegative | no | `0` | | +| relativeUnstableThresholdPositive | no | `0` | | +| relativeUnstableThresholdNegative | no | `0` | | +| modeOfThreshold | no | `false` | true, false | +| modeThroughput | no | `false` | true, false | +| nthBuildNumber | no | `0` | | +| configType | no | `PRT` | | +| failBuildIfNoResultFile | no | `false` | true, false | +| compareBuildPrevious | no | `true` | true, false | +| archive | no | `false` | true, false | +| allowEmptyResults | no | `true` | true, false | +| filterRegex | no | ' ' | | ## ${docGenConfiguration} diff --git a/documentation/mkdocs.yml b/documentation/mkdocs.yml index c3416e6525..211b35e23c 100644 --- a/documentation/mkdocs.yml +++ b/documentation/mkdocs.yml @@ -122,6 +122,7 @@ nav: - handlePipelineStepErrors: steps/handlePipelineStepErrors.md - healthExecuteCheck: steps/healthExecuteCheck.md - helmExecute: steps/helmExecute.md + - imagePushToRegistry: steps/imagePushToRegistry.md - influxWriteData: steps/influxWriteData.md - integrationArtifactDeploy: steps/integrationArtifactDeploy.md - integrationArtifactDownload: steps/integrationArtifactDownload.md diff --git a/go.mod b/go.mod index 2351952435..a435cc8a03 100644 --- a/go.mod +++ b/go.mod @@ -23,19 +23,19 @@ require ( 
github.com/evanphx/json-patch v5.6.0+incompatible github.com/getsentry/sentry-go v0.11.0 github.com/ghodss/yaml v1.0.1-0.20190212211648-25d852aebe32 - github.com/go-git/go-billy/v5 v5.3.1 - github.com/go-git/go-git/v5 v5.4.2 + github.com/go-git/go-billy/v5 v5.5.0 + github.com/go-git/go-git/v5 v5.10.1 github.com/go-openapi/runtime v0.24.1 github.com/go-openapi/strfmt v0.21.3 github.com/go-playground/locales v0.14.0 github.com/go-playground/universal-translator v0.18.0 github.com/go-playground/validator/v10 v10.11.0 - github.com/google/go-cmp v0.5.9 + github.com/google/go-cmp v0.6.0 github.com/google/go-containerregistry v0.13.0 github.com/google/go-github/v45 v45.2.0 github.com/google/uuid v1.3.1 github.com/hashicorp/go-retryablehttp v0.7.2 - github.com/hashicorp/vault v1.14.0 + github.com/hashicorp/vault v1.14.1 github.com/hashicorp/vault/api v1.9.2 github.com/iancoleman/orderedmap v0.2.0 github.com/imdario/mergo v0.3.15 @@ -56,9 +56,9 @@ require ( github.com/stretchr/testify v1.8.4 github.com/testcontainers/testcontainers-go v0.10.0 github.com/xuri/excelize/v2 v2.4.1 - golang.org/x/mod v0.12.0 - golang.org/x/oauth2 v0.12.0 - golang.org/x/text v0.13.0 + golang.org/x/mod v0.14.0 + golang.org/x/oauth2 v0.15.0 + golang.org/x/text v0.14.0 google.golang.org/api v0.126.0 gopkg.in/ini.v1 v1.66.6 gopkg.in/yaml.v2 v2.4.0 @@ -69,21 +69,22 @@ require ( require ( cloud.google.com/go/compute/metadata v0.2.3 // indirect + dario.cat/mergo v1.0.0 // indirect github.com/AdaLogics/go-fuzz-headers v0.0.0-20230106234847-43070de90fa1 // indirect github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.23 // indirect github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.6 // indirect github.com/axiomhq/hyperloglog v0.0.0-20220105174342-98591331716a // indirect github.com/boombuler/barcode v1.0.1 // indirect github.com/cloudflare/circl v1.3.3 // indirect - github.com/cyphar/filepath-securejoin v0.2.3 // indirect + github.com/cyphar/filepath-securejoin v0.2.4 // indirect github.com/dgryski/go-metro v0.0.0-20180109044635-280f6062b5bc // indirect github.com/duosecurity/duo_api_golang v0.0.0-20190308151101-6c680f768e74 // indirect - github.com/go-jose/go-jose/v3 v3.0.0 // indirect + github.com/go-jose/go-jose/v3 v3.0.1 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-ozzo/ozzo-validation v3.6.0+incompatible // indirect github.com/google/s2a-go v0.1.4 // indirect github.com/hashicorp/consul/sdk v0.13.1 // indirect - github.com/hashicorp/eventlogger v0.1.1 // indirect + github.com/hashicorp/eventlogger v0.2.1 // indirect github.com/hashicorp/go-kms-wrapping/entropy/v2 v2.0.0 // indirect github.com/hashicorp/go-kms-wrapping/v2 v2.0.9 // indirect github.com/hashicorp/go-kms-wrapping/wrappers/aead/v2 v2.0.7-1 // indirect @@ -101,15 +102,17 @@ require ( github.com/okta/okta-sdk-golang/v2 v2.12.1 // indirect github.com/oracle/oci-go-sdk/v60 v60.0.0 // indirect github.com/pires/go-proxyproto v0.6.1 // indirect + github.com/pjbgf/sha1cd v0.3.0 // indirect github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect github.com/pquerna/otp v1.2.1-0.20191009055518-468c2dd2b58d // indirect github.com/shirou/gopsutil/v3 v3.22.6 // indirect + github.com/skeema/knownhosts v1.2.1 // indirect github.com/sony/gobreaker v0.4.2-0.20210216022020-dd874f9dd33b // indirect github.com/yusufpapurcu/wmi v1.2.2 // indirect go.opentelemetry.io/otel v1.14.0 // indirect go.opentelemetry.io/otel/trace v1.14.0 // indirect golang.org/x/image v0.0.0-20220302094943-723b81ca9867 // indirect - golang.org/x/tools v0.7.0 // 
indirect + golang.org/x/tools v0.13.0 // indirect google.golang.org/genproto/googleapis/api v0.0.0-20230530153820-e85fd2cbaebc // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc // indirect nhooyr.io/websocket v1.8.7 // indirect @@ -143,8 +146,7 @@ require ( github.com/Masterminds/semver/v3 v3.2.1 // indirect github.com/Microsoft/go-winio v0.6.1 // indirect github.com/NYTimes/gziphandler v1.1.1 // indirect - github.com/ProtonMail/go-crypto v0.0.0-20230626094100-7e9e0395ebec // indirect - github.com/acomagu/bufpipe v1.0.3 // indirect + github.com/ProtonMail/go-crypto v0.0.0-20230828082145-3c4c8a2d2371 // indirect github.com/aliyun/alibaba-cloud-sdk-go v1.62.301 // indirect github.com/antchfx/xpath v1.2.0 // indirect github.com/armon/go-metrics v0.4.1 // indirect @@ -189,12 +191,12 @@ require ( github.com/docker/go-metrics v0.0.1 // indirect github.com/docker/go-units v0.5.0 // indirect github.com/emicklei/go-restful/v3 v3.10.1 // indirect - github.com/emirpasic/gods v1.12.0 // indirect + github.com/emirpasic/gods v1.18.1 // indirect github.com/evanphx/json-patch/v5 v5.6.0 // indirect github.com/fatih/color v1.15.0 // indirect github.com/frankban/quicktest v1.14.4 // indirect github.com/go-errors/errors v1.4.2 // indirect - github.com/go-git/gcfg v1.5.0 // indirect + github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect github.com/go-logr/logr v1.2.3 // indirect github.com/go-ole/go-ole v1.2.6 // indirect github.com/go-openapi/analysis v0.21.2 // indirect @@ -234,7 +236,7 @@ require ( github.com/hashicorp/go-plugin v1.4.9 // indirect github.com/hashicorp/go-raftchunking v0.6.3-0.20191002164813-7e9e8525653a // indirect github.com/hashicorp/go-rootcerts v1.0.2 // indirect - github.com/hashicorp/go-secure-stdlib/awsutil v0.2.2 // indirect + github.com/hashicorp/go-secure-stdlib/awsutil v0.2.3 // indirect github.com/hashicorp/go-secure-stdlib/base62 v0.1.2 // indirect github.com/hashicorp/go-secure-stdlib/mlock v0.1.3 // indirect github.com/hashicorp/go-secure-stdlib/parseutil v0.1.7 // indirect @@ -265,7 +267,7 @@ require ( github.com/josharian/intern v1.0.0 // indirect github.com/joyent/triton-go v1.7.1-0.20200416154420-6801d15b779f // indirect github.com/json-iterator/go v1.1.12 // indirect - github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351 // indirect + github.com/kevinburke/ssh_config v1.2.0 // indirect github.com/klauspost/compress v1.16.5 // indirect github.com/leodido/go-urn v1.2.1 // indirect github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de // indirect @@ -325,7 +327,7 @@ require ( github.com/tv42/httpunix v0.0.0-20191220191345-2ba4b9c3382c // indirect github.com/vbatts/tar-split v0.11.2 // indirect github.com/vmware/govmomi v0.18.0 // indirect - github.com/xanzy/ssh-agent v0.3.0 // indirect + github.com/xanzy/ssh-agent v0.3.3 // indirect github.com/xlab/treeprint v1.1.0 // indirect github.com/xuri/efp v0.0.0-20210322160811-ab561f5b45e3 // indirect go.etcd.io/bbolt v1.3.7 // indirect @@ -333,11 +335,11 @@ require ( go.opencensus.io v0.24.0 // indirect go.starlark.net v0.0.0-20200306205701-8dd3e2ee1dd5 // indirect go.uber.org/atomic v1.11.0 // indirect - golang.org/x/crypto v0.13.0 - golang.org/x/net v0.15.0 // indirect - golang.org/x/sync v0.2.0 - golang.org/x/sys v0.12.0 // indirect - golang.org/x/term v0.12.0 // indirect + golang.org/x/crypto v0.16.0 + golang.org/x/net v0.19.0 // indirect + golang.org/x/sync v0.5.0 + golang.org/x/sys v0.15.0 // indirect + golang.org/x/term v0.15.0 // indirect 
golang.org/x/time v0.3.0 // indirect golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect google.golang.org/appengine v1.6.7 // indirect @@ -355,7 +357,7 @@ require ( k8s.io/client-go v0.27.2 // indirect k8s.io/klog/v2 v2.90.1 // indirect k8s.io/kube-openapi v0.0.0-20230501164219-8b0f38b5fd1f // indirect - k8s.io/utils v0.0.0-20230220204549-a5ecb0141aa5 // indirect + k8s.io/utils v0.0.0-20230220204549-a5ecb0141aa5 oras.land/oras-go v1.2.3 // indirect sigs.k8s.io/kustomize/api v0.12.1 // indirect sigs.k8s.io/kustomize/kyaml v0.13.9 // indirect diff --git a/go.sum b/go.sum index ee4e5346d0..6fa95fe49c 100644 --- a/go.sum +++ b/go.sum @@ -54,6 +54,8 @@ cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9 cloud.google.com/go/storage v1.29.0 h1:6weCgzRvMg7lzuUurI4697AqIRPU1SvzHhynwpW31jI= cloud.google.com/go/storage v1.29.0/go.mod h1:4puEjyTKnku6gfKoTfNOU/W+a9JyuVNxjpS5GBrB8h4= code.cloudfoundry.org/gofileutils v0.0.0-20170111115228-4d0c80011a0f h1:UrKzEwTgeiff9vxdrfdqxibzpWjxLnuXDI5m6z3GJAk= +dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= +dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 h1:/vQbFIOMbk2FiG/kXiLl8BRyzTWDw7gX/Hz7Dd5eDMs= github.com/99designs/keyring v1.2.2 h1:pZd3neh/EmUzWONb35LxQfvuY7kiSXAq3HQd97+XBn0= @@ -159,6 +161,7 @@ github.com/Microsoft/go-winio v0.4.17-0.20210211115548-6eac466e5fa3/go.mod h1:JP github.com/Microsoft/go-winio v0.4.17-0.20210324224401-5516f17a5958/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= github.com/Microsoft/go-winio v0.4.17/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= github.com/Microsoft/go-winio v0.5.0/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= +github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= github.com/Microsoft/hcsshim v0.8.6/go.mod h1:Op3hHsoHPAvb6lceZHDtd9OkTew38wNoXnJs8iY7rUg= @@ -176,9 +179,8 @@ github.com/NYTimes/gziphandler v1.1.1 h1:ZUDjpQae29j0ryrS0u/B8HZfJBtBQHjqw2rQ2cq github.com/NYTimes/gziphandler v1.1.1/go.mod h1:n/CVRwUEOgIxrgPvAQhUUr9oeUtvrhMomdKFjzJNB0c= github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 h1:TngWCqHvy9oXAN6lEVMRuU21PR1EtLVZJmdB18Gu3Rw= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= -github.com/ProtonMail/go-crypto v0.0.0-20210428141323-04723f9f07d7/go.mod h1:z4/9nQmJSSwwds7ejkxaJwO37dru3geImFUdJlaLzQo= -github.com/ProtonMail/go-crypto v0.0.0-20230626094100-7e9e0395ebec h1:vV3RryLxt42+ZIVOFbYJCH1jsZNTNmj2NYru5zfx+4E= -github.com/ProtonMail/go-crypto v0.0.0-20230626094100-7e9e0395ebec/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= +github.com/ProtonMail/go-crypto v0.0.0-20230828082145-3c4c8a2d2371 h1:kkhsdkhsCvIsutKu5zLMgWtgh9YxGCNAw8Ad8hjwfYg= +github.com/ProtonMail/go-crypto v0.0.0-20230828082145-3c4c8a2d2371/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= github.com/PuerkitoBio/purell v1.0.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/purell v1.1.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= 
@@ -190,8 +192,6 @@ github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d h1:UrqY+r/O github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ= github.com/abdullin/seq v0.0.0-20160510034733-d5467c17e7af h1:DBNMBMuMiWYu0b+8KMJuWmfCkcxl09JwdlqwDZZ6U14= github.com/abdullin/seq v0.0.0-20160510034733-d5467c17e7af/go.mod h1:5Jv4cbFiHJMsVxt52+i0Ha45fjshj6wxYr1r19tB9bw= -github.com/acomagu/bufpipe v1.0.3 h1:fxAGrHZTgQ9w5QqVItgzwj235/uYZYgbXitB+dLupOk= -github.com/acomagu/bufpipe v1.0.3/go.mod h1:mxdxdup/WdsKVreO5GpW4+M/1CE2sMG4jeGJ2sYmHc4= github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM= github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= @@ -203,8 +203,7 @@ github.com/alexflint/go-filemutex v0.0.0-20171022225611-72bdc8eae2ae/go.mod h1:C github.com/aliyun/alibaba-cloud-sdk-go v1.62.301 h1:8mgvCpqsv3mQAcqZ/baAaMGUBj5J6MKMhxLd+K8L27Q= github.com/aliyun/alibaba-cloud-sdk-go v1.62.301/go.mod h1:Api2AkmMgGaSUAhmk76oaFObkoeCPc/bKAqcyplPODs= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= -github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239 h1:kFOfPq6dUM1hTo4JG6LR5AXSUEsOjtdm0kw0FtQtMJA= -github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c= +github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= github.com/antchfx/htmlquery v1.2.4 h1:qLteofCMe/KGovBI6SQgmou2QNyedFUW+pE+BpeZ494= github.com/antchfx/htmlquery v1.2.4/go.mod h1:2xO6iu3EVWs7R2JYqBbp8YzG50gj/ofqs5/0VZoDZLc= github.com/antchfx/xpath v1.2.0 h1:mbwv7co+x0RwgeGAOHdrKy89GvHaGvxxBtPK0uF9Zr8= @@ -227,7 +226,6 @@ github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI= github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= -github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= github.com/asaskevich/govalidator v0.0.0-20180720115003-f9ffefc3facf/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= @@ -474,8 +472,8 @@ github.com/creack/pty v1.1.11/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY= github.com/cyberdelia/templates v0.0.0-20141128023046-ca7fffd4298c/go.mod h1:GyV+0YP4qX0UQ7r2MoYZ+AvYDp12OF5yg4q8rGnyNh4= github.com/cyphar/filepath-securejoin v0.2.2/go.mod h1:FpkQEhXnPnOthhzymB7CGsFk2G9VLXONKD9G7QGMM+4= -github.com/cyphar/filepath-securejoin v0.2.3 h1:YX6ebbZCZP7VkM3scTTokDgBL2TY741X51MTk3ycuNI= -github.com/cyphar/filepath-securejoin v0.2.3/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= +github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg= +github.com/cyphar/filepath-securejoin v0.2.4/go.mod 
h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= github.com/d2g/dhcp4 v0.0.0-20170904100407-a1d1b6c41b1c/go.mod h1:Ct2BUK8SB0YC1SMSibvLzxjeJLnrYEVLULFNiHY9YfQ= github.com/d2g/dhcp4client v1.0.0/go.mod h1:j0hNfjhrt2SxUOw55nL0ATM/z4Yt3t2Kd1mW34z5W5s= github.com/d2g/dhcp4server v0.0.0-20181031114812-7d4a0a7f59a5/go.mod h1:Eo87+Kg/IX2hfWJfwxMzLyuSZyxSoAug2nGa1G2QAi8= @@ -549,14 +547,15 @@ github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25Kn github.com/dvsekhvalnov/jose2go v1.5.0 h1:3j8ya4Z4kMCwT5nXIKFSV84YS+HdqSSO0VsTQxaLAeM= github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385/go.mod h1:0vRUJqYpeSZifjYj7uP3BG/gKcuzL9xWVV/Y+cK33KM= github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc= +github.com/elazarl/goproxy v0.0.0-20230808193330-2592e75ae04a h1:mATvB/9r/3gvcejNsXKSkQ6lcIaNec2nyfOdlTBR2lU= github.com/elliotchance/orderedmap v1.4.0 h1:wZtfeEONCbx6in1CZyE6bELEt/vFayMvsxqI5SgsR+A= github.com/elliotchance/orderedmap v1.4.0/go.mod h1:wsDwEaX5jEoyhbs7x93zk2H/qv0zwuhg4inXhDkYqys= github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= github.com/emicklei/go-restful v2.9.5+incompatible/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= github.com/emicklei/go-restful/v3 v3.10.1 h1:rc42Y5YTp7Am7CS630D7JmhRjq4UlEUuEKfrDac4bSQ= github.com/emicklei/go-restful/v3 v3.10.1/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= -github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg= -github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o= +github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= +github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= @@ -580,7 +579,6 @@ github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBD github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo= github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= github.com/felixge/httpsnoop v1.0.1 h1:lvB5Jl89CsZtGIWuTcDM1E/vkVs49/Ml7JJe07l8SPQ= -github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc= github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= github.com/form3tech-oss/jwt-go v3.2.5+incompatible h1:/l4kBbb4/vGSsdtB5nUe8L7B9mImVMaBPw9L/0TBHU8= github.com/frankban/quicktest v1.11.3/go.mod h1:wRf/ReqHper53s+kmmSZizM8NamnL3IM0I9ntUbOk+k= @@ -608,8 +606,7 @@ github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm github.com/gin-gonic/gin v1.4.0/go.mod h1:OW2EZn3DO8Ln9oIKOvM++LBO+5UPHJJDH72/q/3rZdM= github.com/gin-gonic/gin v1.6.3 h1:ahKqKTFpO5KTPHxWZjEdPScmYaGtLo8Y4DMHoEsnp14= github.com/gin-gonic/gin v1.6.3/go.mod h1:75u5sXoLsGZoRN5Sgbi1eraJ4GU3++wFwWzhwvtwp4M= -github.com/gliderlabs/ssh v0.2.2 h1:6zsha5zo/TWhRhwqCD3+EarCAgZ2yN28ipRnGPnwkI0= -github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= +github.com/gliderlabs/ssh v0.3.5 h1:OcaySEmAQJgyYcArR+gGGTHCyE7nvhEMTlYY+Dp8CpY= 
github.com/globalsign/mgo v0.0.0-20180905125535-1ca0a4f7cbcb/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q= github.com/globalsign/mgo v0.0.0-20181015135952-eeefdecb41b8/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q= github.com/go-asn1-ber/asn1-ber v1.5.4 h1:vXT6d/FNDiELJnLb6hGNa309LMsrCoYFvpwHDF0+Y1A= @@ -618,21 +615,19 @@ github.com/go-chi/chi/v5 v5.0.0/go.mod h1:BBug9lr0cqtdAhsu6R4AAdvufI0/XBzAQSsUqJ github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q= github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA= github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= -github.com/go-git/gcfg v1.5.0 h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4= -github.com/go-git/gcfg v1.5.0/go.mod h1:5m20vg6GwYabIxaOonVkTdrILxQMpEShl1xiMF4ua+E= -github.com/go-git/go-billy/v5 v5.2.0/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0= -github.com/go-git/go-billy/v5 v5.3.1 h1:CPiOUAzKtMRvolEKw+bG1PLRpT7D3LIs3/3ey4Aiu34= -github.com/go-git/go-billy/v5 v5.3.1/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0= -github.com/go-git/go-git-fixtures/v4 v4.2.1 h1:n9gGL1Ct/yIw+nfsfr8s4+sbhT+Ncu2SubfXjIWgci8= -github.com/go-git/go-git-fixtures/v4 v4.2.1/go.mod h1:K8zd3kDUAykwTdDCr+I0per6Y6vMiRR/nnVTBtavnB0= -github.com/go-git/go-git/v5 v5.4.2 h1:BXyZu9t0VkbiHtqrsvdq39UDhGJTl1h55VW6CSC4aY4= -github.com/go-git/go-git/v5 v5.4.2/go.mod h1:gQ1kArt6d+n+BGd+/B/I74HwRTLhth2+zti4ihgckDc= +github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= +github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= +github.com/go-git/go-billy/v5 v5.5.0 h1:yEY4yhzCDuMGSv83oGxiBotRzhwhNr8VZyphhiu+mTU= +github.com/go-git/go-billy/v5 v5.5.0/go.mod h1:hmexnoNsr2SJU1Ju67OaNz5ASJY3+sHgFRpCtpDCKow= +github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4= +github.com/go-git/go-git/v5 v5.10.1 h1:tu8/D8i+TWxgKpzQ3Vc43e+kkhXqtsZCKI/egajKnxk= +github.com/go-git/go-git/v5 v5.10.1/go.mod h1:uEuHjxkHap8kAl//V5F/nNWwqIYtP/402ddd05mp0wg= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-ini/ini v1.25.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= -github.com/go-jose/go-jose/v3 v3.0.0 h1:s6rrhirfEP/CGIoc6p+PZAeogN2SxKav6Wp7+dyMWVo= -github.com/go-jose/go-jose/v3 v3.0.0/go.mod h1:RNkWWRld676jZEYoV3+XK8L2ZnNSvIsxFMht0mSX+u8= +github.com/go-jose/go-jose/v3 v3.0.1 h1:pWmKFVtt+Jl0vBZTIpz/eAKwsm6LkIxDVVbFHKkchhA= +github.com/go-jose/go-jose/v3 v3.0.1/go.mod h1:RNkWWRld676jZEYoV3+XK8L2ZnNSvIsxFMht0mSX+u8= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= @@ -880,8 +875,9 @@ github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp 
v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-containerregistry v0.5.2-0.20210604130445-3bfab55f3bd9/go.mod h1:R5WRYyTdQqTchlBhX4q+WICGh8HQIL5wDFoFZv7Jq6Q= github.com/google/go-containerregistry v0.13.0 h1:y1C7Z3e149OJbOPDBxLYR8ITPz8dTKqQwjErKVHJC8k= github.com/google/go-containerregistry v0.13.0/go.mod h1:J9FQ+eSS4a1aC2GNZxvNpbWhgp0487v+cgiilB4FqDo= @@ -977,8 +973,8 @@ github.com/hashicorp/errwrap v0.0.0-20141028054710-7554cd9344ce/go.mod h1:YH+1FK github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/eventlogger v0.1.1 h1:zyCjxsy7KunFsMPZKU5PnwWEakSrp1zjj2vPFmrDaeo= -github.com/hashicorp/eventlogger v0.1.1/go.mod h1://CHt6/j+Q2lc0NlUB5af4aS2M0c0aVBg9/JfcpAyhM= +github.com/hashicorp/eventlogger v0.2.1 h1:sjAOKO62BDDBn10516Uo7QDf5KEqzhU0LkUnbBptVUU= +github.com/hashicorp/eventlogger v0.2.1/go.mod h1://CHt6/j+Q2lc0NlUB5af4aS2M0c0aVBg9/JfcpAyhM= github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= @@ -1033,8 +1029,8 @@ github.com/hashicorp/go-retryablehttp v0.7.2/go.mod h1:Jy/gPYAdjqffZ/yFGCFV2doI5 github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc= github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= -github.com/hashicorp/go-secure-stdlib/awsutil v0.2.2 h1:kWg2vyKl7BRXrNxYziqDJ55n+vtOQ1QsGORjzoeB+uM= -github.com/hashicorp/go-secure-stdlib/awsutil v0.2.2/go.mod h1:oKHSQs4ivIfZ3fbXGQOop1XuDfdSb8RIsWTGaAanSfg= +github.com/hashicorp/go-secure-stdlib/awsutil v0.2.3 h1:AAQ6Vmo/ncfrZYtbpjhO+g0Qt+iNpYtl3UWT1NLmbYY= +github.com/hashicorp/go-secure-stdlib/awsutil v0.2.3/go.mod h1:oKHSQs4ivIfZ3fbXGQOop1XuDfdSb8RIsWTGaAanSfg= github.com/hashicorp/go-secure-stdlib/base62 v0.1.1/go.mod h1:EdWO6czbmthiwZ3/PUsDV+UD1D5IRU4ActiaWGwt0Yw= github.com/hashicorp/go-secure-stdlib/base62 v0.1.2 h1:ET4pqyjiGmY09R5y+rSd70J2w45CtbWDNvGqWp/R3Ng= github.com/hashicorp/go-secure-stdlib/base62 v0.1.2/go.mod h1:EdWO6czbmthiwZ3/PUsDV+UD1D5IRU4ActiaWGwt0Yw= @@ -1100,10 +1096,10 @@ github.com/hashicorp/raft-snapshot v1.0.4 h1:EuDuayAJPdiDmVk1ygTDnG2zDzrs0/6/yBu github.com/hashicorp/raft-snapshot v1.0.4/go.mod h1:5sL9eUn72lH5DzsFIJ9jaysITbHksSSszImWSOTC8Ic= github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= github.com/hashicorp/serf v0.10.1 h1:Z1H2J60yRKvfDYAOZLd2MU0ND4AH/WDz7xYHDWQsIPY= -github.com/hashicorp/vault v1.14.0 h1:c+ujeY6SP/6xFF7dn1tfMhn5JPbRntX6lpIaoRUR6LM= -github.com/hashicorp/vault v1.14.0/go.mod h1:bVRLXpE3TF0NgB/t2pJyox1n7dhtqbsZ5G19G0gpLRw= +github.com/hashicorp/vault v1.14.1 h1:JBRe4N6g6iu3yWenhlMn9PwSNAQYIQQ6PTYnbccvyxM= +github.com/hashicorp/vault v1.14.1/go.mod 
h1:VH1j4CD8lYPQ+XjmgpAF7gt0M2swsARFHndbDyDRgkU= github.com/hashicorp/vault-plugin-auth-alicloud v0.15.0 h1:R2SVwOeVLG5DXzUx42UWhjfFqS0Z9+ncfebPu+gO9VA= -github.com/hashicorp/vault-plugin-auth-azure v0.15.0 h1:OPK3rpRsWUQm/oo8l4N+YS7dka+lUHDT/qxTafSFPzY= +github.com/hashicorp/vault-plugin-auth-azure v0.15.1 h1:CknW0l2O70326KfepWeDuPszuNherhAtVNaSLRBsS4U= github.com/hashicorp/vault-plugin-auth-centrify v0.15.1 h1:6StAr5tltpySNgyUwWC8czm9ZqkO7NIZfcRmxxtFwQ8= github.com/hashicorp/vault-plugin-auth-cf v0.15.0 h1:zIVGlYXCRBY/ElucWdFC9xF27d2QMGMQPm9wSezGREI= github.com/hashicorp/vault-plugin-auth-gcp v0.16.0 h1:DA/ZDLCrUsbHS/7Xqkkw7l2SgbQE9rWEHLLWYTGu8rw= @@ -1120,7 +1116,7 @@ github.com/hashicorp/vault-plugin-database-snowflake v0.8.0 h1:Ec7gxxWIhxTmbKNXp github.com/hashicorp/vault-plugin-mock v0.16.1 h1:5QQvSUHxDjEEbrd2REOeacqyJnCLPD51IQzy71hx8P0= github.com/hashicorp/vault-plugin-secrets-ad v0.16.0 h1:6RCpd2PbBvmi5xmxXhggE0Xv+/Gag896/NNZeMKH+8A= github.com/hashicorp/vault-plugin-secrets-alicloud v0.15.0 h1:uVpcx2s3PwYXSOHmjA/Ai6+V0c3wgvSApELZez8b9mI= -github.com/hashicorp/vault-plugin-secrets-azure v0.16.0 h1:4Y2LG2P6XUy4HLlObJtHiveJBQwZ4kazs0EpxDmAal0= +github.com/hashicorp/vault-plugin-secrets-azure v0.16.1 h1:eMU5qYPa5dQQALPP7B+UPB0QCSHzB6LKrqbNCcRr7Ts= github.com/hashicorp/vault-plugin-secrets-gcp v0.16.0 h1:5ozLtt38Bw/DLt37dbccT8j56A+2T7CWFfYecKleGl4= github.com/hashicorp/vault-plugin-secrets-gcpkms v0.15.0 h1:CueteKXEuO52qGu1nUaDc/euSTSfQD9MONkXuvWdZQw= github.com/hashicorp/vault-plugin-secrets-kubernetes v0.5.0 h1:g0W1ybHjO945jDtuDEFcqTINyW/s06wxZarE/7aLumc= @@ -1151,7 +1147,6 @@ github.com/imdario/mergo v0.3.6/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJ github.com/imdario/mergo v0.3.8/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/imdario/mergo v0.3.10/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= -github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= github.com/imdario/mergo v0.3.15 h1:M8XP7IuFNsqUx6VPK2P9OSmsYsI/YFaGil0uD21V3dM= github.com/imdario/mergo v0.3.15/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= github.com/imkira/go-interpol v1.1.0/go.mod h1:z0h2/2T3XF8kyEPpRgJ3kmNv+C43p+I/CoI+jC3w2iA= @@ -1172,15 +1167,15 @@ github.com/iris-contrib/schema v0.0.1/go.mod h1:urYA3uvUNG1TIIjOSCzHr9/LmbQo8LrO github.com/j-keck/arping v0.0.0-20160618110441-2cf9dc699c56/go.mod h1:ymszkNOg6tORTn+6F6j+Jc8TOr5osrynvN6ivFWZ2GA= github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8= github.com/jackc/fake v0.0.0-20150926172116-812a484cc733/go.mod h1:WrMFNQdiFJ80sQsxDoMokWK1W5TQtxBFNpzWTD84ibQ= -github.com/jackc/pgconn v1.11.0 h1:HiHArx4yFbwl91X3qqIHtUFoiIfLNJXCQRsnzkiwwaQ= +github.com/jackc/pgconn v1.14.0 h1:vrbA9Ud87g6JdFWkHTJXppVce58qPIdP7N8y0Ml/A7Q= github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE= github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= -github.com/jackc/pgproto3/v2 v2.2.0 h1:r7JypeP2D3onoQTCxWdTpCtJ4D+qpKr0TxvoyMhZ5ns= -github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b h1:C8S2+VttkHFdOOCXJe+YGfa4vHYwlt4Zx+IVXQ97jYg= -github.com/jackc/pgtype v1.10.0 h1:ILnBWrRMSXGczYvmkYD6PsYyVFUNLTnIUJHHDLmqk38= +github.com/jackc/pgproto3/v2 v2.3.2 h1:7eY55bdBeCz1F2fTzSz69QC+pG46jYq9/jtSPiJ5nn0= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk= +github.com/jackc/pgtype 
v1.14.0 h1:y+xUdabmyMkJLyApYuPj38mW+aAIqCe5uuBB51rH3Vw= github.com/jackc/pgx v3.3.0+incompatible h1:Wa90/+qsITBAPkAZjiByeIGHFcj3Ztu+VzrrIpHjL90= github.com/jackc/pgx v3.3.0+incompatible/go.mod h1:0ZGrqGqkRlliWnWB4zKnWtjbSWbGkVEFm4TeybAXq+I= -github.com/jackc/pgx/v4 v4.15.0 h1:B7dTkXsdILD3MF987WGGCcg+tvLW6bZJdEcqVFeU//w= +github.com/jackc/pgx/v4 v4.18.1 h1:YP7G1KABtKpB5IHrO9vYwSrCOhs7p3uqhvhhQBptya0= github.com/jarcoal/httpmock v0.0.0-20180424175123-9c70cfe4a1da/go.mod h1:ks+b9deReOc7jgqp+e7LuFiCBH6Rm5hL32cLcEAArb4= github.com/jarcoal/httpmock v1.0.8 h1:8kI16SoO6LQKgPE7PvQuV+YuD/inwHd7fOOe2zMbo4k= github.com/jarcoal/httpmock v1.0.8/go.mod h1:ATjnClrvW/3tijVmpL/va5Z3aAyGvqU3gCT8nX0Txik= @@ -1198,7 +1193,6 @@ github.com/jefferai/isbadcipher v0.0.0-20190226160619-51d2077c035f/go.mod h1:3J2 github.com/jefferai/jsonx v1.0.0 h1:Xoz0ZbmkpBvED5W9W1B5B/zc3Oiq7oXqiW7iRV3B6EI= github.com/jefferai/jsonx v1.0.0/go.mod h1:OGmqmi2tTeI/PS+qQfBDToLHHJIy/RMp24fPo8vFvoQ= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= -github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4= github.com/jhump/protoreflect v1.10.3 h1:8ogeubpKh2TiulA0apmGlW5YAH4U1Vi4TINIP+gpNfQ= github.com/jhump/protoreflect v1.10.3/go.mod h1:7GcYQDdMU/O/BBrl/cX6PNHpXh6cenjd8pneu5yW7Tg= github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= @@ -1242,8 +1236,8 @@ github.com/kataras/pio v0.0.2/go.mod h1:hAoW0t9UmXi4R5Oyq5Z4irTbaTsOemSrDGUtaTl7 github.com/kataras/sitemap v0.0.5/go.mod h1:KY2eugMKiPwsJgx7+U103YZehfvNGOXURubcGyk0Bz8= github.com/kelseyhightower/envconfig v1.4.0 h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dvMUtDTo2cv8= github.com/kelseyhightower/envconfig v1.4.0/go.mod h1:cccZRl6mQpaq41TPp5QxidR+Sa3axMbJDNb//FQX6Gg= -github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351 h1:DowS9hvgyYSX4TO5NpyC606/Z4SxnNYbT+WX27or6Ck= -github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= +github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4= +github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= @@ -1309,8 +1303,6 @@ github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJ github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= github.com/marstr/guid v1.1.0/go.mod h1:74gB1z2wpxxInTG6yaqA7KrtM0NZ+RbrcqDvYHefzho= -github.com/matryer/is v1.2.0 h1:92UTHpy8CDwaJ08GqLDzhhuixiBUUD1p3AU6PHddz4A= -github.com/matryer/is v1.2.0/go.mod h1:2fLPjFQM9rhQ15aVEtbuwhJinnOqrmgXPNdZsdwlWXA= github.com/matryer/moq v0.0.0-20190312154309-6cfb0558e1bd/go.mod h1:9ELz6aaclSIGnZBoaSLZ3NAl1VTufbOrXBPvtcy6WiQ= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= @@ -1467,7 +1459,7 @@ github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1Cpa github.com/onsi/gomega v1.7.1/go.mod 
h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/onsi/gomega v1.10.3/go.mod h1:V9xEwhxec5O8UDM77eCW8vLymOMltsqPVYWrpDsH8xc= -github.com/onsi/gomega v1.27.4 h1:Z2AnStgsdSayCMDiCU42qIz+HLqEPcgiOCXjAU/w+8E= +github.com/onsi/gomega v1.27.10 h1:naR28SdDFlqrG6kScpT8VWpu1xWY5nJRCF3XaYyBjhI= github.com/opencontainers/go-digest v0.0.0-20170106003457-a6d0ee40d420/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= github.com/opencontainers/go-digest v0.0.0-20180430190053-c9281466c8b2/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= @@ -1530,6 +1522,8 @@ github.com/piper-validation/fortify-client-go v0.0.0-20220126145513-7b3e9a72af01 github.com/piper-validation/fortify-client-go v0.0.0-20220126145513-7b3e9a72af01/go.mod h1:EZkdCgngw/tInYdidqDQlRIXvyM1fSbqn/vx83YNCcw= github.com/pires/go-proxyproto v0.6.1 h1:EBupykFmo22SDjv4fQVQd2J9NOoLPmyZA/15ldOGkPw= github.com/pires/go-proxyproto v0.6.1/go.mod h1:Odh9VFOZJCf9G8cLW5o435Xf1J95Jw9Gw5rnCjcwzAY= +github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= +github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -1615,7 +1609,7 @@ github.com/rogpeppe/go-internal v1.6.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTE github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= github.com/rogpeppe/go-internal v1.8.1/go.mod h1:JeRgkft04UBgHMgCIwADu4Pn6Mtm5d4nPKWu0nJ5d+o= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= -github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= +github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M= github.com/rs/zerolog v1.4.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= @@ -1640,7 +1634,6 @@ github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUt github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/seccomp/libseccomp-golang v0.9.1/go.mod h1:GbW5+tmTXfcxTToHLXlScSlAvWlF4P2Ca7zGrPiEpWo= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= -github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/sergi/go-diff v1.2.0 h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ= github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/sethvargo/go-limiter v0.7.1 h1:wWNhTj0pxjyJ7wuJHpRJpYwJn+bUnjYfw2a85eu5w9U= @@ -1660,6 +1653,8 @@ github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0= github.com/sirupsen/logrus v1.9.0/go.mod 
h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/skeema/knownhosts v1.2.1 h1:SHWdIUa82uGZz+F+47k8SY4QhhI291cXCpopT1lK2AQ= +github.com/skeema/knownhosts v1.2.1/go.mod h1:xYbVRSPxqBZFrdmDyMmsOs+uX1UZC3nTN3ThzgDxUwo= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/assertions v1.0.0/go.mod h1:kHHU4qYBaI3q23Pp3VPrmWhuIUrLW/7eUrw0BU5VaoM= github.com/smartystreets/go-aws-auth v0.0.0-20180515143844-0c1422d1fdb9/go.mod h1:SnhjPscd9TpLiy1LpzGSKh3bXCfxxXuqd9xmQJy3slM= @@ -1780,8 +1775,8 @@ github.com/vmware/govmomi v0.18.0 h1:f7QxSmP7meCtoAmiKZogvVbLInT+CZx6Px6K5rYsJZo github.com/vmware/govmomi v0.18.0/go.mod h1:URlwyTFZX72RmxtxuaFL2Uj3fD1JTvZdx59bHWk6aFU= github.com/willf/bitset v1.1.11-0.20200630133818-d5bec3311243/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4= github.com/willf/bitset v1.1.11/go.mod h1:83CECat5yLh5zVOf4P1ErAgKA5UDvKtgyUABdr3+MjI= -github.com/xanzy/ssh-agent v0.3.0 h1:wUMzuKtKilRgBAD1sUb8gOwwRr2FGoBVumcjoOACClI= -github.com/xanzy/ssh-agent v0.3.0/go.mod h1:3s9xbODqPuuhK9JV1R321M/FlMZSBvE5aY6eAcqrDh0= +github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= +github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs= @@ -1874,7 +1869,6 @@ golang.org/x/crypto v0.0.0-20181009213950-7c1a557ab941/go.mod h1:6SG95UA2DQfeDnf golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190211182817-74369b46fc67/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190320223903-b7391e95e576/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= @@ -1896,7 +1890,6 @@ golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20201216223049-8b5274cf687f/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= -golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= @@ -1906,8 +1899,8 @@ golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0 golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= golang.org/x/crypto 
v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= -golang.org/x/crypto v0.13.0 h1:mvySKfSWJ+UKUii46M40LOvyWfN0s2U+46/jDd0e6Ck= -golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= +golang.org/x/crypto v0.16.0 h1:mMMrFzRSCF0GvB7Ne27XVtVAaXLrPmgPC7/v0tkwHaY= +golang.org/x/crypto v0.16.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -1949,8 +1942,8 @@ golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc= -golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.14.0 h1:dGoOF9QVLYng8IHTm7BAyWqCqSheQ5pYWGhzW00YJr0= +golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20170114055629-f2499483f923/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -2004,7 +1997,6 @@ golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= -golang.org/x/net v0.0.0-20210326060303-6b1517762897/go.mod h1:uSPa2vr4CLtc/ILN5odXGNXS6mhrKVzTaCXzk9m6W3k= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8= golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM= @@ -2020,8 +2012,8 @@ golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= -golang.org/x/net v0.15.0 h1:ugBLEUaxABaB5AJqW9enI0ACdci2RUd4eP51NTBvuJ8= -golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c= +golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 
v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -2035,8 +2027,8 @@ golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= -golang.org/x/oauth2 v0.12.0 h1:smVPGxink+n1ZI5pkQa8y6fZT0RW0MgCO5bFpepy4B4= -golang.org/x/oauth2 v0.12.0/go.mod h1:A74bZ3aGXgCY0qaIC9Ahg6Lglin4AMAco8cIv9baba4= +golang.org/x/oauth2 v0.15.0 h1:s8pnnxNVzjWyrvYdFUQq5llS1PX2zhPXmccZv99h7uQ= +golang.org/x/oauth2 v0.15.0/go.mod h1:q48ptWNTY5XWf+JNten23lcvHpLJ0ZSxF5ttTHKVCAM= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -2051,8 +2043,8 @@ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.2.0 h1:PUR+T4wwASmuSTYdKjYHI5TD22Wy5ogLU5qZCOLxBrI= -golang.org/x/sync v0.2.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.5.0 h1:60k92dhOjHxJkrqnwsfl8KuaHbn/5dl0lUPUklKo3qE= +golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20170830134202-bb24a47a89ea/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -2151,7 +2143,6 @@ golang.org/x/sys v0.0.0-20210324051608-47abb6519492/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210420072515-93ed5bcd2bfe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210502180810-71e4cd670f79/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -2176,8 +2167,8 @@ golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.12.0 h1:CM0HF96J0hcLAwsHPJZjfdNzs0gftsLfgKt57wWHJ0o= -golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 
+golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc= +golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -2185,8 +2176,8 @@ golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= -golang.org/x/term v0.12.0 h1:/ZfYdc3zq+q02Rv9vGqTeSItdzZTSNDmfTi0mBAuidU= -golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= +golang.org/x/term v0.15.0 h1:y/Oo/a/q3IXu26lQgl04j/gjuBDOBlx7X6Om1j2CPW4= +golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0= golang.org/x/text v0.0.0-20160726164857-2910a502d2bf/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -2201,8 +2192,8 @@ golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= -golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -2285,8 +2276,8 @@ golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= -golang.org/x/tools v0.7.0 h1:W4OVu8VVOaIO0yzWMNdepAulS7YfoS3Zabrm8DOXXU4= -golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s= +golang.org/x/tools v0.13.0 h1:Iey4qkscZuv0VvIt8E0neZjtPVQFSc870HQ448QgEmQ= +golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/integration/integration_cnb_test.go b/integration/integration_cnb_test.go index 6c5d2a42e1..01bb6cefc0 100644 --- 
a/integration/integration_cnb_test.go +++ b/integration/integration_cnb_test.go @@ -17,7 +17,7 @@ import ( const ( registryURL = "localhost:5000" - baseBuilder = "paketobuildpacks/builder:0.3.280-base" + baseBuilder = "paketobuildpacks/builder-jammy-base:0.4.252" ) func setupDockerRegistry(t *testing.T, ctx context.Context) testcontainers.Container { @@ -43,7 +43,7 @@ func TestCNBIntegrationNPMProject(t *testing.T) { container := givenThisContainer(t, IntegrationTestDockerExecRunnerBundle{ Image: baseBuilder, - User: "cnb", + User: "0", TestDir: []string{"testdata", "TestCnbIntegration"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), Environment: map[string]string{ @@ -53,7 +53,7 @@ func TestCNBIntegrationNPMProject(t *testing.T) { container2 := givenThisContainer(t, IntegrationTestDockerExecRunnerBundle{ Image: baseBuilder, - User: "cnb", + User: "0", TestDir: []string{"testdata", "TestCnbIntegration"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), Environment: map[string]string{ @@ -65,7 +65,7 @@ func TestCNBIntegrationNPMProject(t *testing.T) { assert.NoError(t, err) container.assertHasOutput(t, "running command: /cnb/lifecycle/creator") container.assertHasOutput(t, "Selected Node Engine version (using BP_NODE_VERSION): 16") - container.assertHasOutput(t, "Paketo NPM Start Buildpack") + container.assertHasOutput(t, "Paketo Buildpack for NPM Start") container.assertHasOutput(t, fmt.Sprintf("Saving %s/node:0.0.1", registryURL)) container.assertHasOutput(t, "Setting default process type 'greeter'") container.assertHasOutput(t, "*** Images (sha256:") @@ -77,7 +77,7 @@ func TestCNBIntegrationNPMProject(t *testing.T) { assert.NoError(t, err) container2.assertHasOutput(t, "running command: /cnb/lifecycle/creator") container2.assertHasOutput(t, "Selected Node Engine version (using BP_NODE_VERSION): 16") - container2.assertHasOutput(t, "Paketo NPM Start Buildpack") + container2.assertHasOutput(t, "Paketo Buildpack for NPM Start") container2.assertHasOutput(t, fmt.Sprintf("Saving %s/node:0.0.1", registryURL)) container2.assertHasOutput(t, "*** Images (sha256:") container2.assertHasOutput(t, "SUCCESS") @@ -93,7 +93,7 @@ func TestCNBIntegrationProjectDescriptor(t *testing.T) { container := givenThisContainer(t, IntegrationTestDockerExecRunnerBundle{ Image: baseBuilder, - User: "cnb", + User: "0", TestDir: []string{"testdata", "TestCnbIntegration", "project"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), }) @@ -108,7 +108,7 @@ func TestCNBIntegrationProjectDescriptor(t *testing.T) { "Downloading buildpack", "Setting custom environment variables: 'map[BP_NODE_VERSION:16 TMPDIR:/tmp/cnbBuild-", "Selected Node Engine version (using BP_NODE_VERSION): 16", - "Paketo NPM Start Buildpack", + "Paketo Buildpack for NPM Start", fmt.Sprintf("Saving %s/not-found:0.0.1", registryURL), "*** Images (sha256:", "SUCCESS", @@ -123,7 +123,7 @@ func TestCNBIntegrationBuildSummary(t *testing.T) { container := givenThisContainer(t, IntegrationTestDockerExecRunnerBundle{ Image: baseBuilder, - User: "cnb", + User: "0", TestDir: []string{"testdata", "TestCnbIntegration", "project"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), }) @@ -148,7 +148,7 @@ func TestCNBIntegrationZipPath(t *testing.T) { container := givenThisContainer(t, IntegrationTestDockerExecRunnerBundle{ Image: baseBuilder, - User: "cnb", + User: "0", TestDir: []string{"testdata", "TestCnbIntegration", "zip"}, Network: fmt.Sprintf("container:%s", 
registryContainer.GetContainerID()), }) @@ -177,7 +177,7 @@ func TestCNBIntegrationNonZipPath(t *testing.T) { container := givenThisContainer(t, IntegrationTestDockerExecRunnerBundle{ Image: baseBuilder, - User: "cnb", + User: "0", TestDir: []string{"testdata", "TestMtaIntegration", "npm"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), }) @@ -197,19 +197,19 @@ func TestCNBIntegrationNPMCustomBuildpacksFullProject(t *testing.T) { container := givenThisContainer(t, IntegrationTestDockerExecRunnerBundle{ Image: baseBuilder, - User: "cnb", + User: "0", TestDir: []string{"testdata", "TestMtaIntegration", "npm"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), }) - err := container.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--buildpacks", "gcr.io/paketo-buildpacks/nodejs:0.19.0", "--containerImageName", "not-found", "--containerImageTag", "0.0.1", "--containerRegistryUrl", registryURL) + err := container.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--buildpacks", "gcr.io/paketo-buildpacks/nodejs:2.0.0", "--containerImageName", "not-found", "--containerImageTag", "0.0.1", "--containerRegistryUrl", registryURL) assert.NoError(t, err) container.assertHasOutput(t, - "Setting custom buildpacks: '[gcr.io/paketo-buildpacks/nodejs:0.19.0]'", - "Downloading buildpack 'gcr.io/paketo-buildpacks/nodejs:0.19.0' to /tmp/buildpacks_cache/sha256:", + "Setting custom buildpacks: '[gcr.io/paketo-buildpacks/nodejs:2.0.0]'", + "Downloading buildpack 'gcr.io/paketo-buildpacks/nodejs:2.0.0' to /tmp/buildpacks_cache/sha256:", "running command: /cnb/lifecycle/creator", - "Paketo NPM Start Buildpack", + "Paketo Buildpack for NPM Start", fmt.Sprintf("Saving %s/not-found:0.0.1", registryURL), "*** Images (sha256:", "SUCCESS", @@ -224,19 +224,19 @@ func TestCNBIntegrationNPMCustomBuildpacksBuildpacklessProject(t *testing.T) { defer registryContainer.Terminate(ctx) container := givenThisContainer(t, IntegrationTestDockerExecRunnerBundle{ - Image: "paketobuildpacks/builder:buildpackless-full", - User: "cnb", + Image: "paketobuildpacks/builder-jammy-buildpackless-full", + User: "0", TestDir: []string{"testdata", "TestMtaIntegration", "npm"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), }) - err := container.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--buildpacks", "gcr.io/paketo-buildpacks/nodejs:0.19.0", "--containerImageName", "not-found", "--containerImageTag", "0.0.1", "--containerRegistryUrl", registryURL) + err := container.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--buildpacks", "gcr.io/paketo-buildpacks/nodejs:2.0.0", "--containerImageName", "not-found", "--containerImageTag", "0.0.1", "--containerRegistryUrl", registryURL) assert.NoError(t, err) - container.assertHasOutput(t, "Setting custom buildpacks: '[gcr.io/paketo-buildpacks/nodejs:0.19.0]'", - "Downloading buildpack 'gcr.io/paketo-buildpacks/nodejs:0.19.0' to /tmp/buildpacks_cache/sha256:", + container.assertHasOutput(t, "Setting custom buildpacks: '[gcr.io/paketo-buildpacks/nodejs:2.0.0]'", + "Downloading buildpack 'gcr.io/paketo-buildpacks/nodejs:2.0.0' to /tmp/buildpacks_cache/sha256:", "running command: /cnb/lifecycle/creator", - "Paketo NPM Start Buildpack", + "Paketo Buildpack for NPM Start", fmt.Sprintf("Saving %s/not-found:0.0.1", registryURL), "*** Images (sha256:", "SUCCESS", @@ -266,7 +266,7 @@ func TestCNBIntegrationBindings(t *testing.T) { container := givenThisContainer(t, 
IntegrationTestDockerExecRunnerBundle{ Image: baseBuilder, - User: "cnb", + User: "0", TestDir: []string{"testdata", "TestCnbIntegration"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), Environment: map[string]string{ @@ -294,7 +294,7 @@ func TestCNBIntegrationMultiImage(t *testing.T) { container := givenThisContainer(t, IntegrationTestDockerExecRunnerBundle{ Image: baseBuilder, - User: "cnb", + User: "0", TestDir: []string{"testdata", "TestCnbIntegration"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), }) @@ -328,7 +328,7 @@ func TestCNBIntegrationPreserveFiles(t *testing.T) { container := givenThisContainer(t, IntegrationTestDockerExecRunnerBundle{ Image: baseBuilder, - User: "cnb", + User: "0", TestDir: []string{"testdata", "TestCnbIntegration"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), }) @@ -348,7 +348,7 @@ func TestCNBIntegrationPreserveFilesIgnored(t *testing.T) { container := givenThisContainer(t, IntegrationTestDockerExecRunnerBundle{ Image: baseBuilder, - User: "cnb", + User: "0", TestDir: []string{"testdata", "TestCnbIntegration"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), }) @@ -367,7 +367,7 @@ func TestCNBIntegrationPrePostBuildpacks(t *testing.T) { container := givenThisContainer(t, IntegrationTestDockerExecRunnerBundle{ Image: baseBuilder, - User: "cnb", + User: "0", TestDir: []string{"testdata", "TestCnbIntegration"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), Environment: map[string]string{ diff --git a/integration/integration_tms_export_test.go b/integration/integration_tms_export_test.go index 8ee561708f..f661a89b47 100644 --- a/integration/integration_tms_export_test.go +++ b/integration/integration_tms_export_test.go @@ -19,7 +19,7 @@ func TestTmsExportIntegrationYaml(t *testing.T) { Image: "devxci/mbtci-java11-node14", User: "root", TestDir: []string{"testdata", "TestTmsIntegration"}, - Environment: map[string]string{"PIPER_tmsServiceKey": tmsServiceKey}, + Environment: map[string]string{"PIPER_serviceKey": tmsServiceKey}, }) defer container.terminate(t) @@ -41,7 +41,7 @@ func TestTmsExportIntegrationBinFailDescription(t *testing.T) { Image: "devxci/mbtci-java11-node14", User: "root", TestDir: []string{"testdata", "TestTmsIntegration"}, - Environment: map[string]string{"PIPER_tmsServiceKey": tmsServiceKey}, + Environment: map[string]string{"PIPER_serviceKey": tmsServiceKey}, }) defer container.terminate(t) diff --git a/integration/integration_tms_upload_test.go b/integration/integration_tms_upload_test.go index e7b93dc7b3..b240e2bb9c 100644 --- a/integration/integration_tms_upload_test.go +++ b/integration/integration_tms_upload_test.go @@ -32,7 +32,7 @@ func TestTmsUploadIntegrationBinSuccess(t *testing.T) { Image: "devxci/mbtci-java11-node14", User: "root", TestDir: []string{"testdata", "TestTmsIntegration"}, - Environment: map[string]string{"PIPER_tmsServiceKey": tmsServiceKey}, + Environment: map[string]string{"PIPER_serviceKey": tmsServiceKey}, }) defer container.terminate(t) @@ -58,7 +58,7 @@ func TestTmsUploadIntegrationBinNoDescriptionSuccess(t *testing.T) { Image: "devxci/mbtci-java11-node14", User: "root", TestDir: []string{"testdata", "TestTmsIntegration"}, - Environment: map[string]string{"PIPER_tmsServiceKey": tmsServiceKey}, + Environment: map[string]string{"PIPER_serviceKey": tmsServiceKey}, }) defer container.terminate(t) @@ -105,7 +105,7 @@ func TestTmsUploadIntegrationBinFailDescription(t *testing.T) 
{ Image: "devxci/mbtci-java11-node14", User: "root", TestDir: []string{"testdata", "TestTmsIntegration"}, - Environment: map[string]string{"PIPER_tmsServiceKey": tmsServiceKey}, + Environment: map[string]string{"PIPER_serviceKey": tmsServiceKey}, }) defer container.terminate(t) @@ -126,7 +126,7 @@ func TestTmsUploadIntegrationYaml(t *testing.T) { Image: "devxci/mbtci-java11-node14", User: "root", TestDir: []string{"testdata", "TestTmsIntegration"}, - Environment: map[string]string{"PIPER_tmsServiceKey": tmsServiceKey}, + Environment: map[string]string{"PIPER_serviceKey": tmsServiceKey}, }) defer container.terminate(t) diff --git a/integration/testdata/TestCnbIntegration/project/project-with-id.toml b/integration/testdata/TestCnbIntegration/project/project-with-id.toml index 11b884697e..9acac87159 100644 --- a/integration/testdata/TestCnbIntegration/project/project-with-id.toml +++ b/integration/testdata/TestCnbIntegration/project/project-with-id.toml @@ -14,16 +14,13 @@ value = "16" [[build.buildpacks]] id = "paketo-buildpacks/ca-certificates" -version = "3.2.5" +version = "3.6.6" [[build.buildpacks]] -uri = "gcr.io/paketo-buildpacks/node-engine:0.15.0" +uri = "gcr.io/paketo-buildpacks/node-engine:3.0.1" [[build.buildpacks]] -uri = "gcr.io/paketo-buildpacks/npm-install:0.10.3" +uri = "gcr.io/paketo-buildpacks/npm-install:1.3.1" [[build.buildpacks]] -uri = "gcr.io/paketo-buildpacks/node-module-bom:0.4.0" - -[[build.buildpacks]] -uri = "gcr.io/paketo-buildpacks/npm-start:0.9.2" +uri = "gcr.io/paketo-buildpacks/npm-start:1.0.15" diff --git a/integration/testdata/TestCnbIntegration/project/project.toml b/integration/testdata/TestCnbIntegration/project/project.toml index aca4a89a06..18d59f69a4 100644 --- a/integration/testdata/TestCnbIntegration/project/project.toml +++ b/integration/testdata/TestCnbIntegration/project/project.toml @@ -14,19 +14,16 @@ name = "BP_NODE_VERSION" value = "16" [[build.buildpacks]] -uri = "gcr.io/paketo-buildpacks/ca-certificates:3.2.5" +uri = "gcr.io/paketo-buildpacks/ca-certificates:3.6.6" [[build.buildpacks]] -uri = "gcr.io/paketo-buildpacks/node-engine:0.15.0" +uri = "gcr.io/paketo-buildpacks/node-engine:3.0.1" [[build.buildpacks]] -uri = "gcr.io/paketo-buildpacks/npm-install:0.10.3" +uri = "gcr.io/paketo-buildpacks/npm-install:1.3.1" [[build.buildpacks]] -uri = "gcr.io/paketo-buildpacks/node-module-bom:0.4.0" +uri = "gcr.io/paketo-buildpacks/npm-start:1.0.15" [[build.buildpacks]] -uri = "gcr.io/paketo-buildpacks/npm-start:0.9.2" - -[[build.buildpacks]] -uri = "gcr.io/paketo-buildpacks/procfile:5.4.0" +uri = "gcr.io/paketo-buildpacks/procfile:5.6.7" diff --git a/pkg/abaputils/abaputils.go b/pkg/abaputils/abaputils.go index 738b6715f3..9b944cdc31 100644 --- a/pkg/abaputils/abaputils.go +++ b/pkg/abaputils/abaputils.go @@ -167,6 +167,9 @@ func ReadConfigFile(path string) (file []byte, err error) { // GetHTTPResponse wraps the SendRequest function of piperhttp func GetHTTPResponse(requestType string, connectionDetails ConnectionDetailsHTTP, body []byte, client piperhttp.Sender) (*http.Response, error) { + log.Entry().Debugf("Request body: %s", string(body)) + log.Entry().Debugf("Request user: %s", connectionDetails.User) + header := make(map[string][]string) header["Content-Type"] = []string{"application/json"} header["Accept"] = []string{"application/json"} @@ -182,16 +185,20 @@ func GetHTTPResponse(requestType string, connectionDetails ConnectionDetailsHTTP // Further error details may be present in the response body of the HTTP response. 
// If the response body is parseable, the included details are wrapped around the original error from the HTTP repsponse. // If this is not possible, the original error is returned. -func HandleHTTPError(resp *http.Response, err error, message string, connectionDetails ConnectionDetailsHTTP) error { +func HandleHTTPError(resp *http.Response, err error, message string, connectionDetails ConnectionDetailsHTTP) (string, error) { + + var errorText string + var errorCode string + var parsingError error if resp == nil { // Response is nil in case of a timeout log.Entry().WithError(err).WithField("ABAP Endpoint", connectionDetails.URL).Error("Request failed") match, _ := regexp.MatchString(".*EOF$", err.Error()) if match { - AddDefaultDashedLine() + AddDefaultDashedLine(1) log.Entry().Infof("%s", "A connection could not be established to the ABAP system. The typical root cause is the network configuration (firewall, IP allowlist, etc.)") - AddDefaultDashedLine() + AddDefaultDashedLine(1) } log.Entry().Infof("Error message: %s,", err.Error()) @@ -201,15 +208,15 @@ func HandleHTTPError(resp *http.Response, err error, message string, connectionD log.Entry().WithField("StatusCode", resp.Status).WithField("User", connectionDetails.User).WithField("URL", connectionDetails.URL).Error(message) - errorText, errorCode, parsingError := GetErrorDetailsFromResponse(resp) + errorText, errorCode, parsingError = GetErrorDetailsFromResponse(resp) if parsingError != nil { - return err + return "", err } abapError := errors.New(fmt.Sprintf("%s - %s", errorCode, errorText)) err = errors.Wrap(abapError, err.Error()) } - return err + return errorCode, err } func GetErrorDetailsFromResponse(resp *http.Response) (errorString string, errorCode string, err error) { @@ -249,8 +256,10 @@ func ConvertTime(logTimeStamp string) time.Time { } // AddDefaultDashedLine adds 25 dashes -func AddDefaultDashedLine() { - log.Entry().Infof(strings.Repeat("-", 25)) +func AddDefaultDashedLine(j int) { + for i := 1; i <= j; i++ { + log.Entry().Infof(strings.Repeat("-", 25)) + } } // AddDefaultDebugLine adds 25 dashes in debug @@ -370,6 +379,7 @@ type ClientMock struct { Error error NilResponse bool ErrorInsteadOfDump bool + ErrorList []error } // SetOptions sets clientOptions for a client mock @@ -383,8 +393,10 @@ func (c *ClientMock) SendRequest(method, url string, bdy io.Reader, hdr http.Hea } var body []byte + var responseError error if c.Body != "" { body = []byte(c.Body) + responseError = c.Error } else { if c.ErrorInsteadOfDump && len(c.BodyList) == 0 { return nil, errors.New("No more bodies in the list") @@ -392,6 +404,12 @@ func (c *ClientMock) SendRequest(method, url string, bdy io.Reader, hdr http.Hea bodyString := c.BodyList[len(c.BodyList)-1] c.BodyList = c.BodyList[:len(c.BodyList)-1] body = []byte(bodyString) + if len(c.ErrorList) == 0 { + responseError = c.Error + } else { + responseError = c.ErrorList[len(c.ErrorList)-1] + c.ErrorList = c.ErrorList[:len(c.ErrorList)-1] + } } header := http.Header{} header.Set("X-Csrf-Token", c.Token) @@ -399,7 +417,7 @@ func (c *ClientMock) SendRequest(method, url string, bdy io.Reader, hdr http.Hea StatusCode: c.StatusCode, Header: header, Body: io.NopCloser(bytes.NewReader(body)), - }, c.Error + }, responseError } // DownloadFile : Empty file download diff --git a/pkg/abaputils/abaputils_test.go b/pkg/abaputils/abaputils_test.go index f81dd63dd7..8bfc272f93 100644 --- a/pkg/abaputils/abaputils_test.go +++ b/pkg/abaputils/abaputils_test.go @@ -309,7 +309,7 @@ func TestHandleHTTPError(t 
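For illustration, a minimal sketch (not part of the patch) of how two of the changes above fit together: HandleHTTPError now returns the parsed ABAP error code alongside the error, and ClientMock can pair each stubbed body with its own error via the new ErrorList field. The error payload mirrors the one used in the package's own tests; the endpoint URL and message are placeholders.

```go
package main

import (
	"fmt"

	"github.com/SAP/jenkins-library/pkg/abaputils"
	"github.com/pkg/errors"
)

func main() {
	// ClientMock consumes BodyList and ErrorList from the end, so the first
	// request below returns the ABAP error payload together with "HTTP 400".
	client := &abaputils.ClientMock{
		BodyList: []string{
			`{"d" : { "status" : "S" } }`,
			`{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "en", "value" : "Try again later" } } }`,
		},
		ErrorList: []error{
			nil,
			errors.New("HTTP 400"),
		},
		Token:      "myToken",
		StatusCode: 400,
	}

	con := abaputils.ConnectionDetailsHTTP{URL: "https://example.com/endpoint"}
	resp, err := abaputils.GetHTTPResponse("POST", con, []byte(`{}`), client)

	// New contract: the ABAP error code (here "A4C_A2G/228") is returned as
	// well, so callers such as the retry logic can decide whether to retry.
	errorCode, err := abaputils.HandleHTTPError(resp, err, "request failed", con)
	fmt.Println(errorCode, err)
}
```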
*testing.T) { receivedErr := errors.New(errorValue) message := "Custom Error Message" - err := HandleHTTPError(&resp, receivedErr, message, ConnectionDetailsHTTP{}) + _, err := HandleHTTPError(&resp, receivedErr, message, ConnectionDetailsHTTP{}) assert.EqualError(t, err, fmt.Sprintf("%s: %s - %s", receivedErr.Error(), abapErrorCode, abapErrorMessage)) log.Entry().Info(err.Error()) }) @@ -328,7 +328,7 @@ func TestHandleHTTPError(t *testing.T) { receivedErr := errors.New(errorValue) message := "Custom Error Message" - err := HandleHTTPError(&resp, receivedErr, message, ConnectionDetailsHTTP{}) + _, err := HandleHTTPError(&resp, receivedErr, message, ConnectionDetailsHTTP{}) assert.EqualError(t, err, fmt.Sprintf("%s", receivedErr.Error())) log.Entry().Info(err.Error()) }) @@ -347,7 +347,7 @@ func TestHandleHTTPError(t *testing.T) { receivedErr := errors.New(errorValue) message := "Custom Error Message" - err := HandleHTTPError(&resp, receivedErr, message, ConnectionDetailsHTTP{}) + _, err := HandleHTTPError(&resp, receivedErr, message, ConnectionDetailsHTTP{}) assert.EqualError(t, err, fmt.Sprintf("%s", receivedErr.Error())) log.Entry().Info(err.Error()) }) @@ -361,7 +361,7 @@ func TestHandleHTTPError(t *testing.T) { _, hook := test.NewNullLogger() log.RegisterHook(hook) - err := HandleHTTPError(nil, receivedErr, message, ConnectionDetailsHTTP{}) + _, err := HandleHTTPError(nil, receivedErr, message, ConnectionDetailsHTTP{}) assert.EqualError(t, err, fmt.Sprintf("%s", receivedErr.Error())) assert.Equal(t, 5, len(hook.Entries), "Expected a different number of entries") diff --git a/pkg/abaputils/manageGitRepositoryUtils.go b/pkg/abaputils/manageGitRepositoryUtils.go index 679b001c9d..84fe060dd6 100644 --- a/pkg/abaputils/manageGitRepositoryUtils.go +++ b/pkg/abaputils/manageGitRepositoryUtils.go @@ -1,75 +1,71 @@ package abaputils import ( - "encoding/json" "fmt" - "io" "reflect" "sort" - "strconv" "strings" "time" - piperhttp "github.com/SAP/jenkins-library/pkg/http" "github.com/SAP/jenkins-library/pkg/log" "github.com/pkg/errors" ) -const failureMessageClonePull = "Could not pull the Repository / Software Component " const numberOfEntriesPerPage = 100000 const logOutputStatusLength = 10 const logOutputTimestampLength = 29 -// PollEntity periodically polls the pull/import entity to get the status. Check if the import is still running -func PollEntity(repositoryName string, connectionDetails ConnectionDetailsHTTP, client piperhttp.Sender, pollIntervall time.Duration) (string, error) { +// PollEntity periodically polls the action entity to get the status. 
Check if the import is still running +func PollEntity(api SoftwareComponentApiInterface, pollIntervall time.Duration) (string, error) { log.Entry().Info("Start polling the status...") - var status string = "R" + var statusCode string = "R" + var err error for { - pullEntity, responseStatus, err := GetStatus(failureMessageClonePull+repositoryName, connectionDetails, client) + // pullEntity, responseStatus, err := api.GetStatus(failureMessageClonePull+repositoryName, connectionDetails, client) + statusCode, err = api.GetAction() if err != nil { - return status, err + return statusCode, err } - status = pullEntity.Status - log.Entry().WithField("StatusCode", responseStatus).Info("Status: " + pullEntity.StatusDescription) - if pullEntity.Status != "R" { - PrintLogs(repositoryName, connectionDetails, client) + if statusCode != "R" && statusCode != "Q" { + + PrintLogs(api) break } time.Sleep(pollIntervall) } - return status, nil + return statusCode, nil } -func PrintLogs(repositoryName string, connectionDetails ConnectionDetailsHTTP, client piperhttp.Sender) { - connectionDetails.URL = connectionDetails.URL + "?$expand=to_Log_Overview" - entity, _, err := GetStatus(failureMessageClonePull+repositoryName, connectionDetails, client) - if err != nil || len(entity.ToLogOverview.Results) == 0 { +func PrintLogs(api SoftwareComponentApiInterface) { + // connectionDetails.URL = connectionDetails.URL + "?$expand=to_Log_Overview" + results, err := api.GetLogOverview() + if err != nil || len(results) == 0 { // return if no logs are available return } // Sort logs - sort.SliceStable(entity.ToLogOverview.Results, func(i, j int) bool { - return entity.ToLogOverview.Results[i].Index < entity.ToLogOverview.Results[j].Index + sort.SliceStable(results, func(i, j int) bool { + return results[i].Index < results[j].Index }) - printOverview(entity) + printOverview(results) // Print Details - for _, logEntryForDetails := range entity.ToLogOverview.Results { - printLog(logEntryForDetails, connectionDetails, client) + for _, logEntryForDetails := range results { + printLog(logEntryForDetails, api) } - AddDefaultDashedLine() + AddDefaultDashedLine(1) return } -func printOverview(entity PullEntity) { +func printOverview(results []LogResultsV2) { - logOutputPhaseLength, logOutputLineLength := calculateLenghts(entity) + logOutputPhaseLength, logOutputLineLength := calculateLenghts(results) log.Entry().Infof("\n") @@ -79,15 +75,15 @@ func printOverview(entity PullEntity) { printDashedLine(logOutputLineLength) - for _, logEntry := range entity.ToLogOverview.Results { + for _, logEntry := range results { log.Entry().Infof("| %-"+fmt.Sprint(logOutputPhaseLength)+"s | %"+fmt.Sprint(logOutputStatusLength)+"s | %-"+fmt.Sprint(logOutputTimestampLength)+"s |", logEntry.Name, logEntry.Status, ConvertTime(logEntry.Timestamp)) } printDashedLine(logOutputLineLength) } -func calculateLenghts(entity PullEntity) (int, int) { +func calculateLenghts(results []LogResultsV2) (int, int) { phaseLength := 22 - for _, logEntry := range entity.ToLogOverview.Results { + for _, logEntry := range results { if l := len(logEntry.Name); l > phaseLength { phaseLength = l } @@ -101,59 +97,47 @@ func printDashedLine(i int) { log.Entry().Infof(strings.Repeat("-", i)) } -func printLog(logOverviewEntry LogResultsV2, connectionDetails ConnectionDetailsHTTP, client piperhttp.Sender) { +func printLog(logOverviewEntry LogResultsV2, api SoftwareComponentApiInterface) { page := 0 - printHeader(logOverviewEntry) - for { - connectionDetails.URL = 
logOverviewEntry.ToLogProtocol.Deferred.URI + getLogProtocolQuery(page) - entity, err := GetProtocol(failureMessageClonePull, connectionDetails, client) - - printLogProtocolEntries(logOverviewEntry, entity) - + logProtocols, count, err := api.GetLogProtocol(logOverviewEntry, page) + printLogProtocolEntries(logOverviewEntry, logProtocols) page += 1 - if allLogsHaveBeenPrinted(entity, page, err) { + if allLogsHaveBeenPrinted(logProtocols, page, count, err) { break } } - } -func printLogProtocolEntries(logEntry LogResultsV2, entity LogProtocolResults) { +func printLogProtocolEntries(logEntry LogResultsV2, logProtocols []LogProtocol) { - sort.SliceStable(entity.Results, func(i, j int) bool { - return entity.Results[i].ProtocolLine < entity.Results[j].ProtocolLine + sort.SliceStable(logProtocols, func(i, j int) bool { + return logProtocols[i].ProtocolLine < logProtocols[j].ProtocolLine }) - if logEntry.Status != `Success` { - for _, entry := range entity.Results { + for _, entry := range logProtocols { log.Entry().Info(entry.Description) } - } else { - for _, entry := range entity.Results { + for _, entry := range logProtocols { log.Entry().Debug(entry.Description) } } } -func allLogsHaveBeenPrinted(entity LogProtocolResults, page int, err error) bool { - allPagesHaveBeenRead := false - numberOfProtocols, errConversion := strconv.Atoi(entity.Count) - if errConversion == nil { - allPagesHaveBeenRead = numberOfProtocols <= page*numberOfEntriesPerPage - } - return (err != nil || allPagesHaveBeenRead || reflect.DeepEqual(entity.Results, LogProtocolResults{})) +func allLogsHaveBeenPrinted(protocols []LogProtocol, page int, count int, err error) bool { + allPagesHaveBeenRead := count <= page*numberOfEntriesPerPage + return (err != nil || allPagesHaveBeenRead || reflect.DeepEqual(protocols, []LogProtocol{})) } func printHeader(logEntry LogResultsV2) { if logEntry.Status != `Success` { log.Entry().Infof("\n") - AddDefaultDashedLine() + AddDefaultDashedLine(1) log.Entry().Infof("%s (%v)", logEntry.Name, ConvertTime(logEntry.Timestamp)) - AddDefaultDashedLine() + AddDefaultDashedLine(1) } else { log.Entry().Debugf("\n") AddDebugDashedLine() @@ -162,72 +146,6 @@ func printHeader(logEntry LogResultsV2) { } } -func getLogProtocolQuery(page int) string { - skip := page * numberOfEntriesPerPage - top := numberOfEntriesPerPage - - return fmt.Sprintf("?$skip=%s&$top=%s&$inlinecount=allpages", fmt.Sprint(skip), fmt.Sprint(top)) -} - -func GetStatus(failureMessage string, connectionDetails ConnectionDetailsHTTP, client piperhttp.Sender) (body PullEntity, status string, err error) { - resp, err := GetHTTPResponse("GET", connectionDetails, nil, client) - if err != nil { - log.SetErrorCategory(log.ErrorInfrastructure) - err = HandleHTTPError(resp, err, failureMessage, connectionDetails) - if resp != nil { - status = resp.Status - } - return body, status, err - } - defer resp.Body.Close() - - // Parse response - var abapResp map[string]*json.RawMessage - bodyText, _ := io.ReadAll(resp.Body) - - marshallError := json.Unmarshal(bodyText, &abapResp) - if marshallError != nil { - return body, status, errors.Wrap(marshallError, "Could not parse response from the ABAP Environment system") - } - marshallError = json.Unmarshal(*abapResp["d"], &body) - if marshallError != nil { - return body, status, errors.Wrap(marshallError, "Could not parse response from the ABAP Environment system") - } - - if reflect.DeepEqual(PullEntity{}, body) { - log.Entry().WithField("StatusCode", resp.Status).Error(failureMessage) - 
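As the updated tests further down show, polling is now driven through a SoftwareComponentApiInterface obtained from a SoftwareComponentApiManager rather than through raw connection details. A minimal usage sketch under the assumption that the manager and interface behave as in this patch; the credentials, URL, and poll interval are placeholders, and the sketch only runs against a reachable ABAP environment since the login handshake happens behind the API object.

```go
package main

import (
	"time"

	"github.com/SAP/jenkins-library/pkg/abaputils"
	piperhttp "github.com/SAP/jenkins-library/pkg/http"
)

func main() {
	con := abaputils.ConnectionDetailsHTTP{
		User:     "SOME_USER",
		Password: "********",
		URL:      "https://my-abap-system.example.com",
	}
	repo := abaputils.Repository{Name: "/DMO/REPO", Branch: "main"}

	// The manager decides which SAP_COM_* implementation to hand out.
	manager := abaputils.SoftwareComponentApiManager{Client: &piperhttp.Client{}}
	api, err := manager.GetAPI(con, repo)
	if err != nil {
		panic(err)
	}

	// Trigger a pull, then poll until the action leaves the running ("R") /
	// queued ("Q") states; PollEntity prints the log overview and protocols.
	if err := api.Pull(); err != nil {
		panic(err)
	}
	status, err := abaputils.PollEntity(api, 10*time.Second)
	if err != nil {
		panic(err)
	}
	_ = status // e.g. "S" for success, "E" for error
}
```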
log.SetErrorCategory(log.ErrorInfrastructure) - var err = errors.New("Request to ABAP System not successful") - return body, resp.Status, err - } - return body, resp.Status, nil -} - -func GetProtocol(failureMessage string, connectionDetails ConnectionDetailsHTTP, client piperhttp.Sender) (body LogProtocolResults, err error) { - resp, err := GetHTTPResponse("GET", connectionDetails, nil, client) - if err != nil { - log.SetErrorCategory(log.ErrorInfrastructure) - err = HandleHTTPError(resp, err, failureMessage, connectionDetails) - return body, err - } - defer resp.Body.Close() - - // Parse response - var abapResp map[string]*json.RawMessage - bodyText, _ := io.ReadAll(resp.Body) - - marshallError := json.Unmarshal(bodyText, &abapResp) - if marshallError != nil { - return body, errors.Wrap(marshallError, "Could not parse response from the ABAP Environment system") - } - marshallError = json.Unmarshal(*abapResp["d"], &body) - if marshallError != nil { - return body, errors.Wrap(marshallError, "Could not parse response from the ABAP Environment system") - } - - return body, nil -} - // GetRepositories for parsing one or multiple branches and repositories from repositories file or branchName and repositoryName configuration func GetRepositories(config *RepositoriesConfig, branchRequired bool) ([]Repository, error) { var repositories = make([]Repository, 0) @@ -308,123 +226,12 @@ func (repo *Repository) GetPullRequestBody() (body string) { return body } +func (repo *Repository) GetPullActionRequestBody() (body string) { + return `{` + `"commit_id":"` + repo.CommitID + `", ` + `"tag_name":"` + repo.Tag + `"` + `}` +} + func (repo *Repository) GetPullLogString() (logString string) { commitOrTag := repo.GetLogStringForCommitOrTag() logString = "repository / software component '" + repo.Name + "'" + commitOrTag return logString } - -/**************************************** - * Structs for the A4C_A2G_GHA service * - ****************************************/ - -// PullEntity struct for the Pull/Import entity A4C_A2G_GHA_SC_IMP -type PullEntity struct { - Metadata AbapMetadata `json:"__metadata"` - UUID string `json:"uuid"` - Namespace string `json:"namepsace"` - ScName string `json:"sc_name"` - ImportType string `json:"import_type"` - BranchName string `json:"branch_name"` - StartedByUser string `json:"user_name"` - Status string `json:"status"` - StatusDescription string `json:"status_descr"` - CommitID string `json:"commit_id"` - StartTime string `json:"start_time"` - ChangeTime string `json:"change_time"` - ToExecutionLog AbapLogs `json:"to_Execution_log"` - ToTransportLog AbapLogs `json:"to_Transport_log"` - ToLogOverview AbapLogsV2 `json:"to_Log_Overview"` -} - -// BranchEntity struct for the Branch entity A4C_A2G_GHA_SC_BRANCH -type BranchEntity struct { - Metadata AbapMetadata `json:"__metadata"` - ScName string `json:"sc_name"` - Namespace string `json:"namepsace"` - BranchName string `json:"branch_name"` - ParentBranch string `json:"derived_from"` - CreatedBy string `json:"created_by"` - CreatedOn string `json:"created_on"` - IsActive bool `json:"is_active"` - CommitID string `json:"commit_id"` - CommitMessage string `json:"commit_message"` - LastCommitBy string `json:"last_commit_by"` - LastCommitOn string `json:"last_commit_on"` -} - -// CloneEntity struct for the Clone entity A4C_A2G_GHA_SC_CLONE -type CloneEntity struct { - Metadata AbapMetadata `json:"__metadata"` - UUID string `json:"uuid"` - ScName string `json:"sc_name"` - BranchName string `json:"branch_name"` - ImportType string 
`json:"import_type"` - Namespace string `json:"namepsace"` - Status string `json:"status"` - StatusDescription string `json:"status_descr"` - StartedByUser string `json:"user_name"` - StartTime string `json:"start_time"` - ChangeTime string `json:"change_time"` -} - -// AbapLogs struct for ABAP logs -type AbapLogs struct { - Results []LogResults `json:"results"` -} - -type AbapLogsV2 struct { - Results []LogResultsV2 `json:"results"` -} - -type LogResultsV2 struct { - Metadata AbapMetadata `json:"__metadata"` - Index int `json:"log_index"` - Name string `json:"log_name"` - Status string `json:"type_of_found_issues"` - Timestamp string `json:"timestamp"` - ToLogProtocol LogProtocolDeferred `json:"to_Log_Protocol"` -} - -type LogProtocolDeferred struct { - Deferred URI `json:"__deferred"` -} - -type URI struct { - URI string `json:"uri"` -} - -type LogProtocolResults struct { - Results []LogProtocol `json:"results"` - Count string `json:"__count"` -} - -type LogProtocol struct { - Metadata AbapMetadata `json:"__metadata"` - OverviewIndex int `json:"log_index"` - ProtocolLine int `json:"index_no"` - Type string `json:"type"` - Description string `json:"descr"` - Timestamp string `json:"timestamp"` -} - -// LogResults struct for Execution and Transport Log entities A4C_A2G_GHA_SC_LOG_EXE and A4C_A2G_GHA_SC_LOG_TP -type LogResults struct { - Index string `json:"index_no"` - Type string `json:"type"` - Description string `json:"descr"` - Timestamp string `json:"timestamp"` -} - -// RepositoriesConfig struct for parsing one or multiple branches and repositories configurations -type RepositoriesConfig struct { - BranchName string - CommitID string - RepositoryName string - RepositoryNames []string - Repositories string -} - -type EntitySetsForManageGitRepository struct { - EntitySets []string `json:"EntitySets"` -} diff --git a/pkg/abaputils/manageGitRepositoryUtils_test.go b/pkg/abaputils/manageGitRepositoryUtils_test.go index 4f791e39d6..9b6cf3ca3f 100644 --- a/pkg/abaputils/manageGitRepositoryUtils_test.go +++ b/pkg/abaputils/manageGitRepositoryUtils_test.go @@ -10,7 +10,6 @@ import ( "os" "testing" - "github.com/pkg/errors" "github.com/stretchr/testify/assert" ) @@ -46,33 +45,25 @@ func TestPollEntity(t *testing.T) { logResultSuccess, `{"d" : { "status" : "S" } }`, `{"d" : { "status" : "R" } }`, + `{"d" : { "status" : "Q" } }`, + `{}`, }, Token: "myToken", StatusCode: 200, } - options := AbapEnvironmentOptions{ - CfAPIEndpoint: "https://api.endpoint.com", - CfOrg: "testOrg", - CfSpace: "testSpace", - CfServiceInstance: "testInstance", - CfServiceKeyName: "testServiceKey", - Username: "testUser", - Password: "testPassword", - } - - config := AbapEnvironmentCheckoutBranchOptions{ - AbapEnvOptions: options, - RepositoryName: "testRepo1", - } - con := ConnectionDetailsHTTP{ User: "MY_USER", Password: "MY_PW", URL: "https://api.endpoint.com/Entity/", XCsrfToken: "MY_TOKEN", } - status, _ := PollEntity(config.RepositoryName, con, client, 0) + + swcManager := SoftwareComponentApiManager{Client: client, Force0510: true} + repo := Repository{Name: "testRepo1"} + api, _ := swcManager.GetAPI(con, repo) + + status, _ := PollEntity(api, 0) assert.Equal(t, "S", status) assert.Equal(t, 0, len(client.BodyList), "Not all requests were done") }) @@ -86,33 +77,25 @@ func TestPollEntity(t *testing.T) { logResultError, `{"d" : { "status" : "E" } }`, `{"d" : { "status" : "R" } }`, + `{"d" : { "status" : "Q" } }`, + `{}`, }, Token: "myToken", StatusCode: 200, } - options := AbapEnvironmentOptions{ - CfAPIEndpoint: 
"https://api.endpoint.com", - CfOrg: "testOrg", - CfSpace: "testSpace", - CfServiceInstance: "testInstance", - CfServiceKeyName: "testServiceKey", - Username: "testUser", - Password: "testPassword", - } - - config := AbapEnvironmentCheckoutBranchOptions{ - AbapEnvOptions: options, - RepositoryName: "testRepo1", - } - con := ConnectionDetailsHTTP{ User: "MY_USER", Password: "MY_PW", URL: "https://api.endpoint.com/Entity/", XCsrfToken: "MY_TOKEN", } - status, _ := PollEntity(config.RepositoryName, con, client, 0) + + swcManager := SoftwareComponentApiManager{Client: client, Force0510: true} + repo := Repository{Name: "testRepo1"} + api, _ := swcManager.GetAPI(con, repo) + + status, _ := PollEntity(api, 0) assert.Equal(t, "E", status) assert.Equal(t, 0, len(client.BodyList), "Not all requests were done") }) @@ -317,22 +300,3 @@ func TestCreateRequestBodies(t *testing.T) { assert.Equal(t, `{"sc_name":"/DMO/REPO", "tag_name":"myTag"}`, body, "Expected different body") }) } - -func TestGetStatus(t *testing.T) { - t.Run("Graceful Exit", func(t *testing.T) { - - client := &ClientMock{ - NilResponse: true, - Error: errors.New("Backend Error"), - StatusCode: 500, - } - connectionDetails := ConnectionDetailsHTTP{ - URL: "example.com", - } - - _, status, err := GetStatus("failure message", connectionDetails, client) - - assert.Error(t, err, "Expected Error") - assert.Equal(t, "", status) - }) -} diff --git a/pkg/abaputils/sap_com_0510.go b/pkg/abaputils/sap_com_0510.go new file mode 100644 index 0000000000..fcf0b6938e --- /dev/null +++ b/pkg/abaputils/sap_com_0510.go @@ -0,0 +1,385 @@ +package abaputils + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "net/http/cookiejar" + "reflect" + "strconv" + "strings" + "time" + + piperhttp "github.com/SAP/jenkins-library/pkg/http" + "github.com/SAP/jenkins-library/pkg/log" + "github.com/pkg/errors" + "k8s.io/utils/strings/slices" +) + +type SAP_COM_0510 struct { + con ConnectionDetailsHTTP + client piperhttp.Sender + repository Repository + path string + cloneEntity string + repositoryEntity string + tagsEntity string + checkoutAction string + actionEntity string + uuid string + failureMessage string + maxRetries int + retryBaseSleepUnit time.Duration + retryMaxSleepTime time.Duration + retryAllowedErrorCodes []string +} + +func (api *SAP_COM_0510) init(con ConnectionDetailsHTTP, client piperhttp.Sender, repo Repository) { + api.con = con + api.client = client + api.repository = repo + api.path = "/sap/opu/odata/sap/MANAGE_GIT_REPOSITORY" + api.cloneEntity = "/Clones" + api.repositoryEntity = "/Repositories" + api.tagsEntity = "/Tags" + api.actionEntity = "/Pull" + api.checkoutAction = "/checkout_branch" + api.failureMessage = "The action of the Repository / Software Component " + api.repository.Name + " failed" + api.maxRetries = 3 + api.setSleepTimeConfig(1*time.Second, 120*time.Second) + api.retryAllowedErrorCodes = append(api.retryAllowedErrorCodes, "A4C_A2G/228") + api.retryAllowedErrorCodes = append(api.retryAllowedErrorCodes, "A4C_A2G/501") +} + +func (api *SAP_COM_0510) getUUID() string { + return api.uuid +} + +func (api *SAP_COM_0510) CreateTag(tag Tag) error { + + if reflect.DeepEqual(Tag{}, tag) { + return errors.New("No Tag provided") + } + + con := api.con + con.URL = api.con.URL + api.path + api.tagsEntity + + requestBodyStruct := CreateTagBody{RepositoryName: api.repository.Name, CommitID: api.repository.CommitID, Tag: tag.TagName, Description: tag.TagDescription} + jsonBody, err := json.Marshal(&requestBodyStruct) + if err != nil { + 
return err + } + return api.triggerRequest(con, jsonBody) +} + +func (api *SAP_COM_0510) CheckoutBranch() error { + + if api.repository.Name == "" || api.repository.Branch == "" { + return fmt.Errorf("Failed to trigger checkout: %w", errors.New("Repository and/or Branch Configuration is empty. Please make sure that you have specified the correct values")) + } + + // the request looks like: POST/sap/opu/odata/sap/MANAGE_GIT_REPOSITORY/checkout_branch?branch_name='newBranch'&sc_name=/DMO/GIT_REPOSITORY' + checkoutConnectionDetails := api.con + checkoutConnectionDetails.URL = api.con.URL + api.path + api.checkoutAction + `?branch_name='` + api.repository.Branch + `'&sc_name='` + api.repository.Name + `'` + jsonBody := []byte(``) + + return api.triggerRequest(checkoutConnectionDetails, jsonBody) +} + +func (api *SAP_COM_0510) parseActionResponse(resp *http.Response, err error) (ActionEntity, error) { + var body ActionEntity + var abapResp map[string]*json.RawMessage + bodyText, errRead := io.ReadAll(resp.Body) + if errRead != nil { + return ActionEntity{}, err + } + if err := json.Unmarshal(bodyText, &abapResp); err != nil { + return ActionEntity{}, err + } + if err := json.Unmarshal(*abapResp["d"], &body); err != nil { + return ActionEntity{}, err + } + + if reflect.DeepEqual(ActionEntity{}, body) { + log.Entry().WithField("StatusCode", resp.Status).WithField("branchName", api.repository.Branch).Error("Could not switch to specified branch") + err := errors.New("Request to ABAP System not successful") + return ActionEntity{}, err + } + return body, nil +} + +func (api *SAP_COM_0510) Pull() error { + + // Trigger the Pull of a Repository + if api.repository.Name == "" { + return errors.New("An empty string was passed for the parameter 'repositoryName'") + } + + pullConnectionDetails := api.con + pullConnectionDetails.URL = api.con.URL + api.path + api.actionEntity + + jsonBody := []byte(api.repository.GetPullRequestBody()) + return api.triggerRequest(pullConnectionDetails, jsonBody) +} + +func (api *SAP_COM_0510) GetLogProtocol(logOverviewEntry LogResultsV2, page int) (result []LogProtocol, count int, err error) { + + connectionDetails := api.con + connectionDetails.URL = logOverviewEntry.ToLogProtocol.Deferred.URI + api.getLogProtocolQuery(page) + resp, err := GetHTTPResponse("GET", connectionDetails, nil, api.client) + if err != nil { + log.SetErrorCategory(log.ErrorInfrastructure) + _, err = HandleHTTPError(resp, err, api.failureMessage, connectionDetails) + return nil, 0, err + } + defer resp.Body.Close() + + // Parse response + var body LogProtocolResults + var abapResp map[string]*json.RawMessage + bodyText, _ := io.ReadAll(resp.Body) + + marshallError := json.Unmarshal(bodyText, &abapResp) + if marshallError != nil { + return nil, 0, errors.Wrap(marshallError, "Could not parse response from the ABAP Environment system") + } + marshallError = json.Unmarshal(*abapResp["d"], &body) + if marshallError != nil { + return nil, 0, errors.Wrap(marshallError, "Could not parse response from the ABAP Environment system") + } + + count, errConv := strconv.Atoi(body.Count) + if errConv != nil { + return nil, 0, errors.Wrap(errConv, "Could not parse response from the ABAP Environment system") + } + + return body.Results, count, nil +} + +func (api *SAP_COM_0510) GetLogOverview() (result []LogResultsV2, err error) { + + connectionDetails := api.con + connectionDetails.URL = api.con.URL + api.path + api.actionEntity + "(uuid=guid'" + api.getUUID() + "')" + "?$expand=to_Log_Overview" + resp, err := 
GetHTTPResponse("GET", connectionDetails, nil, api.client) + if err != nil { + log.SetErrorCategory(log.ErrorInfrastructure) + _, err = HandleHTTPError(resp, err, api.failureMessage, connectionDetails) + return nil, err + } + defer resp.Body.Close() + + // Parse response + var body ActionEntity + var abapResp map[string]*json.RawMessage + bodyText, _ := io.ReadAll(resp.Body) + + marshallError := json.Unmarshal(bodyText, &abapResp) + if marshallError != nil { + return nil, errors.Wrap(marshallError, "Could not parse response from the ABAP Environment system") + } + marshallError = json.Unmarshal(*abapResp["d"], &body) + if marshallError != nil { + return nil, errors.Wrap(marshallError, "Could not parse response from the ABAP Environment system") + } + + if reflect.DeepEqual(ActionEntity{}, body) { + log.Entry().WithField("StatusCode", resp.Status).Error(api.failureMessage) + log.SetErrorCategory(log.ErrorInfrastructure) + var err = errors.New("Request to ABAP System not successful") + return nil, err + } + + abapStatusCode := body.Status + log.Entry().Info("Status: " + abapStatusCode + " - " + body.StatusDescription) + return body.ToLogOverview.Results, nil + +} + +func (api *SAP_COM_0510) GetAction() (string, error) { + + connectionDetails := api.con + connectionDetails.URL = api.con.URL + api.path + api.actionEntity + "(uuid=guid'" + api.getUUID() + "')" + resp, err := GetHTTPResponse("GET", connectionDetails, nil, api.client) + if err != nil { + log.SetErrorCategory(log.ErrorInfrastructure) + _, err = HandleHTTPError(resp, err, api.failureMessage, connectionDetails) + return "E", err + } + defer resp.Body.Close() + + // Parse Response + body, parseError := api.parseActionResponse(resp, err) + if parseError != nil { + return "E", parseError + } + + api.uuid = body.UUID + + abapStatusCode := body.Status + log.Entry().Info("Status: " + abapStatusCode + " - " + body.StatusDescription) + return abapStatusCode, nil +} + +func (api *SAP_COM_0510) GetRepository() (bool, string, error) { + + if api.repository.Name == "" { + return false, "", errors.New("An empty string was passed for the parameter 'repositoryName'") + } + + swcConnectionDetails := api.con + swcConnectionDetails.URL = api.con.URL + api.path + api.repositoryEntity + "('" + strings.Replace(api.repository.Name, "/", "%2F", -1) + "')" + resp, err := GetHTTPResponse("GET", swcConnectionDetails, nil, api.client) + if err != nil { + _, errRepo := HandleHTTPError(resp, err, "Reading the Repository / Software Component failed", api.con) + return false, "", errRepo + } + defer resp.Body.Close() + + var body RepositoryEntity + var abapResp map[string]*json.RawMessage + bodyText, errRead := io.ReadAll(resp.Body) + if errRead != nil { + return false, "", err + } + + if err := json.Unmarshal(bodyText, &abapResp); err != nil { + return false, "", err + } + if err := json.Unmarshal(*abapResp["d"], &body); err != nil { + return false, "", err + } + if reflect.DeepEqual(RepositoryEntity{}, body) { + log.Entry().WithField("StatusCode", resp.Status).WithField("repositoryName", api.repository.Name).WithField("branchName", api.repository.Branch).WithField("commitID", api.repository.CommitID).WithField("Tag", api.repository.Tag).Error("Could not Clone the Repository / Software Component") + err := errors.New("Request to ABAP System not successful") + return false, "", err + } + + if body.AvailOnInst { + return true, body.ActiveBranch, nil + } + return false, "", err + +} + +func (api *SAP_COM_0510) Clone() error { + + // Trigger the Clone of a 
Repository + if api.repository.Name == "" { + return errors.New("An empty string was passed for the parameter 'repositoryName'") + } + + cloneConnectionDetails := api.con + cloneConnectionDetails.URL = api.con.URL + api.path + api.cloneEntity + body := []byte(api.repository.GetCloneRequestBody()) + + return api.triggerRequest(cloneConnectionDetails, body) + +} + +func (api *SAP_COM_0510) triggerRequest(cloneConnectionDetails ConnectionDetailsHTTP, jsonBody []byte) error { + var err error + var body ActionEntity + var resp *http.Response + var errorCode string + + for i := 0; i <= api.maxRetries; i++ { + if i > 0 { + sleepTime, err := api.getSleepTime(i + 5) + if err != nil { + // reached max retry duration + break + } + log.Entry().Infof("Retrying in %s", sleepTime.String()) + time.Sleep(sleepTime) + } + resp, err = GetHTTPResponse("POST", cloneConnectionDetails, jsonBody, api.client) + if err != nil { + errorCode, err = HandleHTTPError(resp, err, "Triggering the action failed", api.con) + if slices.Contains(api.retryAllowedErrorCodes, errorCode) { + // Error Code allows for retry + continue + } else { + break + } + } + defer resp.Body.Close() + log.Entry().WithField("StatusCode", resp.Status).WithField("repositoryName", api.repository.Name).WithField("branchName", api.repository.Branch).WithField("commitID", api.repository.CommitID).WithField("Tag", api.repository.Tag).Info("Triggered action of Repository / Software Component") + + body, err = api.parseActionResponse(resp, err) + break + } + api.uuid = body.UUID + return err +} + +// initialRequest implements SoftwareComponentApiInterface. +func (api *SAP_COM_0510) initialRequest() error { + // Configuring the HTTP Client and CookieJar + cookieJar, errorCookieJar := cookiejar.New(nil) + if errorCookieJar != nil { + return errors.Wrap(errorCookieJar, "Could not create a Cookie Jar") + } + + api.client.SetOptions(piperhttp.ClientOptions{ + MaxRequestDuration: 180 * time.Second, + CookieJar: cookieJar, + Username: api.con.User, + Password: api.con.Password, + }) + + headConnection := api.con + headConnection.XCsrfToken = "fetch" + headConnection.URL = api.con.URL + api.path + + // Loging into the ABAP System - getting the x-csrf-token and cookies + resp, err := GetHTTPResponse("HEAD", headConnection, nil, api.client) + if err != nil { + _, err = HandleHTTPError(resp, err, "Authentication on the ABAP system failed", api.con) + return err + } + defer resp.Body.Close() + + log.Entry().WithField("StatusCode", resp.Status).WithField("ABAP Endpoint", api.con).Debug("Authentication on the ABAP system successful") + api.con.XCsrfToken = resp.Header.Get("X-Csrf-Token") + return nil +} + +// getSleepTime Should return the Fibonacci numbers in the define time unit up to the defined maximum duration +func (api *SAP_COM_0510) getSleepTime(n int) (time.Duration, error) { + + if n == 0 { + return 0, nil + } else if n == 1 { + return 1 * api.retryBaseSleepUnit, nil + } else if n < 0 { + return 0, errors.New("Negative numbers are not allowed") + } + var result, i int + prev := 0 + next := 1 + for i = 2; i <= n; i++ { + result = prev + next + prev = next + next = result + } + sleepTime := time.Duration(result) * api.retryBaseSleepUnit + + if sleepTime > api.retryMaxSleepTime { + return 0, errors.New("Exceeded max sleep time") + } + return sleepTime, nil +} + +// setSleepTimeConfig sets the time unit (seconds, nanoseconds) and the maximum sleep duration +func (api *SAP_COM_0510) setSleepTimeConfig(timeUnit time.Duration, maxSleepTime time.Duration) { + 
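triggerRequest above only retries when HandleHTTPError reports one of the allow-listed codes (A4C_A2G/228, A4C_A2G/501), and getSleepTime spaces those retries along a Fibonacci sequence capped by retryMaxSleepTime. A standalone sketch of that backoff schedule follows; it is a simplified equivalent, not the package's own function.

```go
package main

import (
	"errors"
	"fmt"
	"time"
)

// fibonacciSleep mirrors the behaviour of getSleepTime above: attempt n sleeps
// for the n-th Fibonacci number in base units, and an error is returned once
// the computed duration exceeds the configured maximum.
func fibonacciSleep(n int, base, max time.Duration) (time.Duration, error) {
	if n < 0 {
		return 0, errors.New("negative numbers are not allowed")
	}
	if n == 0 {
		return 0, nil
	}
	prev, next := 0, 1
	for i := 2; i <= n; i++ {
		prev, next = next, prev+next
	}
	sleep := time.Duration(next) * base
	if sleep > max {
		return 0, errors.New("exceeded max sleep time")
	}
	return sleep, nil
}

func main() {
	// With a 1s base unit and a 120s cap this prints 0s, 1s, 1s, 2s, 3s, 5s,
	// 8s, ... 89s and then an error, matching the sequence checked in the
	// unit tests further down (which use nanoseconds instead of seconds).
	for i := 0; i <= 12; i++ {
		d, err := fibonacciSleep(i, time.Second, 120*time.Second)
		fmt.Println(i, d, err)
	}
}
```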
api.retryBaseSleepUnit = timeUnit + api.retryMaxSleepTime = maxSleepTime +} + +func (api *SAP_COM_0510) getLogProtocolQuery(page int) string { + skip := page * numberOfEntriesPerPage + top := numberOfEntriesPerPage + + return fmt.Sprintf("?$skip=%s&$top=%s&$inlinecount=allpages", fmt.Sprint(skip), fmt.Sprint(top)) +} diff --git a/pkg/abaputils/sap_com_0510_test.go b/pkg/abaputils/sap_com_0510_test.go new file mode 100644 index 0000000000..776aa28d7f --- /dev/null +++ b/pkg/abaputils/sap_com_0510_test.go @@ -0,0 +1,483 @@ +//go:build unit +// +build unit + +package abaputils + +import ( + "testing" + "time" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" +) + +var con ConnectionDetailsHTTP +var repo Repository + +func init() { + + con.User = "CC_USER" + con.Password = "123abc" + con.URL = "https://example.com" + + repo.Name = "/DMO/REPO" + repo.Branch = "main" + +} + +func TestRetry(t *testing.T) { + t.Run("Test retry success", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{"d" : { "status" : "R", "UUID" : "GUID" } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Software component lifecycle activities in progress. Try again later..."} } }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + ErrorList: []error{ + nil, + errors.New("HTTP 400"), + nil, + }, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond, Force0510: true} + + api, err := apiManager.GetAPI(con, repo) + api.setSleepTimeConfig(time.Nanosecond, 120*time.Nanosecond) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0510{}, api.(*SAP_COM_0510), "API has wrong type") + + errAction := api.(*SAP_COM_0510).triggerRequest(ConnectionDetailsHTTP{User: "CC_USER", Password: "abc123", URL: "https://example.com/path"}, []byte("{}")) + assert.NoError(t, errAction) + assert.Equal(t, "GUID", api.getUUID(), "API does not cotain correct UUID") + + }) + + t.Run("Test retry not allowed", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{"d" : { "status" : "R", "UUID" : "GUID" } }`, + `{"error" : { "code" : "A4C_A2G/224", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + ErrorList: []error{ + nil, + errors.New("HTTP 400"), + nil, + }, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond, Force0510: true} + + api, err := apiManager.GetAPI(con, repo) + api.setSleepTimeConfig(time.Nanosecond, 120*time.Nanosecond) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0510{}, api.(*SAP_COM_0510), "API has wrong type") + + errAction := api.(*SAP_COM_0510).triggerRequest(ConnectionDetailsHTTP{User: "CC_USER", Password: "abc123", URL: "https://example.com/path"}, []byte("{}")) + assert.ErrorContains(t, errAction, "HTTP 400: A4C_A2G/224 - Error Text") + assert.Empty(t, api.getUUID(), "API does not cotain correct UUID") + + }) + + t.Run("Test retry maxSleepTime", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : 
"de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + ErrorList: []error{ + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + nil, + }, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond, Force0510: true} + + api, err := apiManager.GetAPI(con, repo) + api.setSleepTimeConfig(time.Nanosecond, 20*time.Nanosecond) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0510{}, api.(*SAP_COM_0510), "API has wrong type") + + api.(*SAP_COM_0510).maxRetries = 20 + + errAction := api.(*SAP_COM_0510).triggerRequest(ConnectionDetailsHTTP{User: "CC_USER", Password: "abc123", URL: "https://example.com/path"}, []byte("{}")) + assert.ErrorContains(t, errAction, "HTTP 400: A4C_A2G/228 - Error Text") + assert.Empty(t, api.getUUID(), "API does not cotain correct UUID") + + assert.Equal(t, 6, len(client.BodyList), "Expected maxSleepTime to limit requests") + }) + + t.Run("Test retry maxRetries", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + ErrorList: []error{ + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + nil, + }, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond, Force0510: true} + + api, err := apiManager.GetAPI(con, repo) + api.setSleepTimeConfig(time.Nanosecond, 999*time.Nanosecond) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0510{}, api.(*SAP_COM_0510), "API has wrong type") + + api.(*SAP_COM_0510).maxRetries = 3 + + errAction := api.(*SAP_COM_0510).triggerRequest(ConnectionDetailsHTTP{User: "CC_USER", Password: "abc123", URL: "https://example.com/path"}, []byte("{}")) + assert.ErrorContains(t, errAction, "HTTP 400: A4C_A2G/228 - Error Text") + assert.Empty(t, api.getUUID(), "API does not cotain correct UUID") + + assert.Equal(t, 5, 
len(client.BodyList), "Expected maxRetries to limit requests") + }) + +} +func TestClone(t *testing.T) { + t.Run("Test Clone Success", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{"d" : { "status" : "R", "UUID" : "GUID" } }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond, Force0510: true} + + api, err := apiManager.GetAPI(con, repo) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0510{}, api.(*SAP_COM_0510), "API has wrong type") + + errClone := api.Clone() + assert.NoError(t, errClone) + assert.Equal(t, "GUID", api.getUUID(), "API does not cotain correct UUID") + }) + + t.Run("Test Clone Failure", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{ "d" : {} }`, + `{ "d" : {} }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond, Force0510: true} + + api, err := apiManager.GetAPI(con, repo) + api.setSleepTimeConfig(time.Nanosecond, 120*time.Nanosecond) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0510{}, api.(*SAP_COM_0510), "API has wrong type") + + errClone := api.Clone() + assert.ErrorContains(t, errClone, "Request to ABAP System not successful") + assert.Empty(t, api.getUUID(), "API does not cotain correct UUID") + }) + + t.Run("Test Clone Retry", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{"d" : { "status" : "R", "UUID" : "GUID" } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Software component lifecycle activities in progress. Try again later..."} } }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + ErrorList: []error{ + nil, + errors.New("HTTP 400"), + nil, + }, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond, Force0510: true} + + api, err := apiManager.GetAPI(con, repo) + api.setSleepTimeConfig(time.Nanosecond, 120*time.Nanosecond) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0510{}, api.(*SAP_COM_0510), "API has wrong type") + + errClone := api.Clone() + assert.NoError(t, errClone) + assert.Equal(t, "GUID", api.getUUID(), "API does not cotain correct UUID") + }) +} + +func TestPull(t *testing.T) { + t.Run("Test Pull Success", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{"d" : { "status" : "R", "UUID" : "GUID" } }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond, Force0510: true} + + api, err := apiManager.GetAPI(con, repo) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0510{}, api.(*SAP_COM_0510), "API has wrong type") + + errPull := api.Pull() + assert.NoError(t, errPull) + assert.Equal(t, "GUID", api.getUUID(), "API does not cotain correct UUID") + }) + + t.Run("Test Pull Failure", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{ "d" : {} }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond, Force0510: true} + + api, err := apiManager.GetAPI(con, repo) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0510{}, api.(*SAP_COM_0510), "API has wrong type") + + errPull := api.Pull() + assert.ErrorContains(t, errPull, "Request to ABAP System not successful") + assert.Empty(t, api.getUUID(), "API 
does not cotain correct UUID") + }) +} + +func TestCheckout(t *testing.T) { + t.Run("Test Checkout Success", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{"d" : { "status" : "R", "UUID" : "GUID" } }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond, Force0510: true} + + api, err := apiManager.GetAPI(con, repo) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0510{}, api.(*SAP_COM_0510), "API has wrong type") + + errCheckout := api.CheckoutBranch() + assert.NoError(t, errCheckout) + assert.Equal(t, "GUID", api.getUUID(), "API does not cotain correct UUID") + }) + + t.Run("Test Checkout Failure", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{ "d" : {} }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond, Force0510: true} + + api, err := apiManager.GetAPI(con, repo) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0510{}, api.(*SAP_COM_0510), "API has wrong type") + + errCheckoput := api.CheckoutBranch() + assert.ErrorContains(t, errCheckoput, "Request to ABAP System not successful") + assert.Empty(t, api.getUUID(), "API does not cotain correct UUID") + }) +} + +func TestGetRepo(t *testing.T) { + t.Run("Test GetRepo Success", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{"d" : { "sc_name" : "testRepo1", "avail_on_inst" : true, "active_branch": "testBranch1" } }`, + `{"d" : [] }`, + }, + Token: "myToken", + StatusCode: 200, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond, Force0510: true} + + api, err := apiManager.GetAPI(con, repo) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0510{}, api.(*SAP_COM_0510), "API has wrong type") + + cloned, activeBranch, errAction := api.GetRepository() + assert.True(t, cloned) + assert.Equal(t, "testBranch1", activeBranch) + assert.NoError(t, errAction) + }) +} + +func TestCreateTag(t *testing.T) { + t.Run("Test Tag Success", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{"d" : { "status" : "R", "UUID" : "GUID" } }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond, Force0510: true} + + api, err := apiManager.GetAPI(con, repo) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0510{}, api.(*SAP_COM_0510), "API has wrong type") + + errCreateTag := api.CreateTag(Tag{TagName: "myTag", TagDescription: "descr"}) + assert.NoError(t, errCreateTag) + assert.Equal(t, "GUID", api.getUUID(), "API does not cotain correct UUID") + }) + + t.Run("Test Tag Failure", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{ "d" : {} }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond, Force0510: true} + + api, err := apiManager.GetAPI(con, repo) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0510{}, api.(*SAP_COM_0510), "API has wrong type") + + errCreateTag := api.CreateTag(Tag{TagName: "myTag", TagDescription: "descr"}) + assert.ErrorContains(t, errCreateTag, "Request to ABAP System not successful") + assert.Empty(t, api.getUUID(), "API does not cotain correct UUID") + }) + + t.Run("Test Tag Empty", func(t *testing.T) { + + client := &ClientMock{ + 
BodyList: []string{ + `{ "d" : {} }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond, Force0510: true} + + api, err := apiManager.GetAPI(con, repo) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0510{}, api.(*SAP_COM_0510), "API has wrong type") + + errCreateTag := api.CreateTag(Tag{}) + assert.ErrorContains(t, errCreateTag, "No Tag provided") + assert.Empty(t, api.getUUID(), "API does not cotain correct UUID") + }) +} + +func TestSleepTime(t *testing.T) { + t.Run("Test Sleep Time", func(t *testing.T) { + + api := SAP_COM_0510{ + retryMaxSleepTime: 120 * time.Nanosecond, + retryBaseSleepUnit: 1 * time.Nanosecond, + } + + expectedResults := make([]time.Duration, 12) + expectedResults[0] = 0 + expectedResults[1] = 1 + expectedResults[2] = 1 + expectedResults[3] = 2 + expectedResults[4] = 3 + expectedResults[5] = 5 + expectedResults[6] = 8 + expectedResults[7] = 13 + expectedResults[8] = 21 + expectedResults[9] = 34 + expectedResults[10] = 55 + expectedResults[11] = 89 + results := make([]time.Duration, 12) + var err error + + for i := 0; i <= 11; i++ { + + results[i], err = api.getSleepTime(i) + assert.NoError(t, err) + } + assert.ElementsMatch(t, expectedResults, results) + + _, err = api.getSleepTime(-10) + assert.Error(t, err) + + _, err = api.getSleepTime(12) + assert.ErrorContains(t, err, "Exceeded max sleep time") + }) +} diff --git a/pkg/abaputils/sap_com_0948.go b/pkg/abaputils/sap_com_0948.go new file mode 100644 index 0000000000..18aa31decd --- /dev/null +++ b/pkg/abaputils/sap_com_0948.go @@ -0,0 +1,376 @@ +package abaputils + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "net/http/cookiejar" + "reflect" + "strings" + "time" + + piperhttp "github.com/SAP/jenkins-library/pkg/http" + "github.com/SAP/jenkins-library/pkg/log" + "github.com/pkg/errors" + "k8s.io/utils/strings/slices" +) + +type SAP_COM_0948 struct { + con ConnectionDetailsHTTP + client piperhttp.Sender + repository Repository + path string + cloneAction string + pullAction string + softwareComponentEntity string + branchEntity string + tagsEntity string + checkoutAction string + actionsEntity string + uuid string + failureMessage string + maxRetries int + retryBaseSleepUnit time.Duration + retryMaxSleepTime time.Duration + retryAllowedErrorCodes []string +} + +func (api *SAP_COM_0948) init(con ConnectionDetailsHTTP, client piperhttp.Sender, repo Repository) { + api.con = con + api.client = client + api.repository = repo + api.path = "/sap/opu/odata4/sap/a4c_mswc_api/srvd_a2x/sap/manage_software_components/0001" + api.checkoutAction = "/SAP__self.checkout_branch" + api.softwareComponentEntity = "/SoftwareComponents" + api.actionsEntity = "/Actions" + api.branchEntity = "/Branches" + api.cloneAction = "/SAP__self.clone" + api.pullAction = "/SAP__self.pull" + api.tagsEntity = "/Tags" + api.failureMessage = "The action of the Repository / Software Component " + api.repository.Name + " failed" + api.maxRetries = 3 + api.setSleepTimeConfig(1*time.Second, 120*time.Second) + api.retryAllowedErrorCodes = append(api.retryAllowedErrorCodes, "A4C_A2G/228") + api.retryAllowedErrorCodes = append(api.retryAllowedErrorCodes, "A4C_A2G/501") +} + +func (api *SAP_COM_0948) getUUID() string { + return api.uuid +} + +func (api *SAP_COM_0948) CreateTag(tag Tag) error { + + if reflect.DeepEqual(Tag{}, tag) { + return errors.New("No Tag provided") + } + + con := api.con + con.URL = api.con.URL + api.path + 
api.tagsEntity + + requestBodyStruct := CreateTagBody{RepositoryName: api.repository.Name, CommitID: api.repository.CommitID, Tag: tag.TagName, Description: tag.TagDescription} + jsonBody, err := json.Marshal(&requestBodyStruct) + if err != nil { + return err + } + return api.triggerRequest(con, jsonBody) +} + +func (api *SAP_COM_0948) CheckoutBranch() error { + + if api.repository.Name == "" || api.repository.Branch == "" { + return fmt.Errorf("Failed to trigger checkout: %w", errors.New("Repository and/or Branch Configuration is empty. Please make sure that you have specified the correct values")) + } + + checkoutConnectionDetails := api.con + checkoutConnectionDetails.URL = api.con.URL + api.path + api.branchEntity + api.getRepoNameForPath() + api.getBranchNameForPath() + api.checkoutAction + jsonBody := []byte(`{ + "import_mode" : "", + "execution_mode": "" + }`) + + return api.triggerRequest(checkoutConnectionDetails, jsonBody) +} + +func (api *SAP_COM_0948) parseActionResponse(resp *http.Response, err error) (ActionEntity, error) { + + var body ActionEntity + bodyText, errRead := io.ReadAll(resp.Body) + if errRead != nil { + return ActionEntity{}, err + } + if err := json.Unmarshal(bodyText, &body); err != nil { + return ActionEntity{}, err + } + if reflect.DeepEqual(ActionEntity{}, body) { + log.Entry().WithField("StatusCode", resp.Status).WithField("branchName", api.repository.Branch).Error("Could not switch to specified branch") + err := errors.New("Request to ABAP System not successful") + return ActionEntity{}, err + } + return body, nil +} + +func (api *SAP_COM_0948) Pull() error { + + // Trigger the Pull of a Repository + if api.repository.Name == "" { + return errors.New("An empty string was passed for the parameter 'repositoryName'") + } + + pullConnectionDetails := api.con + pullConnectionDetails.URL = api.con.URL + api.path + api.softwareComponentEntity + api.getRepoNameForPath() + api.pullAction + + jsonBody := []byte(api.repository.GetPullActionRequestBody()) + return api.triggerRequest(pullConnectionDetails, jsonBody) +} + +func (api *SAP_COM_0948) GetLogProtocol(logOverviewEntry LogResultsV2, page int) (result []LogProtocol, count int, err error) { + + connectionDetails := api.con + connectionDetails.URL = api.con.URL + api.path + api.actionsEntity + "/" + api.getUUID() + "/_Log_Overview" + "/" + fmt.Sprint(logOverviewEntry.Index) + "/_Log_Protocol" + api.getLogProtocolQuery(page) + resp, err := GetHTTPResponse("GET", connectionDetails, nil, api.client) + if err != nil { + log.SetErrorCategory(log.ErrorInfrastructure) + _, err = HandleHTTPError(resp, err, api.failureMessage, connectionDetails) + return nil, 0, err + } + defer resp.Body.Close() + + // Parse response + var body LogProtocolResultsV4 + bodyText, _ := io.ReadAll(resp.Body) + + marshallError := json.Unmarshal(bodyText, &body) + if marshallError != nil { + return nil, 0, errors.Wrap(marshallError, "Could not parse response from the ABAP Environment system") + } + + return body.Results, body.Count, nil +} + +func (api *SAP_COM_0948) GetLogOverview() (result []LogResultsV2, err error) { + + connectionDetails := api.con + connectionDetails.URL = api.con.URL + api.path + api.actionsEntity + "/" + api.getUUID() + "/_Log_Overview" + resp, err := GetHTTPResponse("GET", connectionDetails, nil, api.client) + if err != nil { + log.SetErrorCategory(log.ErrorInfrastructure) + _, err = HandleHTTPError(resp, err, api.failureMessage, connectionDetails) + return nil, err + } + defer resp.Body.Close() + + // Parse response + 
var abapResp map[string]*json.RawMessage + bodyText, _ := io.ReadAll(resp.Body) + + marshallError := json.Unmarshal(bodyText, &abapResp) + if marshallError != nil { + return nil, errors.Wrap(marshallError, "Could not parse response from the ABAP Environment system") + } + marshallError = json.Unmarshal(*abapResp["value"], &result) + if marshallError != nil { + return nil, errors.Wrap(marshallError, "Could not parse response from the ABAP Environment system") + } + + if reflect.DeepEqual(LogResultsV2{}, result) { + log.Entry().WithField("StatusCode", resp.Status).Error(api.failureMessage) + log.SetErrorCategory(log.ErrorInfrastructure) + var err = errors.New("Request to ABAP System not successful") + return nil, err + } + return result, nil + +} + +func (api *SAP_COM_0948) GetAction() (string, error) { + + connectionDetails := api.con + connectionDetails.URL = api.con.URL + api.path + api.actionsEntity + "/" + api.getUUID() + resp, err := GetHTTPResponse("GET", connectionDetails, nil, api.client) + if err != nil { + log.SetErrorCategory(log.ErrorInfrastructure) + _, err = HandleHTTPError(resp, err, api.failureMessage, connectionDetails) + return "E", err + } + defer resp.Body.Close() + + // Parse Response + body, parseError := api.parseActionResponse(resp, err) + if parseError != nil { + return "E", parseError + } + + api.uuid = body.UUID + + abapStatusCode := body.Status + log.Entry().Info("Status: " + abapStatusCode + " - " + body.StatusDescription) + return abapStatusCode, nil +} + +func (api *SAP_COM_0948) GetRepository() (bool, string, error) { + + if api.repository.Name == "" { + return false, "", errors.New("An empty string was passed for the parameter 'repositoryName'") + } + + swcConnectionDetails := api.con + swcConnectionDetails.URL = api.con.URL + api.path + api.softwareComponentEntity + api.getRepoNameForPath() + resp, err := GetHTTPResponse("GET", swcConnectionDetails, nil, api.client) + if err != nil { + _, errRepo := HandleHTTPError(resp, err, "Reading the Repository / Software Component failed", api.con) + return false, "", errRepo + } + defer resp.Body.Close() + + var body RepositoryEntity + bodyText, errRead := io.ReadAll(resp.Body) + if errRead != nil { + return false, "", err + } + + if err := json.Unmarshal(bodyText, &body); err != nil { + return false, "", err + } + if reflect.DeepEqual(RepositoryEntity{}, body) { + log.Entry().WithField("StatusCode", resp.Status).WithField("repositoryName", api.repository.Name).WithField("branchName", api.repository.Branch).WithField("commitID", api.repository.CommitID).WithField("Tag", api.repository.Tag).Error("Could not Clone the Repository / Software Component") + err := errors.New("Request to ABAP System not successful") + return false, "", err + } + + if body.AvailOnInst { + return true, body.ActiveBranch, nil + } + return false, "", err + +} + +func (api *SAP_COM_0948) Clone() error { + + // Trigger the Clone of a Repository + if api.repository.Name == "" { + return errors.New("An empty string was passed for the parameter 'repositoryName'") + } + + cloneConnectionDetails := api.con + cloneConnectionDetails.URL = api.con.URL + api.path + api.softwareComponentEntity + api.getRepoNameForPath() + api.cloneAction + body := []byte(api.repository.GetCloneRequestBody()) + + return api.triggerRequest(cloneConnectionDetails, body) + +} + +func (api *SAP_COM_0948) triggerRequest(cloneConnectionDetails ConnectionDetailsHTTP, jsonBody []byte) error { + var err error + var body ActionEntity + var resp *http.Response + var errorCode string + 
+ for i := 0; i <= api.maxRetries; i++ { + if i > 0 { + sleepTime, err := api.getSleepTime(i + 5) + if err != nil { + // reached max retry duration + break + } + log.Entry().Infof("Retrying in %s", sleepTime.String()) + time.Sleep(sleepTime) + } + resp, err = GetHTTPResponse("POST", cloneConnectionDetails, jsonBody, api.client) + if err != nil { + errorCode, err = HandleHTTPError(resp, err, "Triggering the action failed", api.con) + if slices.Contains(api.retryAllowedErrorCodes, errorCode) { + // Error Code allows for retry + continue + } else { + break + } + } + defer resp.Body.Close() + log.Entry().WithField("StatusCode", resp.Status).WithField("repositoryName", api.repository.Name).WithField("branchName", api.repository.Branch).WithField("commitID", api.repository.CommitID).WithField("Tag", api.repository.Tag).Info("Triggered action of Repository / Software Component") + + body, err = api.parseActionResponse(resp, err) + break + } + api.uuid = body.UUID + return err +} + +// initialRequest implements SoftwareComponentApiInterface. +func (api *SAP_COM_0948) initialRequest() error { + // Configuring the HTTP Client and CookieJar + cookieJar, errorCookieJar := cookiejar.New(nil) + if errorCookieJar != nil { + return errors.Wrap(errorCookieJar, "Could not create a Cookie Jar") + } + + api.client.SetOptions(piperhttp.ClientOptions{ + MaxRequestDuration: 180 * time.Second, + CookieJar: cookieJar, + Username: api.con.User, + Password: api.con.Password, + }) + + headConnection := api.con + headConnection.XCsrfToken = "fetch" + headConnection.URL = api.con.URL + api.path + + // Logging into the ABAP System - getting the x-csrf-token and cookies + resp, err := GetHTTPResponse("HEAD", headConnection, nil, api.client) + if err != nil { + _, err = HandleHTTPError(resp, err, "Authentication on the ABAP system failed", api.con) + return err + } + defer resp.Body.Close() + + log.Entry().WithField("StatusCode", resp.Status).WithField("ABAP Endpoint", api.con).Debug("Authentication on the ABAP system successful") + api.con.XCsrfToken = resp.Header.Get("X-Csrf-Token") + return nil +} + +// getSleepTime should return the Fibonacci numbers in the defined time unit up to the defined maximum duration +func (api *SAP_COM_0948) getSleepTime(n int) (time.Duration, error) { + + if n == 0 { + return 0, nil + } else if n == 1 { + return 1 * api.retryBaseSleepUnit, nil + } else if n < 0 { + return 0, errors.New("Negative numbers are not allowed") + } + var result, i int + prev := 0 + next := 1 + for i = 2; i <= n; i++ { + result = prev + next + prev = next + next = result + } + sleepTime := time.Duration(result) * api.retryBaseSleepUnit + + if sleepTime > api.retryMaxSleepTime { + return 0, errors.New("Exceeded max sleep time") + } + return sleepTime, nil +} + +// setSleepTimeConfig sets the time unit (seconds, nanoseconds) and the maximum sleep duration +func (api *SAP_COM_0948) setSleepTimeConfig(timeUnit time.Duration, maxSleepTime time.Duration) { + api.retryBaseSleepUnit = timeUnit + api.retryMaxSleepTime = maxSleepTime +} + +func (api *SAP_COM_0948) getRepoNameForPath() string { + return "/" + strings.ReplaceAll(api.repository.Name, "/", "%2F") +} + +func (api *SAP_COM_0948) getBranchNameForPath() string { + return "/" + api.repository.Branch +} + +func (api *SAP_COM_0948) getLogProtocolQuery(page int) string { + skip := page * numberOfEntriesPerPage + top := numberOfEntriesPerPage + + return fmt.Sprintf("?$skip=%s&$top=%s&$count=true", fmt.Sprint(skip), fmt.Sprint(top)) +} diff --git
a/pkg/abaputils/sap_com_0948_test.go b/pkg/abaputils/sap_com_0948_test.go new file mode 100644 index 0000000000..72c8fdd158 --- /dev/null +++ b/pkg/abaputils/sap_com_0948_test.go @@ -0,0 +1,483 @@ +//go:build unit +// +build unit + +package abaputils + +import ( + "testing" + "time" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" +) + +var connection ConnectionDetailsHTTP +var repository Repository + +func init() { + + connection.User = "CC_USER" + connection.Password = "123abc" + connection.URL = "https://example.com" + + repository.Name = "/DMO/REPO" + repository.Branch = "main" + +} + +func TestRetry0948(t *testing.T) { + t.Run("Test retry success", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{ "status" : "R", "UUID" : "GUID" }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Software component lifecycle activities in progress. Try again later..."} } }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + ErrorList: []error{ + nil, + errors.New("HTTP 400"), + nil, + }, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond} + + api, err := apiManager.GetAPI(con, repo) + api.setSleepTimeConfig(time.Nanosecond, 120*time.Nanosecond) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0948{}, api.(*SAP_COM_0948), "API has wrong type") + + errAction := api.(*SAP_COM_0948).triggerRequest(ConnectionDetailsHTTP{User: "CC_USER", Password: "abc123", URL: "https://example.com/path"}, []byte("{}")) + assert.NoError(t, errAction) + assert.Equal(t, "GUID", api.getUUID(), "API does not contain correct UUID") + + }) + + t.Run("Test retry not allowed", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{ "status" : "R", "UUID" : "GUID" }`, + `{"error" : { "code" : "A4C_A2G/224", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + ErrorList: []error{ + nil, + errors.New("HTTP 400"), + nil, + }, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond} + + api, err := apiManager.GetAPI(con, repo) + api.setSleepTimeConfig(time.Nanosecond, 120*time.Nanosecond) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0948{}, api.(*SAP_COM_0948), "API has wrong type") + + errAction := api.(*SAP_COM_0948).triggerRequest(ConnectionDetailsHTTP{User: "CC_USER", Password: "abc123", URL: "https://example.com/path"}, []byte("{}")) + assert.ErrorContains(t, errAction, "HTTP 400: A4C_A2G/224 - Error Text") + assert.Empty(t, api.getUUID(), "API does not contain correct UUID") + + }) + + t.Run("Test retry maxSleepTime", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, +
`{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + ErrorList: []error{ + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + nil, + }, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond} + + api, err := apiManager.GetAPI(con, repo) + api.setSleepTimeConfig(time.Nanosecond, 20*time.Nanosecond) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0948{}, api.(*SAP_COM_0948), "API has wrong type") + + api.(*SAP_COM_0948).maxRetries = 20 + + errAction := api.(*SAP_COM_0948).triggerRequest(ConnectionDetailsHTTP{User: "CC_USER", Password: "abc123", URL: "https://example.com/path"}, []byte("{}")) + assert.ErrorContains(t, errAction, "HTTP 400: A4C_A2G/228 - Error Text") + assert.Empty(t, api.getUUID(), "API does not cotain correct UUID") + + assert.Equal(t, 6, len(client.BodyList), "Expected maxSleepTime to limit requests") + }) + + t.Run("Test retry maxRetries", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Error Text"} } }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + ErrorList: []error{ + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + errors.New("HTTP 400"), + nil, + }, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond} + + api, err := apiManager.GetAPI(con, repo) + api.setSleepTimeConfig(time.Nanosecond, 999*time.Nanosecond) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0948{}, api.(*SAP_COM_0948), "API has wrong type") + + api.(*SAP_COM_0948).maxRetries = 3 + + errAction := api.(*SAP_COM_0948).triggerRequest(ConnectionDetailsHTTP{User: "CC_USER", Password: "abc123", URL: "https://example.com/path"}, []byte("{}")) + assert.ErrorContains(t, errAction, "HTTP 400: A4C_A2G/228 - Error Text") + assert.Empty(t, api.getUUID(), "API does not cotain correct UUID") + + assert.Equal(t, 5, len(client.BodyList), "Expected maxRetries to limit requests") + }) + +} +func TestClone0948(t *testing.T) { + t.Run("Test Clone Success", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{ "status" : "R", "UUID" : "GUID" }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 
* time.Microsecond} + + api, err := apiManager.GetAPI(con, repo) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0948{}, api.(*SAP_COM_0948), "API has wrong type") + + errClone := api.Clone() + assert.NoError(t, errClone) + assert.Equal(t, "GUID", api.getUUID(), "API does not contain correct UUID") + }) + + t.Run("Test Clone Failure", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{ "d" : {} }`, + `{ "d" : {} }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond} + + api, err := apiManager.GetAPI(con, repo) + api.setSleepTimeConfig(time.Nanosecond, 120*time.Nanosecond) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0948{}, api.(*SAP_COM_0948), "API has wrong type") + + errClone := api.Clone() + assert.ErrorContains(t, errClone, "Request to ABAP System not successful") + assert.Empty(t, api.getUUID(), "API does not contain correct UUID") + }) + + t.Run("Test Clone Retry", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{ "status" : "R", "UUID" : "GUID" }`, + `{"error" : { "code" : "A4C_A2G/228", "message" : { "lang" : "de", "value" : "Software component lifecycle activities in progress. Try again later..."} } }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + ErrorList: []error{ + nil, + errors.New("HTTP 400"), + nil, + }, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond} + + api, err := apiManager.GetAPI(con, repo) + api.setSleepTimeConfig(time.Nanosecond, 120*time.Nanosecond) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0948{}, api.(*SAP_COM_0948), "API has wrong type") + + errClone := api.Clone() + assert.NoError(t, errClone) + assert.Equal(t, "GUID", api.getUUID(), "API does not contain correct UUID") + }) +} + +func TestPull0948(t *testing.T) { + t.Run("Test Pull Success", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{ "status" : "R", "UUID" : "GUID" }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond} + + api, err := apiManager.GetAPI(con, repo) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0948{}, api.(*SAP_COM_0948), "API has wrong type") + + errPull := api.Pull() + assert.NoError(t, errPull) + assert.Equal(t, "GUID", api.getUUID(), "API does not contain correct UUID") + }) + + t.Run("Test Pull Failure", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{ "d" : {} }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond} + + api, err := apiManager.GetAPI(con, repo) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0948{}, api.(*SAP_COM_0948), "API has wrong type") + + errPull := api.Pull() + assert.ErrorContains(t, errPull, "Request to ABAP System not successful") + assert.Empty(t, api.getUUID(), "API does not contain correct UUID") + }) +} + +func TestCheckout0948(t *testing.T) { + t.Run("Test Checkout Success", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{ "status" : "R", "UUID" : "GUID" }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond} + + api, err := apiManager.GetAPI(con, repo) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0948{},
api.(*SAP_COM_0948), "API has wrong type") + + errCheckout := api.CheckoutBranch() + assert.NoError(t, errCheckout) + assert.Equal(t, "GUID", api.getUUID(), "API does not cotain correct UUID") + }) + + t.Run("Test Checkout Failure", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{ "d" : {} }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond} + + api, err := apiManager.GetAPI(con, repo) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0948{}, api.(*SAP_COM_0948), "API has wrong type") + + errCheckoput := api.CheckoutBranch() + assert.ErrorContains(t, errCheckoput, "Request to ABAP System not successful") + assert.Empty(t, api.getUUID(), "API does not cotain correct UUID") + }) +} + +func TestGetRepo0948(t *testing.T) { + t.Run("Test GetRepo Success", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{ "sc_name" : "testRepo1", "avail_on_inst" : true, "active_branch": "testBranch1" }`, + `{"d" : [] }`, + }, + Token: "myToken", + StatusCode: 200, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond} + + api, err := apiManager.GetAPI(con, repo) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0948{}, api.(*SAP_COM_0948), "API has wrong type") + + cloned, activeBranch, errAction := api.GetRepository() + assert.True(t, cloned) + assert.Equal(t, "testBranch1", activeBranch) + assert.NoError(t, errAction) + }) +} + +func TestCreateTag0948(t *testing.T) { + t.Run("Test Tag Success", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{ "status" : "R", "UUID" : "GUID" }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond} + + api, err := apiManager.GetAPI(con, repo) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0948{}, api.(*SAP_COM_0948), "API has wrong type") + + errCreateTag := api.CreateTag(Tag{TagName: "myTag", TagDescription: "descr"}) + assert.NoError(t, errCreateTag) + assert.Equal(t, "GUID", api.getUUID(), "API does not cotain correct UUID") + }) + + t.Run("Test Tag Failure", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{ "d" : {} }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond} + + api, err := apiManager.GetAPI(con, repo) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0948{}, api.(*SAP_COM_0948), "API has wrong type") + + errCreateTag := api.CreateTag(Tag{TagName: "myTag", TagDescription: "descr"}) + assert.ErrorContains(t, errCreateTag, "Request to ABAP System not successful") + assert.Empty(t, api.getUUID(), "API does not cotain correct UUID") + }) + + t.Run("Test Tag Empty", func(t *testing.T) { + + client := &ClientMock{ + BodyList: []string{ + `{ "d" : {} }`, + `{ }`, + }, + Token: "myToken", + StatusCode: 200, + } + + apiManager := &SoftwareComponentApiManager{Client: client, PollIntervall: 1 * time.Microsecond} + + api, err := apiManager.GetAPI(con, repo) + assert.NoError(t, err) + assert.IsType(t, &SAP_COM_0948{}, api.(*SAP_COM_0948), "API has wrong type") + + errCreateTag := api.CreateTag(Tag{}) + assert.ErrorContains(t, errCreateTag, "No Tag provided") + assert.Empty(t, api.getUUID(), "API does not cotain correct UUID") + }) +} + +func TestSleepTime0948(t *testing.T) { + t.Run("Test Sleep Time", 
func(t *testing.T) { + + api := SAP_COM_0948{ + retryMaxSleepTime: 120 * time.Nanosecond, + retryBaseSleepUnit: 1 * time.Nanosecond, + } + + expectedResults := make([]time.Duration, 12) + expectedResults[0] = 0 + expectedResults[1] = 1 + expectedResults[2] = 1 + expectedResults[3] = 2 + expectedResults[4] = 3 + expectedResults[5] = 5 + expectedResults[6] = 8 + expectedResults[7] = 13 + expectedResults[8] = 21 + expectedResults[9] = 34 + expectedResults[10] = 55 + expectedResults[11] = 89 + results := make([]time.Duration, 12) + var err error + + for i := 0; i <= 11; i++ { + + results[i], err = api.getSleepTime(i) + assert.NoError(t, err) + } + assert.ElementsMatch(t, expectedResults, results) + + _, err = api.getSleepTime(-10) + assert.Error(t, err) + + _, err = api.getSleepTime(12) + assert.ErrorContains(t, err, "Exceeded max sleep time") + }) +} diff --git a/pkg/abaputils/softwareComponentApiManager.go b/pkg/abaputils/softwareComponentApiManager.go new file mode 100644 index 0000000000..9b5e06332d --- /dev/null +++ b/pkg/abaputils/softwareComponentApiManager.go @@ -0,0 +1,219 @@ +package abaputils + +import ( + "errors" + "time" + + piperhttp "github.com/SAP/jenkins-library/pkg/http" + "github.com/SAP/jenkins-library/pkg/log" +) + +type SoftwareComponentApiManagerInterface interface { + GetAPI(con ConnectionDetailsHTTP, repo Repository) (SoftwareComponentApiInterface, error) + GetPollIntervall() time.Duration +} + +type SoftwareComponentApiManager struct { + Client piperhttp.Sender + PollIntervall time.Duration + Force0510 bool +} + +func (manager *SoftwareComponentApiManager) GetAPI(con ConnectionDetailsHTTP, repo Repository) (SoftwareComponentApiInterface, error) { + + var err0948 error + if !manager.Force0510 { + // Initialize SAP_COM_0948, if it does not work, use SAP_COM_0510 + sap_com_0948 := SAP_COM_0948{} + sap_com_0948.init(con, manager.Client, repo) + err0948 = sap_com_0948.initialRequest() + if err0948 == nil { + return &sap_com_0948, nil + } + } + + sap_com_0510 := SAP_COM_0510{} + sap_com_0510.init(con, manager.Client, repo) + err0510 := sap_com_0510.initialRequest() + if err0510 == nil { + log.Entry().Infof("SAP_COM_0510 will be replaced by SAP_COM_0948 starting from the SAP BTP, ABAP environment release 2402.") + return &sap_com_0510, nil + } + + log.Entry().Errorf("Could not connect via SAP_COM_0948: %s", err0948) + log.Entry().Errorf("Could not connect via SAP_COM_0510: %s", err0510) + + return nil, errors.New("Could not initialize API") +} + +func (manager *SoftwareComponentApiManager) GetPollIntervall() time.Duration { + if manager.PollIntervall == 0 { + manager.PollIntervall = 5 * time.Second + } + return manager.PollIntervall +} + +type SoftwareComponentApiInterface interface { + init(con ConnectionDetailsHTTP, client piperhttp.Sender, repo Repository) + initialRequest() error + setSleepTimeConfig(timeUnit time.Duration, maxSleepTime time.Duration) + getSleepTime(n int) (time.Duration, error) + getUUID() string + Clone() error + Pull() error + CheckoutBranch() error + GetRepository() (bool, string, error) + GetAction() (string, error) + GetLogOverview() ([]LogResultsV2, error) + GetLogProtocol(LogResultsV2, int) (result []LogProtocol, count int, err error) + CreateTag(tag Tag) error +} + +/**************************************** + * Structs for the A4C_A2G_GHA service * + ****************************************/ + +// ActionEntity struct for the Pull/Import entity A4C_A2G_GHA_SC_IMP +type ActionEntity struct { + Metadata AbapMetadata `json:"__metadata"` + UUID 
string `json:"uuid"` + Namespace string `json:"namespace"` + ScName string `json:"sc_name"` + ImportType string `json:"import_type"` + BranchName string `json:"branch_name"` + StartedByUser string `json:"user_name"` + Status string `json:"status"` + StatusDescription string `json:"status_descr"` + CommitID string `json:"commit_id"` + StartTime string `json:"start_time"` + ChangeTime string `json:"change_time"` + ToExecutionLog AbapLogs `json:"to_Execution_log"` + ToTransportLog AbapLogs `json:"to_Transport_log"` + ToLogOverview AbapLogsV2 `json:"to_Log_Overview"` +} + +// BranchEntity struct for the Branch entity A4C_A2G_GHA_SC_BRANCH +type BranchEntity struct { + Metadata AbapMetadata `json:"__metadata"` + ScName string `json:"sc_name"` + Namespace string `json:"namepsace"` + BranchName string `json:"branch_name"` + ParentBranch string `json:"derived_from"` + CreatedBy string `json:"created_by"` + CreatedOn string `json:"created_on"` + IsActive bool `json:"is_active"` + CommitID string `json:"commit_id"` + CommitMessage string `json:"commit_message"` + LastCommitBy string `json:"last_commit_by"` + LastCommitOn string `json:"last_commit_on"` +} + +// CloneEntity struct for the Clone entity A4C_A2G_GHA_SC_CLONE +type CloneEntity struct { + Metadata AbapMetadata `json:"__metadata"` + UUID string `json:"uuid"` + ScName string `json:"sc_name"` + BranchName string `json:"branch_name"` + ImportType string `json:"import_type"` + Namespace string `json:"namepsace"` + Status string `json:"status"` + StatusDescription string `json:"status_descr"` + StartedByUser string `json:"user_name"` + StartTime string `json:"start_time"` + ChangeTime string `json:"change_time"` +} + +type RepositoryEntity struct { + Metadata AbapMetadata `json:"__metadata"` + ScName string `json:"sc_name"` + ActiveBranch string `json:"active_branch"` + AvailOnInst bool `json:"avail_on_inst"` +} + +// AbapLogs struct for ABAP logs +type AbapLogs struct { + Results []LogResults `json:"results"` +} + +type AbapLogsV2 struct { + Results []LogResultsV2 `json:"results"` +} + +type LogResultsV2 struct { + Metadata AbapMetadata `json:"__metadata"` + Index int `json:"log_index"` + Name string `json:"log_name"` + Status string `json:"type_of_found_issues"` + Timestamp string `json:"timestamp"` + ToLogProtocol LogProtocolDeferred `json:"to_Log_Protocol"` +} + +type LogProtocolDeferred struct { + Deferred URI `json:"__deferred"` +} + +type URI struct { + URI string `json:"uri"` +} + +type LogProtocolResults struct { + Results []LogProtocol `json:"results"` + Count string `json:"__count"` +} + +type LogProtocolResultsV4 struct { + Results []LogProtocol `json:"value"` + Count int `json:"@odata.count"` +} + +type LogProtocol struct { + // Metadata AbapMetadata `json:"__metadata"` + OverviewIndex int `json:"log_index"` + ProtocolLine int `json:"index_no"` + Type string `json:"type"` + Description string `json:"descr"` + Timestamp string `json:"timestamp"` +} + +// LogResults struct for Execution and Transport Log entities A4C_A2G_GHA_SC_LOG_EXE and A4C_A2G_GHA_SC_LOG_TP +type LogResults struct { + Index string `json:"index_no"` + Type string `json:"type"` + Description string `json:"descr"` + Timestamp string `json:"timestamp"` +} + +// RepositoriesConfig struct for parsing one or multiple branches and repositories configurations +type RepositoriesConfig struct { + BranchName string + CommitID string + RepositoryName string + RepositoryNames []string + Repositories string +} + +type EntitySetsForManageGitRepository struct { + EntitySets 
[]string `json:"EntitySets"` +} + +type CreateTagBacklog struct { + RepositoryName string + CommitID string + Tags []Tag +} + +type Tag struct { + TagName string + TagDescription string +} + +type CreateTagBody struct { + RepositoryName string `json:"sc_name"` + CommitID string `json:"commit_id"` + Tag string `json:"tag_name"` + Description string `json:"tag_description"` +} + +type CreateTagResponse struct { + UUID string `json:"uuid"` +} diff --git a/pkg/blackduck/blackduck.go b/pkg/blackduck/blackduck.go index 34bf0229b3..029a29362b 100644 --- a/pkg/blackduck/blackduck.go +++ b/pkg/blackduck/blackduck.go @@ -143,6 +143,7 @@ type VulnerabilityWithRemediation struct { ExploitabilitySubscore float32 `json:"exploitabilitySubscore,omitempty"` ImpactSubscore float32 `json:"impactSubscore,omitempty"` RelatedVulnerability string `json:"relatedVulnerability,omitempty"` + RemidiatedBy string `json:"remediationCreatedBy,omitempty"` } // Title returns the issue title representation of the contents diff --git a/pkg/blackduck/reporting.go b/pkg/blackduck/reporting.go index cc3aec3647..960c0c3357 100644 --- a/pkg/blackduck/reporting.go +++ b/pkg/blackduck/reporting.go @@ -6,6 +6,7 @@ import ( "fmt" "path/filepath" "runtime" + "strings" "github.com/SAP/jenkins-library/pkg/format" "github.com/SAP/jenkins-library/pkg/log" @@ -70,12 +71,17 @@ func CreateSarifResultFile(vulns *Vulnerabilities, projectName, projectVersion, PackageURLPlusCVEHash: base64.URLEncoding.EncodeToString([]byte(fmt.Sprintf("%v+%v", v.Component.ToPackageUrl().ToString(), v.CweID))), }, Properties: &format.SarifProperties{ - Audited: isAudited, - ToolSeverity: v.Severity, - ToolSeverityIndex: severityIndex[v.Severity], - ToolAuditMessage: v.VulnerabilityWithRemediation.RemediationComment, - ToolState: v.RemediationStatus, - UnifiedAuditState: unifiedStatusValue, + Audited: isAudited, + ToolSeverity: v.Severity, + ToolSeverityIndex: severityIndex[v.Severity], + ToolState: v.RemediationStatus, + ToolAuditMessage: v.VulnerabilityWithRemediation.RemediationComment, + UnifiedAuditState: unifiedStatusValue, + UnifiedSeverity: strings.ToLower(v.Severity), + UnifiedCriticality: v.BaseScore, + UnifiedAuditUser: v.VulnerabilityWithRemediation.RemidiatedBy, + AuditRequirement: format.AUDIT_REQUIREMENT_GROUP_1_DESC, + AuditRequirementIndex: format.AUDIT_REQUIREMENT_GROUP_1_INDEX, }, } diff --git a/pkg/blackduck/reporting_test.go b/pkg/blackduck/reporting_test.go index 82018853d1..e0c86bc35e 100644 --- a/pkg/blackduck/reporting_test.go +++ b/pkg/blackduck/reporting_test.go @@ -32,6 +32,7 @@ func TestCreateSarifResultFile(t *testing.T) { BaseScore: 9.8, OverallScore: 10, RemediationStatus: "IGNORED", RemediationComment: "CWE-45456543 Auto-remediated: CWE-45456543 is related to CVE-1, but the CWE team has determined that this component version is not affected.", + RemidiatedBy: "technical_user", }, }, { @@ -60,6 +61,7 @@ func TestCreateSarifResultFile(t *testing.T) { Description: "Some vulnerability that can be exploited by turning it upside down.", BaseScore: 6.5, OverallScore: 7, + RemediationStatus: "IGNORED", }, }, { @@ -110,6 +112,13 @@ func TestCreateSarifResultFile(t *testing.T) { // Test correctness of audit information assert.Equal(t, true, sarif.Runs[0].Results[0].Properties.Audited) assert.Equal(t, "IGNORED", sarif.Runs[0].Results[0].Properties.ToolState) + assert.Equal(t, alerts[0].BaseScore, sarif.Runs[0].Results[0].Properties.UnifiedCriticality) + assert.Equal(t, "critical", sarif.Runs[0].Results[0].Properties.UnifiedSeverity) + 
assert.Equal(t, "new", sarif.Runs[0].Results[1].Properties.UnifiedAuditState) + assert.Equal(t, "notRelevant", sarif.Runs[0].Results[0].Properties.UnifiedAuditState) + assert.Equal(t, "technical_user", sarif.Runs[0].Results[0].Properties.UnifiedAuditUser) + assert.Equal(t, format.AUDIT_REQUIREMENT_GROUP_1_DESC, sarif.Runs[0].Results[0].Properties.AuditRequirement) + assert.Equal(t, format.AUDIT_REQUIREMENT_GROUP_1_INDEX, sarif.Runs[0].Results[0].Properties.AuditRequirementIndex) assert.Equal(t, "CWE-45456543 Auto-remediated: CWE-45456543 is related to CVE-1, but the CWE team has determined that this component version is not affected.", sarif.Runs[0].Results[0].Properties.ToolAuditMessage, diff --git a/pkg/certutils/certutils.go b/pkg/certutils/certutils.go index 3afc92be87..4e496dda47 100644 --- a/pkg/certutils/certutils.go +++ b/pkg/certutils/certutils.go @@ -5,6 +5,7 @@ import ( "net/http" piperhttp "github.com/SAP/jenkins-library/pkg/http" + "github.com/SAP/jenkins-library/pkg/log" "github.com/SAP/jenkins-library/pkg/piperutils" "github.com/pkg/errors" ) @@ -22,23 +23,42 @@ func CertificateUpdate(certLinks []string, httpClient piperhttp.Sender, fileUtil return errors.Wrapf(err, "failed to load file '%v'", caCertsFile) } - for _, link := range certLinks { - response, err := httpClient.SendRequest(http.MethodGet, link, nil, nil, nil) + byteCerts, err := CertificateDownload(certLinks, httpClient) + if err != nil { + return err + } + + caCerts = append(caCerts, byteCerts...) + + err = fileUtils.FileWrite(caCertsFile, caCerts, 0644) + if err != nil { + return errors.Wrapf(err, "failed to update file '%v'", caCertsFile) + } + return nil +} + +// CertificateDownload downloads certificates and returns them as a byte slice +func CertificateDownload(certLinks []string, client piperhttp.Sender) ([]byte, error) { + if len(certLinks) == 0 { + return nil, nil + } + + var certs []byte + for _, certLink := range certLinks { + log.Entry().Debugf("Downloading CA certificate from URL: %s", certLink) + response, err := client.SendRequest(http.MethodGet, certLink, nil, nil, nil) if err != nil { - return errors.Wrap(err, "failed to load certificate from url") + return nil, errors.Wrap(err, "failed to load certificate from url") } content, err := io.ReadAll(response.Body) if err != nil { - return errors.Wrap(err, "error reading response") + return nil, errors.Wrap(err, "failed to read response") } _ = response.Body.Close() content = append(content, []byte("\n")...) - caCerts = append(caCerts, content...) - } - err = fileUtils.FileWrite(caCertsFile, caCerts, 0644) - if err != nil { - return errors.Wrapf(err, "failed to update file '%v'", caCertsFile) + certs = append(certs, content...) 
} - return nil + + return certs, nil } diff --git a/pkg/certutils/certutils_test.go b/pkg/certutils/certutils_test.go index f261090f56..d6a218fb4e 100644 --- a/pkg/certutils/certutils_test.go +++ b/pkg/certutils/certutils_test.go @@ -70,3 +70,48 @@ func TestCertificateUpdate(t *testing.T) { }) } + +func TestDownloadCACertbunde(t *testing.T) { + certLinks := []string{"https://test-link-1.com/cert-1.crt", "https://test-link-2.com/cert-2.crt"} + badCaseLink := "http://non-existing-url" + + httpmock.Activate() + defer httpmock.DeactivateAndReset() + httpmock.RegisterResponder(http.MethodGet, certLinks[0], httpmock.NewStringResponder(http.StatusOK, "testCert1")) + httpmock.RegisterResponder(http.MethodGet, certLinks[1], httpmock.NewStringResponder(http.StatusOK, "testCert2")) + httpmock.RegisterResponder(http.MethodGet, badCaseLink, httpmock.NewStringResponder(http.StatusNotFound, "not found")) + + client := &piperhttp.Client{} + client.SetOptions(piperhttp.ClientOptions{MaxRetries: -1, UseDefaultTransport: true}) + + testTable := []struct { + name string + certsLinks []string + expected string + expectedErr string + }{ + { + name: "good case", + certsLinks: certLinks, + expected: "testCert1\ntestCert2\n", + }, + { + name: "no links", + }, + { + name: "bad link", + certsLinks: []string{badCaseLink}, + expectedErr: fmt.Sprintf("failed to load certificate from url: request to %s returned with response 404", badCaseLink), + }, + } + + for _, testCase := range testTable { + t.Run(testCase.name, func(t *testing.T) { + certs, err := CertificateDownload(testCase.certsLinks, client) + if err != nil { + assert.Contains(t, testCase.expectedErr, err.Error()) + } + assert.Equal(t, testCase.expected, string(certs)) + }) + } +} diff --git a/pkg/checkmarxone/checkmarxone.go b/pkg/checkmarxone/checkmarxone.go index e54dfec87d..db22059424 100644 --- a/pkg/checkmarxone/checkmarxone.go +++ b/pkg/checkmarxone/checkmarxone.go @@ -259,6 +259,12 @@ type Status struct { Details ScanStatusDetails `json:"details"` } +type VersionInfo struct { + CxOne string `json:"CxOne"` + KICS string `json:"KICS"` + SAST string `json:"SAST"` +} + type WorkflowLog struct { Source string `json:"Source"` Info string `json:"Info"` @@ -327,6 +333,8 @@ type System interface { GetProjectConfiguration(projectID string) ([]ProjectConfigurationSetting, error) UpdateProjectConfiguration(projectID string, settings []ProjectConfigurationSetting) error + + GetVersion() (VersionInfo, error) } // NewSystemInstance returns a new Checkmarx client for communicating with the backend @@ -833,11 +841,41 @@ func (sys *SystemInstance) CreateProjectInApplication(projectName, applicationID header.Set("Content-Type", "application/json") data, err := sendRequest(sys, http.MethodPost, fmt.Sprintf("/projects/application/%v", applicationID), bytes.NewBuffer(jsonValue), header, []int{}) + + if err != nil && err.Error()[0:8] == "HTTP 404" { // At some point, the api /projects/applications will be removed and instead the normal /projects API will do the job. 
+ jsonData["applicationIds"] = []string{applicationID} + jsonValue, err = json.Marshal(data) + if err != nil { + return project, err + } + data, err = sendRequest(sys, http.MethodPost, "/projects", bytes.NewReader(jsonValue), header, []int{}) + } + if err != nil { return project, errors.Wrapf(err, "failed to create project %v under %v", projectName, applicationID) } err = json.Unmarshal(data, &project) + if err != nil { + return project, errors.Wrapf(err, "failed to unmarshal project data") + } + + // since there is a delay to assign a project to an application, adding a check to ensure project is ready after creation + // (if project is not ready, 403 will be returned) + projectID := project.ProjectID + project, err = sys.GetProjectByID(projectID) + if err != nil { + const max_retry = 12 // 3 minutes + const delay = 15 + retry_counter := 1 + for retry_counter <= max_retry && err != nil { + sys.logger.Debug("Waiting for project assignment to application, retry #", retry_counter) + time.Sleep(delay * time.Second) + retry_counter++ + project, err = sys.GetProjectByID(projectID) + } + } + return project, err } @@ -953,10 +991,6 @@ func (sys *SystemInstance) ScanProject(projectID, sourceUrl, branch, scanType st return Scan{}, errors.New("Invalid scanType provided, must be 'upload' or 'git'") } -//func (sys *SystemInstance) UpdateProjectExcludeSettings(projectID string, excludeFolders string, excludeFiles string) error { -// replaced by SetProjectFileFilter - -// Updated for Cx1: GetPresets loads the preset values defined in the Checkmarx backend func (sys *SystemInstance) GetPresets() ([]Preset, error) { sys.logger.Debug("Getting Presets...") var presets []Preset @@ -971,7 +1005,6 @@ func (sys *SystemInstance) GetPresets() ([]Preset, error) { return presets, err } -// New for Cx1 func (sys *SystemInstance) GetProjectConfiguration(projectID string) ([]ProjectConfigurationSetting, error) { sys.logger.Debug("Getting project configuration") var projectConfigurations []ProjectConfigurationSetting @@ -989,8 +1022,6 @@ func (sys *SystemInstance) GetProjectConfiguration(projectID string) ([]ProjectC return projectConfigurations, err } -// UpdateProjectConfiguration updates the configuration of the project addressed by projectID -// Updated for Cx1 func (sys *SystemInstance) UpdateProjectConfiguration(projectID string, settings []ProjectConfigurationSetting) error { if len(settings) == 0 { return errors.New("Empty list of settings provided.") @@ -1081,7 +1112,6 @@ func (sys *SystemInstance) GetScanMetadata(scanID string) (ScanMetadata, error) return scanmeta, nil } -// GetScans returns all scan status on the project addressed by projectID func (sys *SystemInstance) GetScanWorkflow(scanID string) ([]WorkflowLog, error) { var workflow []WorkflowLog @@ -1095,7 +1125,6 @@ func (sys *SystemInstance) GetScanWorkflow(scanID string) ([]WorkflowLog, error) return workflow, nil } -// GetScans returns all scan status on the project addressed by projectID func (sys *SystemInstance) GetLastScans(projectID string, limit int) ([]Scan, error) { var scanResponse struct { TotalCount uint64 @@ -1359,3 +1388,17 @@ func (sys *SystemInstance) DownloadReport(reportUrl string) ([]byte, error) { } return data, nil } + +func (sys *SystemInstance) GetVersion() (VersionInfo, error) { + sys.logger.Debug("Getting Version information...") + var version VersionInfo + + data, err := sendRequest(sys, http.MethodGet, "/versions", nil, http.Header{}, []int{}) + if err != nil { + sys.logger.Errorf("Fetching versions failed: %s", err) + 
return version, err + } + + err = json.Unmarshal(data, &version) + return version, err +} diff --git a/pkg/checkmarxone/cxjson_to_sarif.go b/pkg/checkmarxone/cxjson_to_sarif.go index f9b1112749..27d8e0cecc 100644 --- a/pkg/checkmarxone/cxjson_to_sarif.go +++ b/pkg/checkmarxone/cxjson_to_sarif.go @@ -8,7 +8,6 @@ import ( "github.com/SAP/jenkins-library/pkg/format" "github.com/SAP/jenkins-library/pkg/log" "github.com/SAP/jenkins-library/pkg/piperutils" - "github.com/pkg/errors" ) // ConvertCxJSONToSarif is the entrypoint for the Parse function @@ -24,14 +23,9 @@ func ConvertCxJSONToSarif(sys System, serverURL string, scanResults *[]ScanResul sarif.Runs = append(sarif.Runs, checkmarxRun) rulesArray := []format.SarifRule{} - queries, err := sys.GetQueries() - if err != nil { - return sarif, errors.Wrap(err, "Failed to retrieve list of queries") - } - - baseURL := "https://" + serverURL + "/results/" + scanMeta.ScanID + "/" + scanMeta.ProjectID + baseURL := serverURL + "/results/" + scanMeta.ScanID + "/" + scanMeta.ProjectID - cweIdsForTaxonomies := make(map[int64]int) //use a map to avoid duplicates + cweIdsForTaxonomies := make(map[int]int) //use a map to avoid duplicates cweCounter := 0 //maxretries := 5 @@ -41,15 +35,10 @@ func ConvertCxJSONToSarif(sys System, serverURL string, scanResults *[]ScanResul log.Entry().Debug("[SARIF] Now handling results.") for _, r := range *scanResults { - query := getQuery(queries, r.Data.QueryID) - if query == nil { - return sarif, errors.New(fmt.Sprintf("Unknown queryid in results: %d", r.Data.QueryID)) - } - - _, haskey := cweIdsForTaxonomies[query.CweID] + _, haskey := cweIdsForTaxonomies[r.VulnerabilityDetails.CweId] if !haskey { - cweIdsForTaxonomies[query.CweID] = cweCounter + cweIdsForTaxonomies[r.VulnerabilityDetails.CweId] = cweCounter cweCounter++ } @@ -59,14 +48,14 @@ func ConvertCxJSONToSarif(sys System, serverURL string, scanResults *[]ScanResul result := *new(format.Results) //General - result.RuleID = fmt.Sprintf("checkmarxOne-%v/%d", query.Language, query.QueryID) - result.RuleIndex = cweIdsForTaxonomies[query.CweID] + result.RuleID = fmt.Sprintf("checkmarxOne-%v/%d", r.Data.LanguageName, r.Data.QueryID) + result.RuleIndex = cweIdsForTaxonomies[r.VulnerabilityDetails.CweId] result.Level = "none" msg := new(format.Message) if apiDescription != "" { msg.Text = apiDescription } else { - msg.Text = query.Name + msg.Text = r.Data.QueryName } result.Message = msg @@ -199,18 +188,18 @@ func ConvertCxJSONToSarif(sys System, serverURL string, scanResults *[]ScanResul //handle the rules array rule := *new(format.SarifRule) - rule.ID = fmt.Sprintf("checkmarxOne-%v/%d", query.Language, query.QueryID) - words := strings.Split(query.Name, "_") + rule.ID = fmt.Sprintf("checkmarxOne-%v/%d", r.Data.LanguageName, r.Data.QueryID) + words := strings.Split(r.Data.QueryName, "_") for w := 0; w < len(words); w++ { words[w] = piperutils.Title(strings.ToLower(words[w])) } rule.Name = strings.Join(words, "") - rule.HelpURI = fmt.Sprintf("%v/sast/description/%v/%v", baseURL, query.QueryDescriptionID, query.QueryID) + rule.HelpURI = fmt.Sprintf("%v/sast/description/%v/%v", baseURL, r.VulnerabilityDetails.CweId, r.Data.QueryID) rule.Help = new(format.Help) rule.Help.Text = rule.HelpURI rule.ShortDescription = new(format.Message) - rule.ShortDescription.Text = query.Name + rule.ShortDescription.Text = r.Data.QueryName rule.Properties = new(format.SarifRuleProperties) if len(r.VulnerabilityDetails.Compliances) > 0 { @@ -221,7 +210,7 @@ func ConvertCxJSONToSarif(sys System, 
serverURL string, scanResults *[]ScanResul rule.Properties.Tags = append(rule.Properties.Tags, r.VulnerabilityDetails.Compliances[cat]) } } - switch query.Severity { + switch r.Severity { case "INFORMATION": rule.Properties.SecuritySeverity = "0.0" case "LOW": @@ -234,8 +223,8 @@ func ConvertCxJSONToSarif(sys System, serverURL string, scanResults *[]ScanResul rule.Properties.SecuritySeverity = "10.0" } - if query.CweID != 0 { - rule.Properties.Tags = append(rule.Properties.Tags, fmt.Sprintf("external/cwe/cwe-%d", query.CweID)) + if r.VulnerabilityDetails.CweId != 0 { + rule.Properties.Tags = append(rule.Properties.Tags, fmt.Sprintf("external/cwe/cwe-%d", r.VulnerabilityDetails.CweId)) } rulesArray = append(rulesArray, rule) } diff --git a/pkg/checkmarxone/reporting.go b/pkg/checkmarxone/reporting.go index 21cca0961e..dbf4613b99 100644 --- a/pkg/checkmarxone/reporting.go +++ b/pkg/checkmarxone/reporting.go @@ -41,7 +41,7 @@ type Finding struct { } type LowPerQuery struct { - QueryName string `json:"query"` + QueryName string `json:"name"` Audited int `json:"audited"` Total int `json:"total"` } diff --git a/pkg/cnbutils/user.go b/pkg/cnbutils/user.go new file mode 100644 index 0000000000..189f8a15f3 --- /dev/null +++ b/pkg/cnbutils/user.go @@ -0,0 +1,32 @@ +package cnbutils + +import ( + "os" + "strconv" + + "github.com/pkg/errors" +) + +func CnbUserInfo() (int, int, error) { + uidStr, ok := os.LookupEnv("CNB_USER_ID") + if !ok { + return 0, 0, errors.New("environment variable CNB_USER_ID not found") + } + + gidStr, ok := os.LookupEnv("CNB_GROUP_ID") + if !ok { + return 0, 0, errors.New("environment variable CNB_GROUP_ID not found") + } + + uid, err := strconv.Atoi(uidStr) + if err != nil { + return 0, 0, err + } + + gid, err := strconv.Atoi(gidStr) + if err != nil { + return 0, 0, err + } + + return uid, gid, nil +} diff --git a/pkg/codeql/codeql.go b/pkg/codeql/codeql.go index a365c96502..0f537b588d 100644 --- a/pkg/codeql/codeql.go +++ b/pkg/codeql/codeql.go @@ -49,6 +49,8 @@ func getVulnerabilitiesFromClient(ctx context.Context, codeScanning githubCodeql page := 1 audited := 0 totalAlerts := 0 + optionalAudited := 0 + totalOptionalAlerts := 0 for page != 0 { alertOptions := github.AlertListOptions{ @@ -72,13 +74,31 @@ func getVulnerabilitiesFromClient(ctx context.Context, codeScanning githubCodeql continue } - if *alert.State == auditStateDismissed { - audited += 1 - totalAlerts += 1 + isSecurityIssue := false + for _, tag := range alert.Rule.Tags { + if tag == "security" { + isSecurityIssue = true + } } - if *alert.State == auditStateOpen { - totalAlerts += 1 + if isSecurityIssue { + if *alert.State == auditStateDismissed { + audited += 1 + totalAlerts += 1 + } + + if *alert.State == auditStateOpen { + totalAlerts += 1 + } + } else { + if *alert.State == auditStateDismissed { + optionalAudited += 1 + totalOptionalAlerts += 1 + } + + if *alert.State == auditStateOpen { + totalOptionalAlerts += 1 + } } } } @@ -88,7 +108,12 @@ func getVulnerabilitiesFromClient(ctx context.Context, codeScanning githubCodeql Total: totalAlerts, Audited: audited, } - codeqlScanning := []CodeqlFindings{auditAll} + optionalIssues := CodeqlFindings{ + ClassificationName: "Optional", + Total: totalOptionalAlerts, + Audited: optionalAudited, + } + codeqlScanning := []CodeqlFindings{auditAll, optionalIssues} return codeqlScanning, nil } diff --git a/pkg/codeql/codeql_test.go b/pkg/codeql/codeql_test.go index 84b0ac40ee..e056a51926 100644 --- a/pkg/codeql/codeql_test.go +++ b/pkg/codeql/codeql_test.go @@ -24,30 
+24,36 @@ func (g *githubCodeqlScanningMock) ListAlertsForRepo(ctx context.Context, owner, testToolName := "Test" if repo == "testRepo1" { - alerts = append(alerts, &github.Alert{State: &openState, Tool: &github.Tool{Name: &codeqlToolName}}) - alerts = append(alerts, &github.Alert{State: &openState, Tool: &github.Tool{Name: &codeqlToolName}}) - alerts = append(alerts, &github.Alert{State: &dismissedState, Tool: &github.Tool{Name: &codeqlToolName}}) - alerts = append(alerts, &github.Alert{State: &dismissedState, Tool: &github.Tool{Name: &testToolName}}) + alerts = append(alerts, &github.Alert{State: &openState, Tool: &github.Tool{Name: &codeqlToolName}, Rule: &github.Rule{Tags: []string{"security"}}}) + alerts = append(alerts, &github.Alert{State: &openState, Tool: &github.Tool{Name: &codeqlToolName}, Rule: &github.Rule{Tags: []string{"security"}}}) + alerts = append(alerts, &github.Alert{State: &dismissedState, Tool: &github.Tool{Name: &codeqlToolName}, Rule: &github.Rule{Tags: []string{"security"}}}) + alerts = append(alerts, &github.Alert{State: &dismissedState, Tool: &github.Tool{Name: &testToolName}, Rule: &github.Rule{Tags: []string{"security"}}}) + alerts = append(alerts, &github.Alert{State: &dismissedState, Tool: &github.Tool{Name: &codeqlToolName}, Rule: &github.Rule{Tags: []string{"useless_code"}}}) + alerts = append(alerts, &github.Alert{State: &dismissedState, Tool: &github.Tool{Name: &testToolName}, Rule: &github.Rule{Tags: []string{"useless_code"}}}) response.NextPage = 0 } if repo == "testRepo2" { if opts.Page == 1 { for i := 0; i < 50; i++ { - alerts = append(alerts, &github.Alert{State: &openState, Tool: &github.Tool{Name: &codeqlToolName}}) + alerts = append(alerts, &github.Alert{State: &openState, Tool: &github.Tool{Name: &codeqlToolName}, Rule: &github.Rule{Tags: []string{"security"}}}) + alerts = append(alerts, &github.Alert{State: &dismissedState, Tool: &github.Tool{Name: &testToolName}, Rule: &github.Rule{Tags: []string{"useless_code"}}}) } for i := 0; i < 50; i++ { - alerts = append(alerts, &github.Alert{State: &dismissedState, Tool: &github.Tool{Name: &codeqlToolName}}) + alerts = append(alerts, &github.Alert{State: &dismissedState, Tool: &github.Tool{Name: &codeqlToolName}, Rule: &github.Rule{Tags: []string{"security"}}}) + alerts = append(alerts, &github.Alert{State: &dismissedState, Tool: &github.Tool{Name: &testToolName}, Rule: &github.Rule{Tags: []string{"useless_code"}}}) } response.NextPage = 2 } if opts.Page == 2 { for i := 0; i < 10; i++ { - alerts = append(alerts, &github.Alert{State: &openState, Tool: &github.Tool{Name: &codeqlToolName}}) + alerts = append(alerts, &github.Alert{State: &openState, Tool: &github.Tool{Name: &codeqlToolName}, Rule: &github.Rule{Tags: []string{"security"}}}) + alerts = append(alerts, &github.Alert{State: &dismissedState, Tool: &github.Tool{Name: &testToolName}, Rule: &github.Rule{Tags: []string{"useless_code"}}}) } for i := 0; i < 30; i++ { - alerts = append(alerts, &github.Alert{State: &dismissedState, Tool: &github.Tool{Name: &codeqlToolName}}) + alerts = append(alerts, &github.Alert{State: &dismissedState, Tool: &github.Tool{Name: &codeqlToolName}, Rule: &github.Rule{Tags: []string{"security"}}}) + alerts = append(alerts, &github.Alert{State: &dismissedState, Tool: &github.Tool{Name: &testToolName}, Rule: &github.Rule{Tags: []string{"useless_code"}}}) } response.NextPage = 0 } @@ -72,7 +78,7 @@ func TestGetVulnerabilitiesFromClient(t *testing.T) { codeScanning, err := getVulnerabilitiesFromClient(ctx, 
&ghCodeqlScanningMock, "ref", &codeqlScanAuditInstance) assert.NoError(t, err) assert.NotEmpty(t, codeScanning) - assert.Equal(t, 1, len(codeScanning)) + assert.Equal(t, 2, len(codeScanning)) assert.Equal(t, 3, codeScanning[0].Total) assert.Equal(t, 1, codeScanning[0].Audited) }) @@ -83,7 +89,7 @@ func TestGetVulnerabilitiesFromClient(t *testing.T) { codeScanning, err := getVulnerabilitiesFromClient(ctx, &ghCodeqlScanningMock, "ref", &codeqlScanAuditInstance) assert.NoError(t, err) assert.NotEmpty(t, codeScanning) - assert.Equal(t, 1, len(codeScanning)) + assert.Equal(t, 2, len(codeScanning)) assert.Equal(t, 140, codeScanning[0].Total) assert.Equal(t, 80, codeScanning[0].Audited) }) diff --git a/pkg/codeql/github_repo_upload.go b/pkg/codeql/github_repo_upload.go new file mode 100644 index 0000000000..c857c2b988 --- /dev/null +++ b/pkg/codeql/github_repo_upload.go @@ -0,0 +1,368 @@ +package codeql + +import ( + "archive/zip" + "errors" + "fmt" + "io" + "os" + "path" + "path/filepath" + "runtime" + "strings" + "time" + + "github.com/SAP/jenkins-library/pkg/command" + "github.com/SAP/jenkins-library/pkg/log" + "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/config" + "github.com/go-git/go-git/v5/plumbing" + "github.com/go-git/go-git/v5/plumbing/object" + "github.com/go-git/go-git/v5/plumbing/transport" + "github.com/go-git/go-git/v5/plumbing/transport/http" + "github.com/go-git/go-git/v5/storage/memory" + "gopkg.in/yaml.v2" +) + +type GitUploader interface { + UploadProjectToGithub() (string, error) +} + +type GitUploaderInstance struct { + *command.Command + + token string + ref string + sourceCommitId string + sourceRepo string + targetRepo string + dbDir string +} + +func NewGitUploaderInstance(token, ref, dbDir, sourceCommitId, sourceRepo, targetRepo string) (*GitUploaderInstance, error) { + dbAbsPath, err := filepath.Abs(dbDir) + if err != nil { + return nil, err + } + instance := &GitUploaderInstance{ + Command: &command.Command{}, + token: token, + ref: ref, + sourceCommitId: sourceCommitId, + sourceRepo: sourceRepo, + targetRepo: targetRepo, + dbDir: filepath.Clean(dbAbsPath), + } + + instance.Stdout(log.Writer()) + instance.Stderr(log.Writer()) + return instance, nil +} + +type gitUtils interface { + listRemote() ([]reference, error) + cloneRepo(dir string, opts *git.CloneOptions) (*git.Repository, error) + switchOrphan(ref string, repo *git.Repository) error + initRepo(dir string) (*git.Repository, error) +} + +type repository interface { + Worktree() (*git.Worktree, error) + CommitObject(commit plumbing.Hash) (*object.Commit, error) + Push(o *git.PushOptions) error +} + +type worktree interface { + RemoveGlob(pattern string) error + Clean(opts *git.CleanOptions) error + AddWithOptions(opts *git.AddOptions) error + Commit(msg string, opts *git.CommitOptions) (plumbing.Hash, error) +} + +type reference interface { + Name() plumbing.ReferenceName +} + +const ( + CommitMessageMirroringCode = "Mirroring code for revision %s from %s" + SrcZip = "src.zip" + CodeqlDatabaseYml = "codeql-database.yml" + OriginRemote = "origin" +) + +func (uploader *GitUploaderInstance) UploadProjectToGithub() (string, error) { + tmpDir, err := os.MkdirTemp("", "tmp") + if err != nil { + return "", err + } + defer os.RemoveAll(tmpDir) + + refExists, repoEmpty, err := doesRefExist(uploader, uploader.ref) + if err != nil { + return "", err + } + + repo, err := clone(uploader, uploader.targetRepo, uploader.token, uploader.ref, tmpDir, repoEmpty, refExists) + if err != nil { + return "", err + } + + 
tree, err := repo.Worktree() + if err != nil { + return "", err + } + err = cleanDir(tree) + if err != nil { + return "", err + } + + srcLocationPrefix, err := getSourceLocationPrefix(filepath.Join(uploader.dbDir, CodeqlDatabaseYml)) + if err != nil { + return "", err + } + + zipPath := path.Join(uploader.dbDir, SrcZip) + err = unzip(zipPath, tmpDir, strings.Trim(srcLocationPrefix, fmt.Sprintf("%c", os.PathSeparator)), strings.Trim(uploader.dbDir, fmt.Sprintf("%c", os.PathSeparator))) + if err != nil { + return "", err + } + + err = add(tree) + if err != nil { + return "", err + } + + newCommit, err := commit(repo, tree, uploader.sourceCommitId, uploader.sourceRepo) + if err != nil { + return "", err + } + + err = push(repo, uploader.token) + if err != nil { + return "", err + } + + return newCommit.ID().String(), err +} + +func (uploader *GitUploaderInstance) listRemote() ([]reference, error) { + rem := git.NewRemote(memory.NewStorage(), &config.RemoteConfig{ + Name: OriginRemote, + URLs: []string{uploader.targetRepo}, + }) + + list, err := rem.List(&git.ListOptions{ + Auth: &http.BasicAuth{ + Username: "does-not-matter", + Password: uploader.token, + }, + }) + if err != nil { + return nil, err + } + var convertedList []reference + for _, ref := range list { + convertedList = append(convertedList, ref) + } + return convertedList, err +} + +func (uploader *GitUploaderInstance) initRepo(dir string) (*git.Repository, error) { + // git init -b + repo, err := git.PlainInitWithOptions(dir, &git.PlainInitOptions{ + InitOptions: git.InitOptions{ + DefaultBranch: plumbing.ReferenceName(uploader.ref), + }, + }) + if err != nil { + return nil, err + } + + // git remote add origin + _, err = repo.CreateRemote(&config.RemoteConfig{ + Name: OriginRemote, + URLs: []string{uploader.targetRepo}, + }) + if err != nil { + return nil, err + } + return repo, nil +} + +func (uploader *GitUploaderInstance) cloneRepo(dir string, opts *git.CloneOptions) (*git.Repository, error) { + return git.PlainClone(dir, false, opts) +} + +func (uploader *GitUploaderInstance) switchOrphan(ref string, r *git.Repository) error { + branchName := strings.Split(ref, "/")[2:] + newRef := plumbing.NewBranchReferenceName(strings.Join(branchName, "/")) + return r.Storer.SetReference(plumbing.NewSymbolicReference(plumbing.HEAD, newRef)) +} + +func doesRefExist(uploader gitUtils, ref string) (bool, bool, error) { + // git ls-remote + remoteRefs, err := uploader.listRemote() + if err != nil { + if errors.Is(err, transport.ErrEmptyRemoteRepository) { + return false, true, nil + } + return false, false, err + } + for _, r := range remoteRefs { + if string(r.Name()) == ref { + return true, false, nil + } + } + return false, false, nil +} + +func clone(uploader gitUtils, url, token, ref, dir string, repoEmpty, refExists bool) (*git.Repository, error) { + if repoEmpty { + return uploader.initRepo(dir) + } + + opts := &git.CloneOptions{ + URL: url, + Auth: &http.BasicAuth{ + Username: "does-not-matter", + Password: token, + }, + SingleBranch: true, + Depth: 1, + } + if refExists { + opts.ReferenceName = plumbing.ReferenceName(ref) + // git clone -b --single-branch --depth=1 + return uploader.cloneRepo(dir, opts) + } + + // git clone --single-branch --depth=1 + r, err := uploader.cloneRepo(dir, opts) + if err != nil { + return nil, err + } + + // git switch --orphan + err = uploader.switchOrphan(ref, r) + if err != nil { + return nil, err + } + return r, nil +} + +func cleanDir(t worktree) error { + // git rm -r + err := t.RemoveGlob("*") + if 
err != nil { + return err + } + // git clean -d + err = t.Clean(&git.CleanOptions{Dir: true}) + return err +} + +func add(t worktree) error { + // git add --all + return t.AddWithOptions(&git.AddOptions{ + All: true, + }) +} + +func commit(r repository, t worktree, sourceCommitId, sourceRepo string) (*object.Commit, error) { + // git commit --allow-empty -m + newCommit, err := t.Commit(fmt.Sprintf(CommitMessageMirroringCode, sourceCommitId, sourceRepo), &git.CommitOptions{ + AllowEmptyCommits: true, + Author: &object.Signature{ + When: time.Now(), + }, + }) + if err != nil { + return nil, err + } + return r.CommitObject(newCommit) +} + +func push(r repository, token string) error { + // git push + return r.Push(&git.PushOptions{ + Auth: &http.BasicAuth{ + Username: "does-not-matter", + Password: token, + }, + }) +} + +func unzip(zipPath, targetDir, srcDir, dbDir string) error { + r, err := zip.OpenReader(zipPath) + if err != nil { + return err + } + defer r.Close() + + for _, f := range r.File { + fName := f.Name + + if runtime.GOOS == "windows" { + fNameSplit := strings.Split(fName, "/") + if len(fNameSplit) == 0 { + continue + } + fNameSplit[0] = strings.Replace(fNameSplit[0], "_", ":", 1) + fName = strings.Join(fNameSplit, fmt.Sprintf("%c", os.PathSeparator)) + } + if !strings.Contains(fName, srcDir) || strings.Contains(fName, dbDir) { + continue + } + + rc, err := f.Open() + if err != nil { + return err + } + + fName = strings.TrimPrefix(fName, srcDir) + fpath := filepath.Join(targetDir, fName) + if f.FileInfo().IsDir() { + os.MkdirAll(fpath, os.ModePerm) + rc.Close() + continue + } + err = os.MkdirAll(filepath.Dir(fpath), os.ModePerm) + if err != nil { + rc.Close() + return err + } + + fNew, err := os.OpenFile(fpath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode()) + if err != nil { + rc.Close() + return err + } + + _, err = io.Copy(fNew, rc) + if err != nil { + rc.Close() + fNew.Close() + return err + } + rc.Close() + fNew.Close() + } + return nil +} + +func getSourceLocationPrefix(fileName string) (string, error) { + type codeqlDatabase struct { + SourceLocation string `yaml:"sourceLocationPrefix"` + } + var db codeqlDatabase + file, err := os.ReadFile(fileName) + if err != nil { + return "", err + } + err = yaml.Unmarshal(file, &db) + if err != nil { + return "", err + } + + return db.SourceLocation, nil +} diff --git a/pkg/codeql/github_repo_upload_test.go b/pkg/codeql/github_repo_upload_test.go new file mode 100644 index 0000000000..3911229827 --- /dev/null +++ b/pkg/codeql/github_repo_upload_test.go @@ -0,0 +1,427 @@ +package codeql + +import ( + "archive/zip" + "fmt" + "io" + "os" + "path" + "path/filepath" + "strings" + "testing" + + "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/plumbing" + "github.com/go-git/go-git/v5/plumbing/object" + "github.com/stretchr/testify/assert" + "gopkg.in/yaml.v2" + "k8s.io/utils/strings/slices" +) + +const ( + notExists = "not-exists" + exists = "exists" + refsHeads = "refs/heads/" +) + +type gitMock struct { + ref string + url string +} + +func newGitMock(ref, url string) *gitMock { + return &gitMock{ref: ref, url: url} +} + +func (g *gitMock) listRemote() ([]reference, error) { + if g.url == notExists { + return nil, fmt.Errorf("repository not found") + } + list := []*referenceMock{ + { + name: refsHeads + "ref1", + }, + { + name: refsHeads + "ref2", + }, + { + name: refsHeads + "ref3", + }, + { + name: refsHeads + exists, + }, + } + var convertedList []reference + for _, ref := range list { + convertedList = append(convertedList, 
ref) + } + return convertedList, nil +} + +func (g *gitMock) cloneRepo(dir string, opts *git.CloneOptions) (*git.Repository, error) { + if opts.Auth == nil { + return nil, fmt.Errorf("error") + } + if opts.URL == notExists { + return nil, fmt.Errorf("error") + } + return &git.Repository{}, nil +} + +func (g *gitMock) switchOrphan(branch string, repo *git.Repository) error { + return nil +} + +func (g *gitMock) initRepo(dir string) (*git.Repository, error) { + return &git.Repository{}, nil +} + +type referenceMock struct { + name string +} + +func (r *referenceMock) Name() plumbing.ReferenceName { + return plumbing.ReferenceName(r.name) +} + +type repoMock struct{} + +func (r *repoMock) Worktree() (*git.Worktree, error) { + return &git.Worktree{}, nil +} + +func (r *repoMock) CommitObject(commit plumbing.Hash) (*object.Commit, error) { + return &object.Commit{Hash: commit}, nil +} + +func (r *repoMock) Push(opts *git.PushOptions) error { + if opts.Auth == nil { + return fmt.Errorf("error") + } + return nil +} + +type worktreeMock struct{} + +func (t *worktreeMock) RemoveGlob(pattern string) error { + return nil +} + +func (t *worktreeMock) Clean(opts *git.CleanOptions) error { + return nil +} + +func (t *worktreeMock) AddWithOptions(opts *git.AddOptions) error { + return nil +} + +func (t *worktreeMock) Commit(msg string, opts *git.CommitOptions) (plumbing.Hash, error) { + if opts.Author == nil { + return plumbing.Hash{}, fmt.Errorf("error") + } + return plumbing.Hash{}, nil +} + +func TestDoesRefExist(t *testing.T) { + t.Parallel() + t.Run("Invalid repository", func(t *testing.T) { + ghUploader := newGitMock(refsHeads+notExists, notExists) + _, _, err := doesRefExist(ghUploader, refsHeads+notExists) + assert.Error(t, err) + + }) + t.Run("Ref exists", func(t *testing.T) { + ghUploader := newGitMock(refsHeads+exists, exists) + ok, _, err := doesRefExist(ghUploader, refsHeads+exists) + assert.NoError(t, err) + assert.True(t, ok) + }) + t.Run("Ref doesn't exist", func(t *testing.T) { + ghUploader := newGitMock(refsHeads+notExists, exists) + ok, _, err := doesRefExist(ghUploader, refsHeads+notExists) + assert.NoError(t, err) + assert.False(t, ok) + }) +} + +func TestClone(t *testing.T) { + t.Parallel() + t.Run("Created new branch", func(t *testing.T) { + ghUploader := newGitMock(refsHeads+notExists, exists) + repo, err := clone(ghUploader, ghUploader.url, "", ghUploader.ref, "", false, false) + assert.NoError(t, err) + assert.NotNil(t, repo) + }) + t.Run("Target branch exists", func(t *testing.T) { + ghUploader := newGitMock(refsHeads+exists, exists) + repo, err := clone(ghUploader, ghUploader.url, "", ghUploader.ref, "", false, true) + assert.NoError(t, err) + assert.NotNil(t, repo) + }) + t.Run("Repo was empty", func(t *testing.T) { + ghUploader := newGitMock(refsHeads+exists, exists) + repo, err := clone(ghUploader, ghUploader.url, "", ghUploader.ref, "", true, false) + assert.NoError(t, err) + assert.NotNil(t, repo) + }) +} + +func TestClean(t *testing.T) { + t.Parallel() + t.Run("Success", func(t *testing.T) { + tree := &worktreeMock{} + err := cleanDir(tree) + assert.NoError(t, err) + }) +} + +func TestAdd(t *testing.T) { + t.Run("Success", func(t *testing.T) { + tree := &worktreeMock{} + err := add(tree) + assert.NoError(t, err) + }) +} + +func TestCommit(t *testing.T) { + t.Run("Success", func(t *testing.T) { + tree := &worktreeMock{} + repo := &repoMock{} + c, err := commit(repo, tree, "", "") + assert.NoError(t, err) + assert.NotNil(t, c) + }) +} + +func TestPush(t *testing.T) { + 
t.Run("Success", func(t *testing.T) { + repo := &repoMock{} + err := push(repo, "") + assert.NoError(t, err) + }) +} + +func TestUnzip(t *testing.T) { + t.Parallel() + + t.Run("Success", func(t *testing.T) { + targetDir, err := os.MkdirTemp("", "tmp_target") + if err != nil { + panic(err) + } + defer os.RemoveAll(targetDir) + sourceDir, err := os.MkdirTemp("", "tmp_source") + if err != nil { + panic(err) + } + defer os.RemoveAll(sourceDir) + zipPath := filepath.Join(sourceDir, "src.zip") + + srcFilenames := []string{ + filepath.Join(sourceDir, "file1"), + filepath.Join(sourceDir, "file2"), + filepath.Join(sourceDir, "codeqlDB"), + filepath.Join(sourceDir, "subfolder1", "file1"), + filepath.Join(sourceDir, "subfolder1", "file2"), + filepath.Join(sourceDir, "subfolder2", "file1"), + } + err = createZIP(zipPath, srcFilenames) + if err != nil { + panic(err) + } + assert.NoError(t, unzip(zipPath, targetDir, sourceDir, "codeqlDB")) + targetFilenames := []string{ + filepath.Join(targetDir, "file1"), + filepath.Join(targetDir, "file2"), + filepath.Join(targetDir, "subfolder1", "file1"), + filepath.Join(targetDir, "subfolder1", "file2"), + filepath.Join(targetDir, "subfolder2", "file1"), + } + checkExistedFiles(t, targetDir, targetFilenames) + }) + + t.Run("Empty zip", func(t *testing.T) { + targetDir, err := os.MkdirTemp("", "tmp_target") + if err != nil { + panic(err) + } + defer os.RemoveAll(targetDir) + sourceDir, err := os.MkdirTemp("", "tmp_source") + if err != nil { + panic(err) + } + defer os.RemoveAll(sourceDir) + zipPath := filepath.Join(sourceDir, "src.zip") + + filenames := []string{} + err = createZIP(zipPath, filenames) + if err != nil { + panic(err) + } + assert.NoError(t, unzip(zipPath, targetDir, sourceDir, "codeqlDB")) + checkExistedFiles(t, targetDir, filenames) + }) + + t.Run("zip not found", func(t *testing.T) { + targetDir, err := os.MkdirTemp("", "tmp_target") + if err != nil { + panic(err) + } + defer os.RemoveAll(targetDir) + sourceDir, err := os.MkdirTemp("", "tmp_source") + if err != nil { + panic(err) + } + defer os.RemoveAll(sourceDir) + zipPath := filepath.Join(sourceDir, "src.zip") + + assert.Error(t, unzip(zipPath, targetDir, sourceDir, "codeqlDB")) + }) + + t.Run("extra files in zip", func(t *testing.T) { + targetDir, err := os.MkdirTemp("", "tmp_target") + if err != nil { + panic(err) + } + defer os.RemoveAll(targetDir) + sourceDir, err := os.MkdirTemp("", "tmp_source") + if err != nil { + panic(err) + } + defer os.RemoveAll(sourceDir) + zipPath := filepath.Join(sourceDir, "src.zip") + + srcFilenames := []string{ + filepath.Join(sourceDir, "file1"), + filepath.Join(sourceDir, "file2"), + filepath.Join(sourceDir, "subfolder1", "file1"), + filepath.Join(sourceDir, "subfolder1", "file2"), + filepath.Join(sourceDir, "subfolder2", "file1"), + filepath.Join(targetDir, "extrafile1"), + filepath.Join(targetDir, "extrafile2"), + filepath.Join(targetDir, "subfolder1", "extrafile1"), + } + err = createZIP(zipPath, srcFilenames) + if err != nil { + panic(err) + } + assert.NoError(t, unzip(zipPath, targetDir, sourceDir, "codeqlDB")) + targetFilenames := []string{ + filepath.Join(targetDir, "file1"), + filepath.Join(targetDir, "file2"), + filepath.Join(targetDir, "subfolder1", "file1"), + filepath.Join(targetDir, "subfolder1", "file2"), + filepath.Join(targetDir, "subfolder2", "file1"), + } + checkExistedFiles(t, targetDir, targetFilenames) + }) +} + +func TestGetSourceLocationPrefix(t *testing.T) { + t.Parallel() + t.Run("Success", func(t *testing.T) { + filename := 
"test-file.yml" + location := "/some/location" + err := createFile(filename, location, false) + assert.NoError(t, err) + defer os.Remove(filename) + srcLocationPrefix, err := getSourceLocationPrefix(filename) + assert.NoError(t, err) + assert.Equal(t, location, srcLocationPrefix) + }) + + t.Run("No file found", func(t *testing.T) { + filename := "test-file-2.yml" + _, err := getSourceLocationPrefix(filename) + assert.Error(t, err) + }) + + t.Run("Empty file", func(t *testing.T) { + filename := "test-file-3.yml" + err := createFile(filename, "", true) + assert.NoError(t, err) + defer os.Remove(filename) + srcLocationPrefix, err := getSourceLocationPrefix(filename) + assert.NoError(t, err) + assert.Empty(t, srcLocationPrefix) + }) +} + +func checkExistedFiles(t *testing.T, dir string, filenames []string) { + counter := 0 + err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if path == dir || info.IsDir() { + return nil + } + assert.True(t, slices.Contains(filenames, path)) + counter++ + return nil + }) + assert.NoError(t, err) + assert.Equal(t, len(filenames), counter) +} + +func createZIP(zipPath string, filenames []string) error { + archive, err := os.Create(zipPath) + if err != nil { + return err + } + defer archive.Close() + + zipWriter := zip.NewWriter(archive) + defer zipWriter.Close() + + for _, filename := range filenames { + writer, err := zipWriter.Create(filename) + if err != nil { + return err + } + + reader := strings.NewReader("test content\n") + if _, err := io.Copy(writer, reader); err != nil { + return err + } + } + return nil +} + +func createFile(fileName, location string, isEmpty bool) error { + err := ensureBaseDir(fileName) + if err != nil { + return err + } + f, err := os.OpenFile(fileName, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.ModePerm) + if err != nil { + return err + } + defer f.Close() + + if isEmpty { + return nil + } + + type codeqlDatabase struct { + SourceLocation string `yaml:"sourceLocationPrefix"` + OtherInfo string `yaml:"otherInfo"` + } + db := codeqlDatabase{SourceLocation: location, OtherInfo: "test"} + data, err := yaml.Marshal(db) + if err != nil { + return err + } + + _, err = f.Write(data) + return err +} + +func ensureBaseDir(fpath string) error { + baseDir := path.Dir(fpath) + info, err := os.Stat(baseDir) + if err == nil && info.IsDir() { + return nil + } + return os.MkdirAll(baseDir, 0755) +} diff --git a/pkg/codeql/reporting.go b/pkg/codeql/reporting.go index 380307d1e9..fc095ccffa 100644 --- a/pkg/codeql/reporting.go +++ b/pkg/codeql/reporting.go @@ -2,10 +2,13 @@ package codeql import ( "encoding/json" + "fmt" "path/filepath" + "strings" "github.com/SAP/jenkins-library/pkg/log" "github.com/SAP/jenkins-library/pkg/piperutils" + "github.com/SAP/jenkins-library/pkg/toolrecord" "github.com/pkg/errors" ) @@ -24,6 +27,14 @@ type CodeqlFindings struct { Audited int `json:"audited"` } +type RepoInfo struct { + ServerUrl string + Repo string + CommitId string + Ref string + Owner string +} + func WriteJSONReport(jsonReport CodeqlAudit, modulePath string) ([]piperutils.Path, error) { utils := piperutils.Files{} reportPaths := []piperutils.Path{} @@ -44,3 +55,84 @@ func WriteJSONReport(jsonReport CodeqlAudit, modulePath string) ([]piperutils.Pa return reportPaths, nil } + +func BuildRepoReference(repository, analyzedRef string) (string, error) { + ref := strings.Split(analyzedRef, "/") + if len(ref) < 3 { + return "", errors.New(fmt.Sprintf("Wrong analyzedRef format: %s", analyzedRef)) + 
} + if strings.Contains(analyzedRef, "pull") { + if len(ref) < 4 { + return "", errors.New(fmt.Sprintf("Wrong analyzedRef format: %s", analyzedRef)) + } + return fmt.Sprintf("%s/pull/%s", repository, ref[2]), nil + } + return fmt.Sprintf("%s/tree/%s", repository, ref[2]), nil +} + +func CreateAndPersistToolRecord(utils piperutils.FileUtils, repoInfo RepoInfo, repoReference, repoUrl, modulePath string) (string, error) { + toolRecord, err := createToolRecordCodeql(utils, repoInfo, repoReference, repoUrl, modulePath) + if err != nil { + return "", err + } + + toolRecordFileName, err := persistToolRecord(toolRecord) + if err != nil { + return "", err + } + + return toolRecordFileName, nil +} + +func createToolRecordCodeql(utils piperutils.FileUtils, repoInfo RepoInfo, repoUrl, repoReference, modulePath string) (*toolrecord.Toolrecord, error) { + record := toolrecord.New(utils, modulePath, "codeql", repoInfo.ServerUrl) + + if repoInfo.ServerUrl == "" { + return record, errors.New("Repository not set") + } + + if repoInfo.CommitId == "" || repoInfo.CommitId == "NA" { + return record, errors.New("CommitId not set") + } + + if repoInfo.Ref == "" { + return record, errors.New("Analyzed Reference not set") + } + + record.DisplayName = fmt.Sprintf("%s %s - %s %s", repoInfo.Owner, repoInfo.Repo, repoInfo.Ref, repoInfo.CommitId) + record.DisplayURL = fmt.Sprintf("%s/security/code-scanning?query=is:open+ref:%s", repoUrl, repoInfo.Ref) + + err := record.AddKeyData("repository", + fmt.Sprintf("%s/%s", repoInfo.Owner, repoInfo.Repo), + fmt.Sprintf("%s %s", repoInfo.Owner, repoInfo.Repo), + repoUrl) + if err != nil { + return record, err + } + + err = record.AddKeyData("repositoryReference", + repoInfo.Ref, + fmt.Sprintf("%s - %s", repoInfo.Repo, repoInfo.Ref), + repoReference) + if err != nil { + return record, err + } + + err = record.AddKeyData("scanResult", + fmt.Sprintf("%s/%s", repoInfo.Ref, repoInfo.CommitId), + fmt.Sprintf("%s %s - %s %s", repoInfo.Owner, repoInfo.Repo, repoInfo.Ref, repoInfo.CommitId), + fmt.Sprintf("%s/security/code-scanning?query=is:open+ref:%s", repoUrl, repoInfo.Ref)) + if err != nil { + return record, err + } + + return record, nil +} + +func persistToolRecord(toolRecord *toolrecord.Toolrecord) (string, error) { + err := toolRecord.Persist() + if err != nil { + return "", err + } + return toolRecord.GetFileName(), nil +} diff --git a/pkg/codeql/reporting_test.go b/pkg/codeql/reporting_test.go new file mode 100644 index 0000000000..2587a559fd --- /dev/null +++ b/pkg/codeql/reporting_test.go @@ -0,0 +1,108 @@ +package codeql + +import ( + "fmt" + "testing" + + "github.com/SAP/jenkins-library/pkg/mock" + "github.com/stretchr/testify/assert" +) + +type codeqlExecuteScanMockUtils struct { + *mock.ExecMockRunner + *mock.FilesMock +} + +func newCodeqlExecuteScanTestsUtils() codeqlExecuteScanMockUtils { + utils := codeqlExecuteScanMockUtils{ + ExecMockRunner: &mock.ExecMockRunner{}, + FilesMock: &mock.FilesMock{}, + } + return utils +} + +func TestBuildRepoReference(t *testing.T) { + t.Run("Valid Ref with branch", func(t *testing.T) { + repository := "https://github.hello.test/Testing/fortify" + analyzedRef := "refs/head/branch" + ref, err := BuildRepoReference(repository, analyzedRef) + assert.NoError(t, err) + assert.Equal(t, "https://github.hello.test/Testing/fortify/tree/branch", ref) + }) + t.Run("Valid Ref with PR", func(t *testing.T) { + repository := "https://github.hello.test/Testing/fortify" + analyzedRef := "refs/pull/1/merge" + ref, err := BuildRepoReference(repository, 
analyzedRef) + assert.NoError(t, err) + assert.Equal(t, "https://github.hello.test/Testing/fortify/pull/1", ref) + }) + t.Run("Invalid Ref without branch name", func(t *testing.T) { + repository := "https://github.hello.test/Testing/fortify" + analyzedRef := "refs/head" + ref, err := BuildRepoReference(repository, analyzedRef) + assert.Error(t, err) + assert.ErrorContains(t, err, "Wrong analyzedRef format") + assert.Equal(t, "", ref) + }) + t.Run("Invalid Ref without PR id", func(t *testing.T) { + repository := "https://github.hello.test/Testing/fortify" + analyzedRef := "refs/pull/merge" + ref, err := BuildRepoReference(repository, analyzedRef) + assert.Error(t, err) + assert.ErrorContains(t, err, "Wrong analyzedRef format") + assert.Equal(t, "", ref) + }) +} + +func getRepoReferences(repoInfo RepoInfo) (string, string) { + repoUrl := fmt.Sprintf("%s/%s/%s", repoInfo.ServerUrl, repoInfo.Owner, repoInfo.Repo) + repoReference, _ := BuildRepoReference(repoUrl, repoInfo.Ref) + return repoUrl, repoReference +} + +func TestCreateToolRecordCodeql(t *testing.T) { + modulePath := "./" + t.Run("Valid toolrun file", func(t *testing.T) { + repoInfo := RepoInfo{ServerUrl: "https://github.hello.test", CommitId: "test", Ref: "refs/head/branch", Owner: "Testing", Repo: "fortify"} + repoUrl, repoReference := getRepoReferences(repoInfo) + toolRecord, err := createToolRecordCodeql(newCodeqlExecuteScanTestsUtils(), repoInfo, repoUrl, repoReference, modulePath) + assert.NoError(t, err) + assert.Equal(t, toolRecord.ToolName, "codeql") + assert.Equal(t, toolRecord.ToolInstance, "https://github.hello.test") + assert.Equal(t, toolRecord.DisplayName, "Testing fortify - refs/head/branch test") + assert.Equal(t, toolRecord.DisplayURL, "https://github.hello.test/Testing/fortify/security/code-scanning?query=is:open+ref:refs/head/branch") + }) + t.Run("Empty repository URL", func(t *testing.T) { + repoInfo := RepoInfo{ServerUrl: "", CommitId: "test", Ref: "refs/head/branch", Owner: "Testing", Repo: "fortify"} + repoUrl, repoReference := getRepoReferences(repoInfo) + _, err := createToolRecordCodeql(newCodeqlExecuteScanTestsUtils(), repoInfo, repoUrl, repoReference, modulePath) + + assert.Error(t, err) + assert.ErrorContains(t, err, "Repository not set") + }) + + t.Run("Empty analyzedRef", func(t *testing.T) { + repoInfo := RepoInfo{ServerUrl: "https://github.hello.test", CommitId: "test", Ref: "", Owner: "Testing", Repo: "fortify"} + repoUrl, repoReference := getRepoReferences(repoInfo) + _, err := createToolRecordCodeql(newCodeqlExecuteScanTestsUtils(), repoInfo, repoUrl, repoReference, modulePath) + + assert.Error(t, err) + assert.ErrorContains(t, err, "Analyzed Reference not set") + }) + + t.Run("Empty CommitId", func(t *testing.T) { + repoInfo := RepoInfo{ServerUrl: "https://github.hello.test", CommitId: "", Ref: "refs/head/branch", Owner: "Testing", Repo: "fortify"} + repoUrl, repoReference := getRepoReferences(repoInfo) + _, err := createToolRecordCodeql(newCodeqlExecuteScanTestsUtils(), repoInfo, repoUrl, repoReference, modulePath) + + assert.Error(t, err) + assert.ErrorContains(t, err, "CommitId not set") + }) + t.Run("Invalid analyzedRef", func(t *testing.T) { + repoInfo := RepoInfo{ServerUrl: "https://github.hello.test", CommitId: "", Ref: "refs/branch", Owner: "Testing", Repo: "fortify"} + repoUrl, repoReference := getRepoReferences(repoInfo) + _, err := createToolRecordCodeql(newCodeqlExecuteScanTestsUtils(), repoInfo, repoUrl, repoReference, modulePath) + + assert.Error(t, err) + }) +} diff --git 
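The new pkg/codeql/github_repo_upload.go above chains several go-git operations (ls-remote, clone or init, orphan switch, add, commit, push) behind a single UploadProjectToGithub call. A minimal usage sketch, assuming a GitHub token, a target mirror repository and a local CodeQL database directory are already available; every concrete value below is a placeholder, not part of this change:

package main

import (
	"log"

	"github.com/SAP/jenkins-library/pkg/codeql"
)

func main() {
	// All values are illustrative; the real step derives them from its configuration.
	uploader, err := codeql.NewGitUploaderInstance(
		"<github-token>",           // token used for basic auth against the target repository
		"refs/heads/codeql-mirror", // ref that should receive the mirrored sources
		"./codeqlDB",               // CodeQL database dir containing src.zip and codeql-database.yml
		"<source-commit-id>",       // commit of the scanned repository
		"https://github.example/org/source-repo",
		"https://github.example/org/target-repo",
	)
	if err != nil {
		log.Fatal(err)
	}

	// Unpacks src.zip into a fresh worktree of the target repository and pushes a mirror commit.
	commitID, err := uploader.UploadProjectToGithub()
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("mirrored sources pushed as commit %s", commitID)
}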
a/pkg/command/command.go b/pkg/command/command.go index d745a1a681..521d47ac2c 100644 --- a/pkg/command/command.go +++ b/pkg/command/command.go @@ -42,6 +42,7 @@ type runner interface { type ExecRunner interface { runner RunExecutable(executable string, params ...string) error + RunExecutableWithAttrs(executable string, sysProcAttr *syscall.SysProcAttr, params ...string) error RunExecutableInBackground(executable string, params ...string) (Execution, error) } @@ -127,9 +128,18 @@ func (c *Command) RunShell(shell, script string) error { // // Thus the executable needs to be on the PATH of the current process and it is not sufficient to alter the PATH on cmd.Env. func (c *Command) RunExecutable(executable string, params ...string) error { + return c.RunExecutableWithAttrs(executable, nil, params...) +} + +// RunExecutableWithAttrs runs the specified executable with parameters and as a specified UID and GID +// !! While the cmd.Env is applied during command execution, it is NOT involved when the actual executable is resolved. +// +// Thus the executable needs to be on the PATH of the current process and it is not sufficient to alter the PATH on cmd.Env. +func (c *Command) RunExecutableWithAttrs(executable string, sysProcAttr *syscall.SysProcAttr, params ...string) error { c.prepareOut() cmd := ExecCommand(executable, params...) + cmd.SysProcAttr = sysProcAttr if len(c.dir) > 0 { cmd.Dir = c.dir diff --git a/pkg/config/stepmeta.go b/pkg/config/stepmeta.go index c31ccd4000..659451c338 100644 --- a/pkg/config/stepmeta.go +++ b/pkg/config/stepmeta.go @@ -13,6 +13,8 @@ import ( "github.com/pkg/errors" ) +const SupportedVolumeName = "volume" + // StepData defines the metadata for a step, like step descriptions, parameters, ... type StepData struct { Metadata StepMetadata `json:"metadata"` @@ -105,25 +107,25 @@ type StepOutputs struct { // Container defines an execution container type Container struct { //ToDo: check dockerOptions, dockerVolumeBind, containerPortMappings, sidecarOptions, sidecarVolumeBind - Command []string `json:"command"` - EnvVars []EnvVar `json:"env"` - Image string `json:"image"` - ImagePullPolicy string `json:"imagePullPolicy"` - Name string `json:"name"` - ReadyCommand string `json:"readyCommand"` - Shell string `json:"shell"` - WorkingDir string `json:"workingDir"` - Conditions []Condition `json:"conditions,omitempty"` - Options []Option `json:"options,omitempty"` - //VolumeMounts []VolumeMount `json:"volumeMounts,omitempty"` + Command []string `json:"command"` + EnvVars []EnvVar `json:"env"` + Image string `json:"image"` + ImagePullPolicy string `json:"imagePullPolicy"` + Name string `json:"name"` + ReadyCommand string `json:"readyCommand"` + Shell string `json:"shell"` + WorkingDir string `json:"workingDir"` + Conditions []Condition `json:"conditions,omitempty"` + Options []Option `json:"options,omitempty"` + VolumeMounts []VolumeMount `json:"volumeMounts,omitempty"` } // ToDo: Add the missing Volumes part to enable the volume mount completely // VolumeMount defines a mount path -// type VolumeMount struct { -// MountPath string `json:"mountPath"` -// Name string `json:"name"` -//} +type VolumeMount struct { + Name string `json:"name"` + MountPath string `json:"mountPath"` +} // Option defines an docker option type Option struct { @@ -385,7 +387,7 @@ func (container *Container) commonConfiguration(keyPrefix string, config *map[st } putStringIfNotEmpty(*config, keyPrefix+"Workspace", container.WorkingDir) putSliceIfNotEmpty(*config, keyPrefix+"Options", 
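RunExecutableWithAttrs, added to pkg/command above, only threads a *syscall.SysProcAttr through to the underlying exec.Cmd; populating it is left to the caller. A hedged sketch of running a command under a different UID/GID on Linux (the numeric IDs and the command are invented):

package main

import (
	"syscall"

	"github.com/SAP/jenkins-library/pkg/command"
	"github.com/SAP/jenkins-library/pkg/log"
)

func main() {
	c := command.Command{}
	c.Stdout(log.Writer())
	c.Stderr(log.Writer())

	// Run as uid/gid 1000 instead of the current user (Linux only; values are illustrative).
	attrs := &syscall.SysProcAttr{
		Credential: &syscall.Credential{Uid: 1000, Gid: 1000},
	}
	if err := c.RunExecutableWithAttrs("id", attrs, "-a"); err != nil {
		log.Entry().WithError(err).Fatal("command failed")
	}
}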
OptionsAsStringSlice(container.Options)) - //putSliceIfNotEmpty(*config, keyPrefix+"VolumeBind", volumeMountsAsStringSlice(container.VolumeMounts)) + putSliceIfNotEmpty(*config, keyPrefix+"VolumeBind", volumeMountsAsStringSlice(container.VolumeMounts)) } @@ -518,11 +520,14 @@ func ResolveMetadata(gitHubTokens map[string]string, metaDataResolver func() map return metadata, nil } -//ToDo: Enable this when the Volumes part is also implemented -//func volumeMountsAsStringSlice(volumeMounts []VolumeMount) []string { -// e := []string{} -// for _, v := range volumeMounts { -// e = append(e, fmt.Sprintf("%v:%v", v.Name, v.MountPath)) -// } -// return e -//} +func volumeMountsAsStringSlice(volumeMounts []VolumeMount) []string { + e := []string{} + for _, v := range volumeMounts { + if v.Name != SupportedVolumeName { + log.Entry().Warningf("Unsupported volume name: %q, only %q is supported", v.Name, SupportedVolumeName) + continue + } + e = append(e, fmt.Sprintf("%v:%v", v.Name, v.MountPath)) + } + return e +} diff --git a/pkg/config/stepmeta_test.go b/pkg/config/stepmeta_test.go index 5452d95c40..5d4a4449fa 100644 --- a/pkg/config/stepmeta_test.go +++ b/pkg/config/stepmeta_test.go @@ -397,10 +397,10 @@ func TestGetContextDefaults(t *testing.T) { {Name: "opt1", Value: "optValue1"}, {Name: "opt2", Value: "optValue2"}, }, - //VolumeMounts: []VolumeMount{ - // {MountPath: "mp1", Name: "mn1"}, - // {MountPath: "mp2", Name: "mn2"}, - //}, + VolumeMounts: []VolumeMount{ + {MountPath: "mp1", Name: "volume"}, + {MountPath: "mp2", Name: "mn2"}, + }, }, }, Sidecars: []Container{ @@ -419,10 +419,10 @@ func TestGetContextDefaults(t *testing.T) { {Name: "opt3", Value: "optValue3"}, {Name: "opt4", Value: "optValue4"}, }, - //VolumeMounts: []VolumeMount{ - // {MountPath: "mp3", Name: "mn3"}, - // {MountPath: "mp4", Name: "mn4"}, - //}, + VolumeMounts: []VolumeMount{ + {MountPath: "mp3", Name: "mn3"}, + {MountPath: "mp4", Name: "volume"}, + }, }, }, }, @@ -451,7 +451,7 @@ func TestGetContextDefaults(t *testing.T) { assert.Equal(t, true, d.Defaults[0].Steps["testStep"]["dockerPullImage"], "dockerPullImage default not available") assert.Equal(t, "/test/dir", d.Defaults[0].Steps["testStep"]["dockerWorkspace"], "dockerWorkspace default not available") assert.Equal(t, []interface{}{"opt1 optValue1", "opt2 optValue2"}, d.Defaults[0].Steps["testStep"]["dockerOptions"], "dockerOptions default not available") - //assert.Equal(t, []interface{}{"mn1:mp1", "mn2:mp2"}, d.Defaults[0].Steps["testStep"]["dockerVolumeBind"], "dockerVolumeBind default not available") + assert.Equal(t, []interface{}{"volume:mp1"}, d.Defaults[0].Steps["testStep"]["dockerVolumeBind"], "dockerVolumeBind default not available") assert.Equal(t, "/sidecar/command", d.Defaults[0].Steps["testStep"]["sidecarCommand"], "sidecarCommand default not available") assert.Equal(t, map[string]interface{}{"env3": "val3", "env4": "val4"}, d.Defaults[0].Steps["testStep"]["sidecarEnvVars"], "sidecarEnvVars default not available") @@ -461,7 +461,7 @@ func TestGetContextDefaults(t *testing.T) { assert.Equal(t, "/sidecar/command", d.Defaults[0].Steps["testStep"]["sidecarReadyCommand"], "sidecarReadyCommand default not available") assert.Equal(t, "/sidecar/dir", d.Defaults[0].Steps["testStep"]["sidecarWorkspace"], "sidecarWorkspace default not available") assert.Equal(t, []interface{}{"opt3 optValue3", "opt4 optValue4"}, d.Defaults[0].Steps["testStep"]["sidecarOptions"], "sidecarOptions default not available") - //assert.Equal(t, []interface{}{"mn3:mp3", "mn4:mp4"}, 
d.Defaults[0].Steps["testStep"]["sidecarVolumeBind"], "sidecarVolumeBind default not available") + assert.Equal(t, []interface{}{"volume:mp4"}, d.Defaults[0].Steps["testStep"]["sidecarVolumeBind"], "sidecarVolumeBind default not available") }) t.Run("Container conditions", func(t *testing.T) { diff --git a/pkg/config/vault.go b/pkg/config/vault.go index a3dc924e29..e4e4829843 100644 --- a/pkg/config/vault.go +++ b/pkg/config/vault.go @@ -94,33 +94,33 @@ func (s *StepConfig) mixinVaultConfig(parameters []StepParameters, configs ...ma func getVaultClientFromConfig(config StepConfig, creds VaultCredentials) (vaultClient, error) { address, addressOk := config.Config["vaultServerUrl"].(string) // if vault isn't used it's not an error - if !addressOk || creds.VaultToken == "" && (creds.AppRoleID == "" || creds.AppRoleSecretID == "") { - log.Entry().Debug("Skipping fetching secrets from Vault since it is not configured") + log.Entry().Debug("Vault not configured") return nil, nil } + log.Entry().Info("Logging into Vault") + log.Entry().Debugf(" with URL %s", address) namespace := "" // namespaces are only available in vault enterprise so using them should be optional if config.Config["vaultNamespace"] != nil { namespace = config.Config["vaultNamespace"].(string) - log.Entry().Debugf("Using Vault namespace %s", namespace) + log.Entry().Debugf(" with namespace %s", namespace) } - var client vaultClient var err error clientConfig := &vault.Config{Config: &api.Config{Address: address}, Namespace: namespace} if creds.VaultToken != "" { - log.Entry().Debugf("Using Vault Token Authentication") + log.Entry().Debugf(" with Token authentication") client, err = vault.NewClient(clientConfig, creds.VaultToken) } else { - log.Entry().Debugf("Using Vault AppRole Authentication") + log.Entry().Debugf(" with AppRole authentication") client, err = vault.NewClientWithAppRole(clientConfig, creds.AppRoleID, creds.AppRoleSecretID) } if err != nil { + log.Entry().Info(" failed") return nil, err } - - log.Entry().Infof("Fetching secrets from Vault at %s", address) + log.Entry().Info(" succeeded") return client, nil } @@ -142,6 +142,8 @@ func resolveVaultReference(ref *ResourceReference, config *StepConfig, client va return } + log.Entry().Infof("Resolving '%s'", param.Name) + var secretValue *string for _, vaultPath := range getSecretReferencePaths(ref, config.Config) { // it should be possible to configure the root path were the secret is stored @@ -152,7 +154,7 @@ func resolveVaultReference(ref *ResourceReference, config *StepConfig, client va secretValue = lookupPath(client, vaultPath, ¶m) if secretValue != nil { - log.Entry().Infof("Resolved param '%s' with Vault path '%s'", param.Name, vaultPath) + log.Entry().Infof(" succeeded with Vault path '%s'", vaultPath) if ref.Type == "vaultSecret" { config.Config[param.Name] = *secretValue } else if ref.Type == "vaultSecretFile" { @@ -167,17 +169,17 @@ func resolveVaultReference(ref *ResourceReference, config *StepConfig, client va } } if secretValue == nil { - log.Entry().Warnf("Could not resolve param '%s' from Vault", param.Name) + log.Entry().Warn(" failed") } } func resolveVaultTestCredentialsWrapper(config *StepConfig, client vaultClient) { - log.Entry().Debugln("resolveVaultTestCredentialsWrapper") + log.Entry().Infof("Resolving test credentials wrapper") resolveVaultTestCredentialsWrapperBase(config, client, vaultTestCredentialPath, vaultTestCredentialKeys, resolveVaultTestCredentials) } func resolveVaultCredentialsWrapper(config *StepConfig, client vaultClient) { 
- log.Entry().Debugln("resolveVaultCredentialsWrapper") + log.Entry().Infof("Resolving credentials wrapper") resolveVaultTestCredentialsWrapperBase(config, client, vaultCredentialPath, vaultCredentialKeys, resolveVaultCredentials) } @@ -194,12 +196,12 @@ func resolveVaultTestCredentialsWrapperBase( vaultCredentialKeysCopy := config.Config[vaultCredKeys] if _, ok := vaultCredentialKeysCopy.([]interface{}); !ok { - log.Entry().Debugf("Not fetching credentials from vault since they are not (properly) configured: unknown type of keys") + log.Entry().Debugf(" failed, unknown type of keys") return } if len(vaultCredentialKeysCopy.([]interface{})) != len(vaultCredentialPathCopy.([]interface{})) { - log.Entry().Debugf("Not fetching credentials from vault since they are not (properly) configured: not same count of values and keys") + log.Entry().Debugf(" failed, not same count of values and keys") return } @@ -212,7 +214,7 @@ func resolveVaultTestCredentialsWrapperBase( config.Config[vaultCredPath] = vaultCredentialPathCopy config.Config[vaultCredKeys] = vaultCredentialKeysCopy default: - log.Entry().Debugf("Not fetching credentials from vault since they are not (properly) configured: unknown type of path") + log.Entry().Debugf(" failed, unknown type of path") return } } @@ -438,7 +440,7 @@ func createTemporarySecretFile(namePattern string, content string) (string, erro } func lookupPath(client vaultClient, path string, param *StepParameters) *string { - log.Entry().Debugf("Trying to resolve Vault parameter '%s' at '%s'", param.Name, path) + log.Entry().Debugf(" with Vault path '%s'", path) secret, err := client.GetKvSecret(path) if err != nil { log.Entry().WithError(err).Warnf("Couldn't fetch secret at '%s'", path) diff --git a/pkg/docker/crane.go b/pkg/docker/crane.go new file mode 100644 index 0000000000..5fcc563a1e --- /dev/null +++ b/pkg/docker/crane.go @@ -0,0 +1,40 @@ +package docker + +import ( + "context" + + "github.com/google/go-containerregistry/pkg/crane" + v1 "github.com/google/go-containerregistry/pkg/v1" +) + +type CraneUtilsBundle struct{} + +func (c *CraneUtilsBundle) CopyImage(ctx context.Context, src, dest, platform string) error { + p, err := parsePlatform(platform) + if err != nil { + return err + } + return crane.Copy(src, dest, crane.WithContext(ctx), crane.WithPlatform(p)) +} + +func (c *CraneUtilsBundle) PushImage(ctx context.Context, im v1.Image, dest, platform string) error { + p, err := parsePlatform(platform) + if err != nil { + return err + } + return crane.Push(im, dest, crane.WithContext(ctx), crane.WithPlatform(p)) +} + +func (c *CraneUtilsBundle) LoadImage(ctx context.Context, src string) (v1.Image, error) { + return crane.Load(src, crane.WithContext(ctx)) +} + +// parsePlatform is a wrapper for v1.ParsePlatform. 
It is necessary because +// v1.ParsePlatform returns an empty struct when the platform is equal to an empty string, +// whereas we expect 'nil' +func parsePlatform(p string) (*v1.Platform, error) { + if p == "" { + return nil, nil + } + return v1.ParsePlatform(p) +} diff --git a/pkg/docker/docker.go b/pkg/docker/docker.go index dc614119fe..d0619b7dd9 100644 --- a/pkg/docker/docker.go +++ b/pkg/docker/docker.go @@ -11,19 +11,18 @@ import ( "regexp" "strings" - "github.com/SAP/jenkins-library/pkg/log" - "github.com/SAP/jenkins-library/pkg/piperutils" - "github.com/pkg/errors" - "github.com/docker/cli/cli/config" "github.com/docker/cli/cli/config/configfile" - cranecmd "github.com/google/go-containerregistry/cmd/crane/cmd" "github.com/google/go-containerregistry/pkg/authn" "github.com/google/go-containerregistry/pkg/crane" "github.com/google/go-containerregistry/pkg/name" v1 "github.com/google/go-containerregistry/pkg/v1" "github.com/google/go-containerregistry/pkg/v1/remote" + "github.com/pkg/errors" + + "github.com/SAP/jenkins-library/pkg/log" + "github.com/SAP/jenkins-library/pkg/piperutils" ) // AuthEntry defines base64 encoded username:password required inside a Docker config.json @@ -93,9 +92,10 @@ func CreateDockerConfigJSON(registryURL, username, password, targetPath, configP targetPath = configPath } + dockerConfigContent := []byte{} dockerConfig := map[string]interface{}{} - if exists, _ := utils.FileExists(configPath); exists { - dockerConfigContent, err := utils.FileRead(configPath) + if exists, err := utils.FileExists(configPath); exists { + dockerConfigContent, err = utils.FileRead(configPath) if err != nil { return "", fmt.Errorf("failed to read file '%v': %w", configPath, err) } @@ -106,6 +106,13 @@ func CreateDockerConfigJSON(registryURL, username, password, targetPath, configP } } + if registryURL == "" || password == "" || username == "" { + if err := fileWrite(targetPath, dockerConfigContent, utils); err != nil { + return "", err + } + return targetPath, nil + } + credentialsBase64 := base64.StdEncoding.EncodeToString([]byte(fmt.Sprintf("%v:%v", username, password))) dockerAuth := AuthEntry{Auth: credentialsBase64} @@ -125,17 +132,24 @@ func CreateDockerConfigJSON(registryURL, username, password, targetPath, configP return "", fmt.Errorf("failed to marshal Docker config.json: %w", err) } - //always create the target path directories if any before writing - err = utils.MkdirAll(filepath.Dir(targetPath), 0777) + if err := fileWrite(targetPath, jsonResult, utils); err != nil { + return "", err + } + + return targetPath, nil +} + +func fileWrite(path string, content []byte, utils piperutils.FileUtils) error { + err := utils.MkdirAll(filepath.Dir(path), 0777) if err != nil { - return "", fmt.Errorf("failed to create directory path for the Docker config.json file %v:%w", targetPath, err) + return fmt.Errorf("failed to create directory path for the Docker config.json file %v:%w", path, err) } - err = utils.FileWrite(targetPath, jsonResult, 0666) + err = utils.FileWrite(path, content, 0666) if err != nil { - return "", fmt.Errorf("failed to write Docker config.json: %w", err) + return fmt.Errorf("failed to write Docker config.json: %w", err) } - return targetPath, nil + return nil } // Client defines an docker client object @@ -289,7 +303,7 @@ func ImageListWithFilePath(imageName string, excludes []string, trimDir string, for _, dockerfilePath := range matches { // make sure that the path we have is relative // ToDo: needs rework - //dockerfilePath = 
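The CraneUtilsBundle introduced in pkg/docker/crane.go above is a thin wrapper around go-containerregistry's crane package; parsePlatform only exists so that an empty platform string results in no explicit platform being set. A small sketch of copying an image for a specific platform; the registry names are placeholders and authentication is assumed to come from the Docker config resolution crane performs on its own:

package main

import (
	"context"
	"log"

	"github.com/SAP/jenkins-library/pkg/docker"
)

func main() {
	crane := docker.CraneUtilsBundle{}

	// Copy the linux/amd64 variant; with an empty platform string, parsePlatform
	// yields nil and no explicit platform option is applied.
	err := crane.CopyImage(context.Background(),
		"registry.example.com/source/app:1.0.0",
		"registry.example.com/mirror/app:1.0.0",
		"linux/amd64")
	if err != nil {
		log.Fatal(err)
	}
}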
strings.ReplaceAll(dockerfilePath, cwd, ".") + // dockerfilePath = strings.ReplaceAll(dockerfilePath, cwd, ".") if piperutils.ContainsString(excludes, dockerfilePath) { log.Entry().Infof("Discard %v since it is in the exclude list %v", dockerfilePath, excludes) diff --git a/pkg/docker/mock/crane.go b/pkg/docker/mock/crane.go new file mode 100644 index 0000000000..510e59e9bd --- /dev/null +++ b/pkg/docker/mock/crane.go @@ -0,0 +1,30 @@ +package mock + +import ( + "context" + "errors" + + v1 "github.com/google/go-containerregistry/pkg/v1" +) + +var ( + ErrCopyImage = errors.New("copy image err") + ErrPushImage = errors.New("push image err") + ErrLoadImage = errors.New("load image err") +) + +type CraneMockUtils struct { + ErrCopyImage, ErrPushImage, ErrLoadImage error +} + +func (c *CraneMockUtils) CopyImage(_ context.Context, src, dest, platform string) error { + return c.ErrCopyImage +} + +func (c *CraneMockUtils) PushImage(_ context.Context, im v1.Image, dest, platform string) error { + return c.ErrPushImage +} + +func (c *CraneMockUtils) LoadImage(_ context.Context, src string) (v1.Image, error) { + return nil, c.ErrLoadImage +} diff --git a/pkg/format/sarif.go b/pkg/format/sarif.go index bedae05d47..7eab9f1fa6 100644 --- a/pkg/format/sarif.go +++ b/pkg/format/sarif.go @@ -96,17 +96,20 @@ type PartialFingerprints struct { // SarifProperties adding additional information/context to the finding type SarifProperties struct { // common - RuleGUID string `json:"ruleGUID,omitempty"` - InstanceID string `json:"instanceID,omitempty"` - Audited bool `json:"audited"` - ToolSeverity string `json:"toolSeverity"` - ToolSeverityIndex int `json:"toolSeverityIndex"` - ToolState string `json:"toolState"` - ToolStateIndex int `json:"toolStateIndex"` - ToolAuditMessage string `json:"toolAuditMessage"` - UnifiedAuditState string `json:"unifiedAuditState"` - AuditRequirement string `json:"auditRequirement"` - AuditRequirementIndex int `json:"auditRequirementIndex"` + RuleGUID string `json:"ruleGUID,omitempty"` + InstanceID string `json:"instanceID,omitempty"` + Audited bool `json:"audited"` + ToolSeverity string `json:"toolSeverity"` + ToolSeverityIndex int `json:"toolSeverityIndex"` + ToolState string `json:"toolState"` + ToolStateIndex int `json:"toolStateIndex"` + ToolAuditMessage string `json:"toolAuditMessage"` + UnifiedAuditState string `json:"unifiedAuditState,omitempty"` + UnifiedSeverity string `json:"unifiedSeverity,omitempty"` + UnifiedCriticality float32 `json:"unifiedCriticality,omitempty"` + UnifiedAuditUser string `json:"unifiedAuditUser,omitempty"` + AuditRequirement string `json:"auditRequirement"` + AuditRequirementIndex int `json:"auditRequirementIndex"` // specific InstanceSeverity string `json:"instanceSeverity"` diff --git a/pkg/generator/helper/helper.go b/pkg/generator/helper/helper.go index 755970bb35..fc873b2a52 100644 --- a/pkg/generator/helper/helper.go +++ b/pkg/generator/helper/helper.go @@ -144,7 +144,7 @@ func {{.CobraCmdFuncName}}() *cobra.Command { log.RegisterHook(&sentryHook) } - if len({{if .ExportPrefix}}{{ .ExportPrefix }}.{{end}}GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len({{if .ExportPrefix}}{{ .ExportPrefix }}.{{end}}GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len({{if .ExportPrefix}}{{ .ExportPrefix }}.{{end}}GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: {{if .ExportPrefix}}{{ .ExportPrefix }}.{{end}}GeneralConfig.CorrelationID} 
log.RegisterHook(logCollector) diff --git a/pkg/generator/helper/testdata/TestProcessMetaFiles/custom_step_code_generated.golden b/pkg/generator/helper/testdata/TestProcessMetaFiles/custom_step_code_generated.golden index 0bba1c70ed..27c924cfd7 100644 --- a/pkg/generator/helper/testdata/TestProcessMetaFiles/custom_step_code_generated.golden +++ b/pkg/generator/helper/testdata/TestProcessMetaFiles/custom_step_code_generated.golden @@ -181,7 +181,7 @@ func TestStepCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(piperOsCmd.GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(piperOsCmd.GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(piperOsCmd.GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: piperOsCmd.GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/pkg/generator/helper/testdata/TestProcessMetaFiles/step_code_generated.golden b/pkg/generator/helper/testdata/TestProcessMetaFiles/step_code_generated.golden index 85ebce3f0c..41fe1202e4 100644 --- a/pkg/generator/helper/testdata/TestProcessMetaFiles/step_code_generated.golden +++ b/pkg/generator/helper/testdata/TestProcessMetaFiles/step_code_generated.golden @@ -180,7 +180,7 @@ func TestStepCommand() *cobra.Command { log.RegisterHook(&sentryHook) } - if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 { + if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 || len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 { splunkClient = &splunk.Splunk{} logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID} log.RegisterHook(logCollector) diff --git a/pkg/git/git.go b/pkg/git/git.go index f843d1ee36..c5758f281a 100644 --- a/pkg/git/git.go +++ b/pkg/git/git.go @@ -1,13 +1,14 @@ package git import ( + "time" + "github.com/SAP/jenkins-library/pkg/log" "github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5/plumbing" "github.com/go-git/go-git/v5/plumbing/object" "github.com/go-git/go-git/v5/plumbing/transport/http" "github.com/pkg/errors" - "time" ) // utilsWorkTree interface abstraction of git.Worktree to enable tests @@ -53,14 +54,18 @@ func commitSingleFile(filePath, commitMessage, author string, worktree utilsWork } // PushChangesToRepository Pushes all committed changes in the repository to the remote repository -func PushChangesToRepository(username, password string, force *bool, repository *git.Repository) error { - return pushChangesToRepository(username, password, force, repository) +func PushChangesToRepository(username, password string, force *bool, repository *git.Repository, caCerts []byte) error { + return pushChangesToRepository(username, password, force, repository, caCerts) } -func pushChangesToRepository(username, password string, force *bool, repository utilsRepository) error { +func pushChangesToRepository(username, password string, force *bool, repository utilsRepository, caCerts []byte) error { pushOptions := &git.PushOptions{ Auth: &http.BasicAuth{Username: username, Password: password}, } + + if len(caCerts) > 0 { + pushOptions.CABundle = caCerts + } if force != nil { pushOptions.Force = *force } @@ -72,16 +77,21 @@ func pushChangesToRepository(username, password string, force *bool, repository } // PlainClone Clones a non-bare repository to the provided directory -func PlainClone(username, password, serverURL, directory string) (*git.Repository, error) { +func PlainClone(username, password, serverURL, directory string, caCerts []byte) 
(*git.Repository, error) { abstractedGit := &abstractionGit{} - return plainClone(username, password, serverURL, directory, abstractedGit) + return plainClone(username, password, serverURL, directory, abstractedGit, caCerts) } -func plainClone(username, password, serverURL, directory string, abstractionGit utilsGit) (*git.Repository, error) { +func plainClone(username, password, serverURL, directory string, abstractionGit utilsGit, caCerts []byte) (*git.Repository, error) { gitCloneOptions := git.CloneOptions{ Auth: &http.BasicAuth{Username: username, Password: password}, URL: serverURL, } + + if len(caCerts) > 0 { + gitCloneOptions.CABundle = caCerts + } + repository, err := abstractionGit.plainClone(directory, false, &gitCloneOptions) if err != nil { return nil, errors.Wrap(err, "failed to clone git") diff --git a/pkg/git/git_test.go b/pkg/git/git_test.go index 39e50e3348..f5cf660db4 100644 --- a/pkg/git/git_test.go +++ b/pkg/git/git_test.go @@ -51,13 +51,13 @@ func TestPushChangesToRepository(t *testing.T) { t.Parallel() err := pushChangesToRepository("user", "password", nil, RepositoryMock{ test: t, - }) + }, []byte{}) assert.NoError(t, err) }) t.Run("error pushing", func(t *testing.T) { t.Parallel() - err := pushChangesToRepository("user", "password", nil, RepositoryMockError{}) + err := pushChangesToRepository("user", "password", nil, RepositoryMockError{}, []byte{}) assert.EqualError(t, err, "failed to push commit: error on push commits") }) } @@ -67,7 +67,7 @@ func TestPlainClone(t *testing.T) { t.Run("successful clone", func(t *testing.T) { t.Parallel() abstractedGit := &UtilsGitMock{} - _, err := plainClone("user", "password", "URL", "directory", abstractedGit) + _, err := plainClone("user", "password", "URL", "directory", abstractedGit, []byte{}) assert.NoError(t, err) assert.Equal(t, "directory", abstractedGit.path) assert.False(t, abstractedGit.isBare) @@ -78,7 +78,7 @@ func TestPlainClone(t *testing.T) { t.Run("error on cloning", func(t *testing.T) { t.Parallel() abstractedGit := UtilsGitMockError{} - _, err := plainClone("user", "password", "URL", "directory", abstractedGit) + _, err := plainClone("user", "password", "URL", "directory", abstractedGit, []byte{}) assert.EqualError(t, err, "failed to clone git: error during clone") }) } diff --git a/pkg/golang/golang.go b/pkg/golang/golang.go new file mode 100644 index 0000000000..8ff5d2f4df --- /dev/null +++ b/pkg/golang/golang.go @@ -0,0 +1,43 @@ +package golang + +import ( + "fmt" + "os" + "strings" + + "github.com/SAP/jenkins-library/pkg/command" +) + +type utilsBundle struct { + command.Command +} + +// prepare golang private packages for whitesource and blackduck(detectExecuteScan) +func PrepareGolangPrivatePackages(stepName, privateModules, privateModulesGitToken string) error { + utils := &utilsBundle{ + Command: command.Command{ + StepName: stepName, + }, + } + os.Setenv("GOPRIVATE", privateModules) + err := gitConfigurationForPrivateModules(privateModules, privateModulesGitToken, utils) + if err != nil { + return err + } + return nil +} + +func gitConfigurationForPrivateModules(privateMod string, token string, utils *utilsBundle) error { + privateMod = strings.ReplaceAll(privateMod, "/*", "") + privateMod = strings.ReplaceAll(privateMod, "*.", "") + modules := strings.Split(privateMod, ",") + for _, v := range modules { + authenticatedRepoURL := fmt.Sprintf("https://%s@%s", token, v) + repoBaseURL := fmt.Sprintf("https://%s", v) + err := utils.RunExecutable("git", "config", "--global", fmt.Sprintf("url.%s.insteadOf", 
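The pkg/git changes above extend PlainClone and PushChangesToRepository with an optional CA bundle for servers behind a custom certificate authority. A hedged sketch of how a caller might pass one in; the certificate path, URL and credentials are placeholders, and an empty or nil slice keeps the previous behaviour:

package main

import (
	"log"
	"os"

	"github.com/SAP/jenkins-library/pkg/git"
)

func main() {
	// Optional custom CA bundle in PEM format; leave caCerts empty to fall back to the default transport.
	caCerts, err := os.ReadFile("/path/to/custom-ca.pem")
	if err != nil {
		log.Fatal(err)
	}

	repo, err := git.PlainClone("user", "token", "https://git.example.com/org/repo.git", "./checkout", caCerts)
	if err != nil {
		log.Fatal(err)
	}

	// ... commit changes on the worktree ...

	if err := git.PushChangesToRepository("user", "token", nil, repo, caCerts); err != nil {
		log.Fatal(err)
	}
}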
authenticatedRepoURL), repoBaseURL) + if err != nil { + return err + } + } + return nil +} diff --git a/pkg/mock/fileUtils.go b/pkg/mock/fileUtils.go index 414ec1edd4..e3d3079c66 100644 --- a/pkg/mock/fileUtils.go +++ b/pkg/mock/fileUtils.go @@ -511,6 +511,10 @@ func (f *FilesMock) Chmod(path string, mode os.FileMode) error { return nil } +func (f *FilesMock) Chown(path string, uid, gid int) error { + return nil +} + func (f *FilesMock) Abs(path string) (string, error) { f.init() return f.toAbsPath(path), nil @@ -671,3 +675,28 @@ func (f *FilesMock) Open(name string) (io.ReadWriteCloser, error) { func (f *FilesMock) Create(name string) (io.ReadWriteCloser, error) { return f.OpenFile(name, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0o666) } + +type FilesMockRelativeGlob struct { + *FilesMock +} + +// Glob of FilesMockRelativeGlob cuts current directory path part from files if pattern is relative +func (f *FilesMockRelativeGlob) Glob(pattern string) ([]string, error) { + var matches []string + if f.files == nil { + return matches, nil + } + for path := range f.files { + if !filepath.IsAbs(pattern) { + path = strings.TrimLeft(path, f.Separator+f.CurrentDir) + } + path = strings.TrimLeft(path, f.Separator) + matched, _ := doublestar.PathMatch(pattern, path) + if matched { + matches = append(matches, path) + } + } + // The order in f.files is not deterministic, this would result in flaky tests. + sort.Strings(matches) + return matches, nil +} diff --git a/pkg/mock/runner.go b/pkg/mock/runner.go index 4ad53fba0e..5fa6e552ac 100644 --- a/pkg/mock/runner.go +++ b/pkg/mock/runner.go @@ -7,6 +7,7 @@ import ( "io" "regexp" "strings" + "syscall" "github.com/SAP/jenkins-library/pkg/command" ) @@ -25,10 +26,11 @@ type ExecMockRunner struct { } type ExecCall struct { - Execution *Execution - Async bool - Exec string - Params []string + Execution *Execution + SysProcAttrs *syscall.SysProcAttr + Async bool + Exec string + Params []string } type Execution struct { @@ -61,8 +63,11 @@ func (m *ExecMockRunner) AppendEnv(e []string) { } func (m *ExecMockRunner) RunExecutable(e string, p ...string) error { + return m.RunExecutableWithAttrs(e, nil, p...) 
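PrepareGolangPrivatePackages in the new pkg/golang package above sets GOPRIVATE and registers a global git url.<authenticated>.insteadOf rewrite for each configured module pattern. A short sketch of the intended call; the step name, module pattern and token are examples only:

package main

import (
	"log"

	"github.com/SAP/jenkins-library/pkg/golang"
)

func main() {
	// For "github.example.com/org/*" this exports GOPRIVATE=github.example.com/org/*
	// and rewrites https://github.example.com/org to https://<git-token>@github.example.com/org
	// in the global git configuration.
	err := golang.PrepareGolangPrivatePackages("whitesourceExecuteScan", "github.example.com/org/*", "<git-token>")
	if err != nil {
		log.Fatal(err)
	}
}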
+} - exec := ExecCall{Exec: e, Params: p} +func (m *ExecMockRunner) RunExecutableWithAttrs(e string, attrs *syscall.SysProcAttr, p ...string) error { + exec := ExecCall{Exec: e, SysProcAttrs: attrs, Params: p} m.Calls = append(m.Calls, exec) c := strings.Join(append([]string{e}, p...), " ") diff --git a/pkg/npm/publish.go b/pkg/npm/publish.go index 76cd4a62b0..628729894f 100644 --- a/pkg/npm/publish.go +++ b/pkg/npm/publish.go @@ -55,8 +55,9 @@ func (exec *Execute) PublishAllPackages(packageJSONFiles []string, registry, use func (exec *Execute) publish(packageJSON, registry, username, password string, packBeforePublish bool) error { execRunner := exec.Utils.GetExecRunner() - scope, err := exec.readPackageScope(packageJSON) + oldWorkingDirectory, err := exec.Utils.Getwd() + scope, err := exec.readPackageScope(packageJSON) if err != nil { return errors.Wrapf(err, "error reading package scope from %s", packageJSON) } @@ -80,6 +81,8 @@ func (exec *Execute) publish(packageJSON, registry, username, password string, p // temporary installation folder used to install BOM to be ignored log.Entry().Debug("adding tmp to npmignore") npmignore.Add("tmp/") + log.Entry().Debug("adding sboms to npmignore") + npmignore.Add("**/bom*.xml") npmrc := NewNPMRC(filepath.Dir(packageJSON)) @@ -130,42 +133,42 @@ func (exec *Execute) publish(packageJSON, registry, username, password string, p } if packBeforePublish { - tmpDirectory, err := exec.Utils.TempDir(".", "temp-") - - if err != nil { - return errors.Wrap(err, "creating temp directory failed") + // change directory in package json file , since npm pack will run only for that packages + if err := exec.Utils.Chdir(filepath.Dir(packageJSON)); err != nil { + return fmt.Errorf("failed to change into directory for executing script: %w", err) } - defer exec.Utils.RemoveAll(tmpDirectory) - - err = execRunner.RunExecutable("npm", "pack", "--pack-destination", tmpDirectory) - if err != nil { + if err := execRunner.RunExecutable("npm", "pack"); err != nil { return err } - _, err = exec.Utils.Copy(npmrc.filepath, filepath.Join(tmpDirectory, ".piperNpmrc")) - if err != nil { - return fmt.Errorf("error copying piperNpmrc file from %v to %v with error: %w", - npmrc.filepath, filepath.Join(tmpDirectory, ".piperNpmrc"), err) - } - - tarballs, err := exec.Utils.Glob(filepath.Join(tmpDirectory, "*.tgz")) - + tarballs, err := exec.Utils.Glob(filepath.Join(".", "*.tgz")) if err != nil { return err } - if len(tarballs) != 1 { + // we do not maintain the tarball file name and hence expect only one tarball that comes + // from the npm pack command + if len(tarballs) < 1 { + return fmt.Errorf("no tarballs found") + } + if len(tarballs) > 1 { return fmt.Errorf("found more tarballs than expected: %v", tarballs) } tarballFilePath, err := exec.Utils.Abs(tarballs[0]) - if err != nil { return err } - projectNpmrc := filepath.Join(filepath.Dir(packageJSON), ".npmrc") + // if a user has a .npmrc file and if it has a scope (e.g @sap to download scoped dependencies) + // if the package to be published also has the same scope (@sap) then npm gets confused + // and tries to publish to the scope that comes from the npmrc file + // and is not the desired publish since we want to publish to the other registry (from .piperNpmrc) + // file and not to the one mentioned in the users npmrc file + // to solve this we rename the users npmrc file before publish, the original npmrc is already + // packaged in the tarball and hence renaming it before publish should not have an effect + projectNpmrc := 
filepath.Join(".", ".npmrc") projectNpmrcExists, _ := exec.Utils.FileExists(projectNpmrc) if projectNpmrcExists { @@ -176,7 +179,7 @@ func (exec *Execute) publish(packageJSON, registry, username, password string, p } } - err = execRunner.RunExecutable("npm", "publish", "--tarball", tarballFilePath, "--userconfig", filepath.Join(tmpDirectory, ".piperNpmrc"), "--registry", registry) + err = execRunner.RunExecutable("npm", "publish", "--tarball", tarballFilePath, "--userconfig", ".piperNpmrc", "--registry", registry) if err != nil { return errors.Wrap(err, "failed publishing artifact") } @@ -188,6 +191,10 @@ func (exec *Execute) publish(packageJSON, registry, username, password string, p log.Entry().Warnf("unable to rename the .npmrc file : %v", err) } } + + if err := exec.Utils.Chdir(oldWorkingDirectory); err != nil { + return fmt.Errorf("failed to change back into original directory: %w", err) + } } else { err := execRunner.RunExecutable("npm", "publish", "--userconfig", npmrc.filepath, "--registry", registry) if err != nil { @@ -200,7 +207,6 @@ func (exec *Execute) publish(packageJSON, registry, username, password string, p func (exec *Execute) readPackageScope(packageJSON string) (string, error) { b, err := exec.Utils.FileRead(packageJSON) - if err != nil { return "", err } diff --git a/pkg/npm/publish_test.go b/pkg/npm/publish_test.go index df92fa1e78..6ca7fdaaaf 100644 --- a/pkg/npm/publish_test.go +++ b/pkg/npm/publish_test.go @@ -4,15 +4,31 @@ package npm import ( + "github.com/SAP/jenkins-library/pkg/mock" "io" "path/filepath" - "regexp" "testing" "github.com/SAP/jenkins-library/pkg/piperutils" "github.com/stretchr/testify/assert" ) +type npmMockUtilsBundleRelativeGlob struct { + *mock.FilesMockRelativeGlob + execRunner *mock.ExecMockRunner +} + +func (u *npmMockUtilsBundleRelativeGlob) GetExecRunner() ExecRunner { + return u.execRunner +} + +func newNpmMockUtilsBundleRelativeGlob() npmMockUtilsBundleRelativeGlob { + return npmMockUtilsBundleRelativeGlob{ + FilesMockRelativeGlob: &mock.FilesMockRelativeGlob{FilesMock: &mock.FilesMock{}}, + execRunner: &mock.ExecMockRunner{}, + } +} + func TestNpmPublish(t *testing.T) { type wants struct { publishConfigPath string @@ -102,9 +118,9 @@ func TestNpmPublish(t *testing.T) { packBeforePublish: true, wants: wants{ - publishConfigPath: `temp-(?:test|[0-9]+)/\.piperNpmrc`, + publishConfigPath: `\.piperNpmrc`, publishConfig: "_auth=VGhpc0lzVGhlVXNlcjpBbmRIZXJlSXNUaGVQYXNzd29yZA==\nregistry=https://my.private.npm.registry/\n", - tarballPath: "/temp-test/package.tgz", + tarballPath: "/package.tgz", }, }, { @@ -123,9 +139,9 @@ func TestNpmPublish(t *testing.T) { registryPassword: "AndHereIsThePassword", wants: wants{ - publishConfigPath: `temp-(?:test|[0-9]+)/\.piperNpmrc`, + publishConfigPath: `\.piperNpmrc`, publishConfig: "registry=https://my.private.npm.registry/\n//my.private.npm.registry/:_auth=VGhpc0lzVGhlVXNlcjpBbmRIZXJlSXNUaGVQYXNzd29yZA==\nalways-auth=true\n", - tarballPath: "/temp-test/package.tgz", + tarballPath: "/package.tgz", }, }, { @@ -145,9 +161,9 @@ func TestNpmPublish(t *testing.T) { registryPassword: "AndHereIsTheOtherPassword", wants: wants{ - publishConfigPath: `temp-(?:test|[0-9]+)/\.piperNpmrc`, + publishConfigPath: `\.piperNpmrc`, publishConfig: "//my.private.npm.registry/:_auth=VGhpc0lzVGhlVXNlcjpBbmRIZXJlSXNUaGVQYXNzd29yZA==\nregistry=https://my.other.private.npm.registry/\n//my.other.private.npm.registry/:_auth=VGhpc0lzVGhlT3RoZXJVc2VyOkFuZEhlcmVJc1RoZU90aGVyUGFzc3dvcmQ=\nalways-auth=true\n", - tarballPath: 
"/temp-test/package.tgz", + tarballPath: "/package.tgz", }, }, // scoped project @@ -216,9 +232,9 @@ func TestNpmPublish(t *testing.T) { packBeforePublish: true, wants: wants{ - publishConfigPath: `temp-(?:test|[0-9]+)/\.piperNpmrc`, + publishConfigPath: `\.piperNpmrc`, publishConfig: "_auth=VGhpc0lzVGhlVXNlcjpBbmRIZXJlSXNUaGVQYXNzd29yZA==\n@piper:registry=https://my.private.npm.registry/\n", - tarballPath: "/temp-test/package.tgz", + tarballPath: "/package.tgz", }, }, { @@ -237,9 +253,9 @@ func TestNpmPublish(t *testing.T) { registryPassword: "AndHereIsThePassword", wants: wants{ - publishConfigPath: `temp-(?:test|[0-9]+)/\.piperNpmrc`, + publishConfigPath: `\.piperNpmrc`, publishConfig: "registry=https://my.private.npm.registry/\n@piper:registry=https://my.private.npm.registry/\n//my.private.npm.registry/:_auth=VGhpc0lzVGhlVXNlcjpBbmRIZXJlSXNUaGVQYXNzd29yZA==\nalways-auth=true\n", - tarballPath: "/temp-test/package.tgz", + tarballPath: "/package.tgz", }, }, { @@ -259,9 +275,9 @@ func TestNpmPublish(t *testing.T) { registryPassword: "AndHereIsTheOtherPassword", wants: wants{ - publishConfigPath: `temp-(?:test|[0-9]+)/\.piperNpmrc`, + publishConfigPath: `\.piperNpmrc`, publishConfig: "//my.private.npm.registry/:_auth=VGhpc0lzVGhlVXNlcjpBbmRIZXJlSXNUaGVQYXNzd29yZA==\nregistry=https://my.other.private.npm.registry/\n@piper:registry=https://my.other.private.npm.registry/\n//my.other.private.npm.registry/:_auth=VGhpc0lzVGhlT3RoZXJVc2VyOkFuZEhlcmVJc1RoZU90aGVyUGFzc3dvcmQ=\nalways-auth=true\n", - tarballPath: "/temp-test/package.tgz", + tarballPath: "/package.tgz", }, }, // project in a subfolder @@ -330,9 +346,9 @@ func TestNpmPublish(t *testing.T) { packBeforePublish: true, wants: wants{ - publishConfigPath: `temp-(?:test|[0-9]+)/\.piperNpmrc`, + publishConfigPath: `\.piperNpmrc`, publishConfig: "_auth=VGhpc0lzVGhlVXNlcjpBbmRIZXJlSXNUaGVQYXNzd29yZA==\nregistry=https://my.private.npm.registry/\n", - tarballPath: "/temp-test/package.tgz", + tarballPath: "/sub/package.tgz", }, }, { @@ -351,9 +367,9 @@ func TestNpmPublish(t *testing.T) { registryPassword: "AndHereIsThePassword", wants: wants{ - publishConfigPath: `temp-(?:test|[0-9]+)/\.piperNpmrc`, + publishConfigPath: `\.piperNpmrc`, publishConfig: "registry=https://my.private.npm.registry/\n//my.private.npm.registry/:_auth=VGhpc0lzVGhlVXNlcjpBbmRIZXJlSXNUaGVQYXNzd29yZA==\nalways-auth=true\n", - tarballPath: "/temp-test/package.tgz", + tarballPath: "/sub/package.tgz", }, }, { @@ -373,9 +389,9 @@ func TestNpmPublish(t *testing.T) { registryPassword: "AndHereIsTheOtherPassword", wants: wants{ - publishConfigPath: `temp-(?:test|[0-9]+)/\.piperNpmrc`, + publishConfigPath: `\.piperNpmrc`, publishConfig: "//my.private.npm.registry/:_auth=VGhpc0lzVGhlVXNlcjpBbmRIZXJlSXNUaGVQYXNzd29yZA==\nregistry=https://my.other.private.npm.registry/\n//my.other.private.npm.registry/:_auth=VGhpc0lzVGhlT3RoZXJVc2VyOkFuZEhlcmVJc1RoZU90aGVyUGFzc3dvcmQ=\nalways-auth=true\n", - tarballPath: "/temp-test/package.tgz", + tarballPath: "/sub/package.tgz", }, }, // scoped project in a subfolder @@ -444,9 +460,9 @@ func TestNpmPublish(t *testing.T) { packBeforePublish: true, wants: wants{ - publishConfigPath: `temp-(?:test|[0-9]+)/\.piperNpmrc`, + publishConfigPath: `\.piperNpmrc`, publishConfig: "_auth=VGhpc0lzVGhlVXNlcjpBbmRIZXJlSXNUaGVQYXNzd29yZA==\n@piper:registry=https://my.private.npm.registry/\n", - tarballPath: "/temp-test/package.tgz", + tarballPath: "/sub/package.tgz", }, }, { @@ -465,9 +481,9 @@ func TestNpmPublish(t *testing.T) { registryPassword: 
"AndHereIsThePassword", wants: wants{ - publishConfigPath: `temp-(?:test|[0-9]+)/\.piperNpmrc`, + publishConfigPath: `\.piperNpmrc`, publishConfig: "registry=https://my.private.npm.registry/\n@piper:registry=https://my.private.npm.registry/\n//my.private.npm.registry/:_auth=VGhpc0lzVGhlVXNlcjpBbmRIZXJlSXNUaGVQYXNzd29yZA==\nalways-auth=true\n", - tarballPath: "/temp-test/package.tgz", + tarballPath: "/sub/package.tgz", }, }, { @@ -487,9 +503,9 @@ func TestNpmPublish(t *testing.T) { registryPassword: "AndHereIsTheOtherPassword", wants: wants{ - publishConfigPath: `temp-(?:test|[0-9]+)/\.piperNpmrc`, + publishConfigPath: `\.piperNpmrc`, publishConfig: "_auth=VGhpc0lzVGhlVXNlcjpBbmRIZXJlSXNUaGVQYXNzd29yZA==\nregistry=https://my.other.private.npm.registry/\n@piper:registry=https://my.other.private.npm.registry/\n//my.other.private.npm.registry/:_auth=VGhpc0lzVGhlT3RoZXJVc2VyOkFuZEhlcmVJc1RoZU90aGVyUGFzc3dvcmQ=\nalways-auth=true\n", - tarballPath: "/temp-test/package.tgz", + tarballPath: "/sub/package.tgz", }, }, // TODO multiple projects @@ -497,17 +513,14 @@ func TestNpmPublish(t *testing.T) { for _, test := range tt { t.Run(test.name, func(t *testing.T) { - utils := newNpmMockUtilsBundle() - + utils := newNpmMockUtilsBundleRelativeGlob() for path, content := range test.files { utils.AddFile(path, []byte(content)) } - - options := ExecutorOptions{} + utils.Separator = string(filepath.Separator) exec := &Execute{ - Utils: &utils, - Options: options, + Utils: &utils, } propertiesLoadFile = utils.FileRead @@ -516,18 +529,8 @@ func TestNpmPublish(t *testing.T) { // This stub simulates the behavior of npm pack and puts a tgz into the requested utils.execRunner.Stub = func(call string, stdoutReturn map[string]string, shouldFailOnCommand map[string]error, stdout io.Writer) error { - r := regexp.MustCompile(`npm\s+pack\s+.*--pack-destination\s+(?P[^\s]+).*`) - - matches := r.FindStringSubmatch(call) - - if len(matches) == 0 { - return nil - } - - packDestination := matches[1] - - utils.AddFile(filepath.Join(packDestination, "package.tgz"), []byte("this is a tgz file")) - + //tgzTargetPath := filepath.Dir(test.packageDescriptors[0]) + utils.AddFile(filepath.Join(".", "package.tgz"), []byte("this is a tgz file")) return nil } @@ -543,16 +546,20 @@ func TestNpmPublish(t *testing.T) { if len(test.wants.tarballPath) > 0 && assert.Contains(t, publishCmd.Params, "--tarball") { tarballPath := publishCmd.Params[piperutils.FindString(publishCmd.Params, "--tarball")+1] - assert.Equal(t, test.wants.tarballPath, tarballPath) + assert.Equal(t, test.wants.tarballPath, filepath.ToSlash(tarballPath)) } if assert.Contains(t, publishCmd.Params, "--userconfig") { effectivePublishConfigPath := publishCmd.Params[piperutils.FindString(publishCmd.Params, "--userconfig")+1] - assert.Regexp(t, test.wants.publishConfigPath, effectivePublishConfigPath) + assert.Regexp(t, test.wants.publishConfigPath, filepath.ToSlash(effectivePublishConfigPath)) - effectiveConfig, err := utils.FileRead(effectivePublishConfigPath) + if test.packBeforePublish { + subPath := filepath.Dir(test.packageDescriptors[0]) + effectivePublishConfigPath = filepath.Join(subPath, effectivePublishConfigPath) + } + effectiveConfig, err := utils.FileRead(effectivePublishConfigPath) if assert.NoError(t, err) { assert.Equal(t, test.wants.publishConfig, string(effectiveConfig)) } diff --git a/pkg/orchestrator/gitHubActions.go b/pkg/orchestrator/gitHubActions.go index 9847fbacce..ee68d92443 100644 --- a/pkg/orchestrator/gitHubActions.go +++ 
b/pkg/orchestrator/gitHubActions.go @@ -5,6 +5,7 @@ import ( "context" "fmt" "io" + "regexp" "strconv" "strings" "sync" @@ -26,7 +27,6 @@ type GitHubActionsConfigProvider struct { runData run jobs []job jobsFetched bool - currentJob job } type run struct { @@ -100,7 +100,13 @@ func (g *GitHubActionsConfigProvider) GetLog() ([]byte, error) { wg.Go(func() error { _, resp, err := g.client.Actions.GetWorkflowJobLogs(g.ctx, g.owner, g.repo, jobs[i].ID, true) if err != nil { - return errors.Wrap(err, "fetching job logs failed") + // GetWorkflowJobLogs returns "200 OK" as error when log download is successful. + // Therefore, ignore this error. + // GitHub API returns redirect URL instead of plain text logs. See: + // https://docs.github.com/en/enterprise-server@3.9/rest/actions/workflow-jobs?apiVersion=2022-11-28#download-job-logs-for-a-workflow-run + if err.Error() != "unexpected status code: 200 OK" { + return errors.Wrap(err, "fetching job logs failed") + } } defer resp.Body.Close() @@ -174,18 +180,21 @@ func (g *GitHubActionsConfigProvider) GetReference() string { return getEnv("GITHUB_REF", "n/a") } -// GetBuildURL returns the builds URL. For example, https://github.com/SAP/jenkins-library/actions/runs/5815297487 +// GetBuildURL returns the builds URL. The URL should point to the pipeline (not to the stage) +// that is currently being executed. For example, https://github.com/SAP/jenkins-library/actions/runs/5815297487 func (g *GitHubActionsConfigProvider) GetBuildURL() string { return g.GetRepoURL() + "/actions/runs/" + g.GetBuildID() } -// GetJobURL returns the current job HTML URL (not API URL). -// For example, https://github.com/SAP/jenkins-library/actions/runs/123456/jobs/7654321 +// GetJobURL returns the job URL. The URL should point to project’s pipelines. +// For example, https://github.com/SAP/jenkins-library/actions/workflows/workflow-file-name.yaml func (g *GitHubActionsConfigProvider) GetJobURL() string { - // We need to query the GitHub API here because the environment variable GITHUB_JOB returns - // the name of the job, not a numeric ID (which we need to form the URL) - g.guessCurrentJob() - return g.currentJob.HtmlURL + fileName := workflowFileName() + if fileName == "" { + return "" + } + + return g.GetRepoURL() + "/actions/workflows/" + fileName } // GetJobName returns the current workflow name. 
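As a side note on the GetJobURL change above: the job URL is now derived from the workflow file name instead of a GitHub API lookup. A minimal, standalone sketch (not part of this change set) of how the file name can be extracted from the GITHUB_WORKFLOW_REF value and turned into the workflows URL:

```go
// Standalone illustration of the GITHUB_WORKFLOW_REF based URL derivation.
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Typical value provided by GitHub Actions, e.g.
	// "SAP/jenkins-library/.github/workflows/piper.yml@refs/heads/main"
	workflowRef := "SAP/jenkins-library/.github/workflows/piper.yml@refs/heads/main"

	re := regexp.MustCompile(`\.github/workflows/([a-zA-Z0-9_-]+\.(yml|yaml))`)
	if m := re.FindStringSubmatch(workflowRef); len(m) > 1 {
		fmt.Println("https://github.com/SAP/jenkins-library/actions/workflows/" + m[1])
		// Prints: https://github.com/SAP/jenkins-library/actions/workflows/piper.yml
	}
}
```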
For example, "Piper workflow" @@ -295,32 +304,6 @@ func convertJobs(jobs []*github.WorkflowJob) []job { return result } -func (g *GitHubActionsConfigProvider) guessCurrentJob() { - // check if the current job has already been guessed - if g.currentJob.ID != 0 { - return - } - - // fetch jobs if they haven't been fetched yet - if err := g.fetchJobs(); err != nil { - log.Entry().Errorf("failed to fetch jobs: %s", err) - g.jobs = []job{} - return - } - - targetJobName := getEnv("GITHUB_JOB", "unknown") - log.Entry().Debugf("looking for job '%s' in jobs list: %v", targetJobName, g.jobs) - for _, j := range g.jobs { - // j.Name may be something like "piper / Init / Init" - // but GITHUB_JOB env may contain only "Init" - if strings.HasSuffix(j.Name, targetJobName) { - log.Entry().Debugf("current job id: %d", j.ID) - g.currentJob = j - return - } - } -} - func (g *GitHubActionsConfigProvider) runIdInt64() (int64, error) { strRunId := g.GetBuildID() runId, err := strconv.ParseInt(strRunId, 10, 64) @@ -341,3 +324,15 @@ func getOwnerAndRepoNames() (string, string) { return s[0], s[1] } + +func workflowFileName() string { + workflowRef := getEnv("GITHUB_WORKFLOW_REF", "") + re := regexp.MustCompile(`\.github/workflows/([a-zA-Z0-9_-]+\.(yml|yaml))`) + matches := re.FindStringSubmatch(workflowRef) + if len(matches) > 1 { + return matches[1] + } + + log.Entry().Debugf("unable to determine workflow file name from GITHUB_WORKFLOW_REF: %s", workflowRef) + return "" +} diff --git a/pkg/orchestrator/gitHubActions_test.go b/pkg/orchestrator/gitHubActions_test.go index c8aa8e632e..6a18c5c9a8 100644 --- a/pkg/orchestrator/gitHubActions_test.go +++ b/pkg/orchestrator/gitHubActions_test.go @@ -104,50 +104,6 @@ func TestGitHubActionsConfigProvider_GetPullRequestConfig(t *testing.T) { } } -func TestGitHubActionsConfigProvider_guessCurrentJob(t *testing.T) { - tests := []struct { - name string - jobs []job - jobsFetched bool - targetJobName string - wantJob job - }{ - { - name: "job found", - jobs: []job{{Name: "Job1"}, {Name: "Job2"}, {Name: "Job3"}}, - jobsFetched: true, - targetJobName: "Job2", - wantJob: job{Name: "Job2"}, - }, - { - name: "job found", - jobs: []job{{Name: "Piper / Job1"}, {Name: "Piper / Job2"}, {Name: "Piper / Job3"}}, - jobsFetched: true, - targetJobName: "Job2", - wantJob: job{Name: "Piper / Job2"}, - }, - { - name: "job not found", - jobs: []job{{Name: "Job1"}, {Name: "Job2"}, {Name: "Job3"}}, - jobsFetched: true, - targetJobName: "Job123", - wantJob: job{}, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - g := &GitHubActionsConfigProvider{ - jobs: tt.jobs, - jobsFetched: tt.jobsFetched, - } - _ = os.Setenv("GITHUB_JOB", tt.targetJobName) - g.guessCurrentJob() - - assert.Equal(t, tt.wantJob, g.currentJob) - }) - } -} - func TestGitHubActionsConfigProvider_fetchRunData(t *testing.T) { // data respJson := map[string]interface{}{ @@ -325,6 +281,7 @@ func TestGitHubActionsConfigProvider_Others(t *testing.T) { _ = os.Setenv("GITHUB_API_URL", "https://api.github.com") _ = os.Setenv("GITHUB_SERVER_URL", "https://github.com") _ = os.Setenv("GITHUB_REPOSITORY", "SAP/jenkins-library") + _ = os.Setenv("GITHUB_WORKFLOW_REF", "SAP/jenkins-library/.github/workflows/piper.yml@refs/heads/main") p := GitHubActionsConfigProvider{} startedAt, _ := time.Parse(time.RFC3339, "2023-08-11T07:28:24Z") @@ -333,7 +290,6 @@ func TestGitHubActionsConfigProvider_Others(t *testing.T) { Status: "", StartedAt: startedAt, } - p.currentJob = job{ID: 111, Name: "job1", HtmlURL: 
"https://github.com/SAP/jenkins-library/actions/runs/123456/jobs/7654321"} assert.Equal(t, "n/a", p.OrchestratorVersion()) assert.Equal(t, "GitHubActions", p.OrchestratorType()) @@ -344,10 +300,42 @@ func TestGitHubActionsConfigProvider_Others(t *testing.T) { assert.Equal(t, "main", p.GetBranch()) assert.Equal(t, "refs/pull/42/merge", p.GetReference()) assert.Equal(t, "https://github.com/SAP/jenkins-library/actions/runs/11111", p.GetBuildURL()) - assert.Equal(t, "https://github.com/SAP/jenkins-library/actions/runs/123456/jobs/7654321", p.GetJobURL()) + assert.Equal(t, "https://github.com/SAP/jenkins-library/actions/workflows/piper.yml", p.GetJobURL()) assert.Equal(t, "Piper workflow", p.GetJobName()) assert.Equal(t, "ffac537e6cbbf934b08745a378932722df287a53", p.GetCommit()) assert.Equal(t, "https://api.github.com/repos/SAP/jenkins-library/actions", actionsURL()) assert.True(t, p.IsPullRequest()) assert.True(t, isGitHubActions()) } + +func TestWorkflowFileName(t *testing.T) { + defer resetEnv(os.Environ()) + os.Clearenv() + + tests := []struct { + name, workflowRef, want string + }{ + { + name: "valid file name (yaml)", + workflowRef: "owner/repo/.github/workflows/test-workflow.yaml@refs/heads/main", + want: "test-workflow.yaml", + }, + { + name: "valid file name (yml)", + workflowRef: "owner/repo/.github/workflows/test-workflow.yml@refs/heads/main", + want: "test-workflow.yml", + }, + { + name: "invalid file name", + workflowRef: "owner/repo/.github/workflows/test-workflow@refs/heads/main", + want: "", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + _ = os.Setenv("GITHUB_WORKFLOW_REF", tt.workflowRef) + result := workflowFileName() + assert.Equal(t, tt.want, result) + }) + } +} diff --git a/pkg/piperutils/fileUtils.go b/pkg/piperutils/fileUtils.go index be6c1889e9..4cc5091ba6 100644 --- a/pkg/piperutils/fileUtils.go +++ b/pkg/piperutils/fileUtils.go @@ -31,6 +31,7 @@ type FileUtils interface { FileRemove(path string) error MkdirAll(path string, perm os.FileMode) error Chmod(path string, mode os.FileMode) error + Chown(path string, uid, gid int) error Glob(pattern string) (matches []string, err error) Chdir(path string) error TempDir(string, string) (string, error) @@ -144,6 +145,17 @@ func (f Files) Chmod(path string, mode os.FileMode) error { return os.Chmod(path, mode) } +// Chown is a recursive wrapper for os.Chown(). +func (f Files) Chown(path string, uid, gid int) error { + return filepath.WalkDir(path, func(name string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + + return os.Chown(name, uid, gid) + }) +} + // Unzip will decompress a zip archive, moving all files and folders // within the zip file (parameter 1) to an output directory (parameter 2). 
// from https://golangcode.com/unzip-files-in-go/ with the following license: diff --git a/pkg/splunk/splunk.go b/pkg/splunk/splunk.go index 919a33c90a..1ce1190516 100644 --- a/pkg/splunk/splunk.go +++ b/pkg/splunk/splunk.go @@ -123,8 +123,8 @@ func (s *Splunk) prepareTelemetry(telemetryData telemetry.Data) MonitoringData { CorrelationID: s.correlationID, CommitHash: readCommonPipelineEnvironment("git/headCommitId"), Branch: readCommonPipelineEnvironment("git/branch"), - GitOwner: readCommonPipelineEnvironment("github/owner"), - GitRepository: readCommonPipelineEnvironment("github/repository"), + GitOwner: readCommonPipelineEnvironment("git/organization"), + GitRepository: readCommonPipelineEnvironment("git/repository"), } monitoringJson, err := json.Marshal(monitoringData) if err != nil { diff --git a/pkg/telemetry/telemetry.go b/pkg/telemetry/telemetry.go index c3f0c34f10..e262010be0 100644 --- a/pkg/telemetry/telemetry.go +++ b/pkg/telemetry/telemetry.go @@ -4,15 +4,14 @@ import ( "crypto/sha1" "encoding/json" "fmt" - "github.com/SAP/jenkins-library/pkg/orchestrator" - "strconv" - "time" - "net/http" "net/url" + "strconv" + "time" piperhttp "github.com/SAP/jenkins-library/pkg/http" "github.com/SAP/jenkins-library/pkg/log" + "github.com/SAP/jenkins-library/pkg/orchestrator" ) // eventType @@ -81,8 +80,8 @@ func (t *Telemetry) Initialize(telemetryDisabled bool, stepName string) { EventType: eventType, StepName: stepName, SiteID: t.SiteID, - PipelineURLHash: t.getPipelineURLHash(), // http://server:port/jenkins/job/foo/ - BuildURLHash: t.getBuildURLHash(), // http://server:port/jenkins/job/foo/15/ + PipelineURLHash: t.getPipelineURLHash(), // URL (hashed value) which points to the project’s pipelines + BuildURLHash: t.getBuildURLHash(), // URL (hashed value) which points to the pipeline that is currently running } t.baseMetaData = baseMetaData } diff --git a/pkg/tms/tmsClient.go b/pkg/tms/tmsClient.go index 43842d07d3..88a5f6394b 100644 --- a/pkg/tms/tmsClient.go +++ b/pkg/tms/tmsClient.go @@ -9,6 +9,7 @@ import ( "net/http" "net/url" "os" + "strconv" "strings" piperHttp "github.com/SAP/jenkins-library/pkg/http" @@ -177,7 +178,9 @@ func (communicationInstance *CommunicationInstance) GetMtaExtDescriptor(nodeId i } -func (communicationInstance *CommunicationInstance) UploadFileToNode(nodeName, fileId, description, namedUser string) (NodeUploadResponseEntity, error) { +func (communicationInstance *CommunicationInstance) UploadFileToNode(fileInfo FileInfo, nodeName, description, namedUser string) (NodeUploadResponseEntity, error) { + fileId := strconv.FormatInt(fileInfo.Id, 10) + if communicationInstance.isVerbose { communicationInstance.logger.Info("Node upload started") communicationInstance.logger.Infof("tmsUrl: %v, nodeName: %v, fileId: %v, description: %v, namedUser: %v", communicationInstance.tmsUrl, nodeName, fileId, description, namedUser) @@ -200,14 +203,16 @@ func (communicationInstance *CommunicationInstance) UploadFileToNode(nodeName, f } json.Unmarshal(data, &nodeUploadResponseEntity) - if communicationInstance.isVerbose { - communicationInstance.logger.Info("Node upload executed successfully") - } + communicationInstance.logger.Info("Node upload executed successfully") + communicationInstance.logger.Infof("nodeName: %v, nodeId: %v, uploadedFile: %v, createdTransportRequestDescription: %v, createdTransportRequestId: %v", nodeUploadResponseEntity.QueueEntries[0].NodeName, nodeUploadResponseEntity.QueueEntries[0].NodeId, fileInfo.Name, 
nodeUploadResponseEntity.TransportRequestDescription, nodeUploadResponseEntity.TransportRequestId) + return nodeUploadResponseEntity, nil } -func (communicationInstance *CommunicationInstance) ExportFileToNode(nodeName, fileId, description, namedUser string) (NodeUploadResponseEntity, error) { +func (communicationInstance *CommunicationInstance) ExportFileToNode(fileInfo FileInfo, nodeName, description, namedUser string) (NodeUploadResponseEntity, error) { + fileId := strconv.FormatInt(fileInfo.Id, 10) + if communicationInstance.isVerbose { communicationInstance.logger.Info("Node export started") communicationInstance.logger.Infof("tmsUrl: %v, nodeName: %v, fileId: %v, description: %v, namedUser: %v", communicationInstance.tmsUrl, nodeName, fileId, description, namedUser) @@ -230,9 +235,8 @@ func (communicationInstance *CommunicationInstance) ExportFileToNode(nodeName, f } json.Unmarshal(data, &nodeUploadResponseEntity) - if communicationInstance.isVerbose { - communicationInstance.logger.Info("Node export executed successfully") - } + communicationInstance.logger.Info("Node export executed successfully") + communicationInstance.logger.Infof("nodeName: %v, nodeId: %v, uploadedFile: %v, createdTransportRequestDescription: %v, createdTransportRequestId: %v", nodeUploadResponseEntity.QueueEntries[0].NodeName, nodeUploadResponseEntity.QueueEntries[0].NodeId, fileInfo.Name, nodeUploadResponseEntity.TransportRequestDescription, nodeUploadResponseEntity.TransportRequestId) return nodeUploadResponseEntity, nil } diff --git a/pkg/tms/tmsClient_test.go b/pkg/tms/tmsClient_test.go index b80f8f10da..2848930f76 100644 --- a/pkg/tms/tmsClient_test.go +++ b/pkg/tms/tmsClient_test.go @@ -11,6 +11,7 @@ import ( "net/http" "net/url" "os" + "strconv" "strings" "testing" @@ -469,16 +470,16 @@ func TestUploadFileToNode(t *testing.T) { communicationInstance := CommunicationInstance{tmsUrl: "https://tms.dummy.sap.com", httpClient: &uploaderMock, logger: logger, isVerbose: false} - fileId := "111" + fileInfo := FileInfo{Id: 111, Name: "test.mtar"} namedUser := "testUser" - nodeUploadResponseEntity, err := communicationInstance.UploadFileToNode(nodeName, fileId, transportRequestDescription, namedUser) + nodeUploadResponseEntity, err := communicationInstance.UploadFileToNode(fileInfo, nodeName, transportRequestDescription, namedUser) assert.NoError(t, err, "Error occurred, but none expected") assert.Equal(t, "https://tms.dummy.sap.com/v2/nodes/upload", uploaderMock.urlCalled, "Called url incorrect") assert.Equal(t, http.MethodPost, uploaderMock.httpMethod, "Http method incorrect") assert.Equal(t, []string{"application/json"}, uploaderMock.header[http.CanonicalHeaderKey("content-type")], "Content-Type header incorrect") - entryString := fmt.Sprintf(`{"uri":"%v"}`, fileId) + entryString := fmt.Sprintf(`{"uri":"%v"}`, strconv.FormatInt(fileInfo.Id, 10)) assert.Equal(t, fmt.Sprintf(`{"contentType":"MTA","storageType":"FILE","nodeName":"%v","description":"%v","namedUser":"%v","entries":[%v]}`, nodeName, transportRequestDescription, namedUser, entryString), uploaderMock.requestBody, "Request body incorrect") assert.Equal(t, transportRequestId, nodeUploadResponseEntity.TransportRequestId, "TransportRequestId field of node upload response incorrect") @@ -493,11 +494,11 @@ func TestUploadFileToNode(t *testing.T) { uploaderMock := uploaderMock{responseBody: `Bad request provided`, httpStatusCode: http.StatusBadRequest} communicationInstance := CommunicationInstance{tmsUrl: "https://tms.dummy.sap.com", httpClient: 
&uploaderMock, logger: logger, isVerbose: false} + fileInfo := FileInfo{Id: 111, Name: "test.mtar"} nodeName := "TEST_NODE" - fileId := "111" transportRequestDescription := "This is a test description" namedUser := "testUser" - _, err := communicationInstance.UploadFileToNode(nodeName, fileId, transportRequestDescription, namedUser) + _, err := communicationInstance.UploadFileToNode(fileInfo, nodeName, transportRequestDescription, namedUser) assert.Error(t, err, "Error expected, but none occurred") assert.Equal(t, "https://tms.dummy.sap.com/v2/nodes/upload", uploaderMock.urlCalled, "Called url incorrect") diff --git a/pkg/tms/tmsUtils.go b/pkg/tms/tmsUtils.go index a270d78af3..416d8619bb 100644 --- a/pkg/tms/tmsUtils.go +++ b/pkg/tms/tmsUtils.go @@ -5,7 +5,6 @@ import ( "fmt" "net/url" "sort" - "strconv" "github.com/SAP/jenkins-library/pkg/command" piperHttp "github.com/SAP/jenkins-library/pkg/http" @@ -29,8 +28,13 @@ type uaa struct { } type serviceKey struct { - Uaa uaa `json:"uaa"` - Uri string `json:"uri"` + Uaa uaa `json:"uaa"` + Uri string `json:"uri"` + CALMEndpoints cALMEndpoints `json:"endpoints"` +} + +type cALMEndpoints *struct { + API string `json:"Api"` } type CommunicationInstance struct { @@ -101,20 +105,20 @@ type CommunicationInterface interface { UpdateMtaExtDescriptor(nodeId, idOfMtaExtDescriptor int64, file, mtaVersion, description, namedUser string) (MtaExtDescriptor, error) UploadMtaExtDescriptorToNode(nodeId int64, file, mtaVersion, description, namedUser string) (MtaExtDescriptor, error) UploadFile(file, namedUser string) (FileInfo, error) - UploadFileToNode(nodeName, fileId, description, namedUser string) (NodeUploadResponseEntity, error) - ExportFileToNode(nodeName, fileId, description, namedUser string) (NodeUploadResponseEntity, error) + UploadFileToNode(fileInfo FileInfo, nodeName, description, namedUser string) (NodeUploadResponseEntity, error) + ExportFileToNode(fileInfo FileInfo, nodeName, description, namedUser string) (NodeUploadResponseEntity, error) } type Options struct { - TmsServiceKey string `json:"tmsServiceKey,omitempty"` - CustomDescription string `json:"customDescription,omitempty"` - NamedUser string `json:"namedUser,omitempty"` - NodeName string `json:"nodeName,omitempty"` - MtaPath string `json:"mtaPath,omitempty"` - MtaVersion string `json:"mtaVersion,omitempty"` - NodeExtDescriptorMapping map[string]interface{} `json:"nodeExtDescriptorMapping,omitempty"` - Proxy string `json:"proxy,omitempty"` - StashContent []string `json:"stashContent,omitempty"` + ServiceKey string + CustomDescription string + NamedUser string + NodeName string + MtaPath string + MtaVersion string + NodeExtDescriptorMapping map[string]interface{} + Proxy string + StashContent []string Verbose bool } @@ -124,6 +128,7 @@ type tmsUtilsBundle struct { } const DEFAULT_TR_DESCRIPTION = "Created by Piper" +const CALM_REROUTING_ENDPOINT_TO_CTMS = "/imp-cdm-transport-management-api/v1" func NewTmsUtils() TmsUtils { utils := tmsUtilsBundle{ @@ -141,6 +146,14 @@ func unmarshalServiceKey(serviceKeyJson string) (serviceKey serviceKey, err erro if err != nil { return } + if len(serviceKey.Uri) == 0 { + if serviceKey.CALMEndpoints != nil && len(serviceKey.CALMEndpoints.API) > 0 { + serviceKey.Uri = serviceKey.CALMEndpoints.API + CALM_REROUTING_ENDPOINT_TO_CTMS + } else { + err = fmt.Errorf("neither uri nor endpoints.Api is set in service key json string") + return + } + } return } @@ -238,9 +251,9 @@ func SetupCommunication(config Options) (communicationInstance CommunicationInte } } - 
serviceKey, err := unmarshalServiceKey(config.TmsServiceKey) + serviceKey, err := unmarshalServiceKey(config.ServiceKey) if err != nil { - log.Entry().WithError(err).Fatal("Failed to unmarshal TMS service key") + log.Entry().WithError(err).Fatal("Failed to unmarshal service key") } log.RegisterSecret(serviceKey.Uaa.ClientSecret) @@ -339,20 +352,21 @@ func UploadDescriptors(config Options, communicationInstance CommunicationInterf return nil } -func UploadFile(config Options, communicationInstance CommunicationInterface, utils TmsUtils) (string, error) { +func UploadFile(config Options, communicationInstance CommunicationInterface, utils TmsUtils) (FileInfo, error) { + var fileInfo FileInfo + mtaPath := config.MtaPath exists, _ := utils.FileExists(mtaPath) if !exists { log.SetErrorCategory(log.ErrorConfiguration) - return "", fmt.Errorf("mta file %s not found", mtaPath) + return fileInfo, fmt.Errorf("mta file %s not found", mtaPath) } fileInfo, errUploadFile := communicationInstance.UploadFile(mtaPath, config.NamedUser) if errUploadFile != nil { log.SetErrorCategory(log.ErrorService) - return "", fmt.Errorf("failed to upload file: %w", errUploadFile) + return fileInfo, fmt.Errorf("failed to upload file: %w", errUploadFile) } - fileId := strconv.FormatInt(fileInfo.Id, 10) - return fileId, nil + return fileInfo, nil } diff --git a/pkg/tms/tmsUtils_test.go b/pkg/tms/tmsUtils_test.go new file mode 100644 index 0000000000..2b166d1f75 --- /dev/null +++ b/pkg/tms/tmsUtils_test.go @@ -0,0 +1,47 @@ +package tms + +import ( + "github.com/stretchr/testify/assert" + "testing" +) + +func Test_unmarshalServiceKey(t *testing.T) { + tests := []struct { + name string + serviceKeyJson string + wantTmsUrl string + errMessage string + }{ + { + name: "standard cTMS service key uri works", + serviceKeyJson: `{"uri": "https://my.tms.endpoint.sap.com"}`, + wantTmsUrl: "https://my.tms.endpoint.sap.com", + }, + { + name: "standard cALM service key uri has expected postfix", + serviceKeyJson: `{"endpoints": {"Api": "https://my.alm.endpoint.sap.com"}}`, + wantTmsUrl: "https://my.alm.endpoint.sap.com/imp-cdm-transport-management-api/v1", + }, + { + name: "no uri or endpoints in service key leads to error", + serviceKeyJson: `{"missing key options": "leads to error"}`, + errMessage: "neither uri nor endpoints.Api is set in service key json string", + }, + { + name: "faulty json leads to error", + serviceKeyJson: `"this is not correct json"`, + errMessage: "json: cannot unmarshal string into Go value of type tms.serviceKey", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gotServiceKey, err := unmarshalServiceKey(tt.serviceKeyJson) + if tt.errMessage == "" { + assert.NoError(t, err, "No error was expected") + assert.Equal(t, tt.wantTmsUrl, gotServiceKey.Uri, "Expected tms url does not match the uri in the service key") + } else { + assert.EqualError(t, err, tt.errMessage, "Error message not as expected") + } + }) + } +} diff --git a/pkg/vault/client.go b/pkg/vault/client.go index 77b6b04f1c..76e1018c3d 100644 --- a/pkg/vault/client.go +++ b/pkg/vault/client.go @@ -4,6 +4,7 @@ import ( "context" "encoding/json" "fmt" + "io" "net/http" "path" "strconv" @@ -42,32 +43,10 @@ func NewClient(config *Config, token string) (Client, error) { if err != nil { return Client{}, err } - - client.SetMinRetryWait(time.Second * 3) - client.SetMaxRetryWait(time.Second * 5) - client.SetCheckRetry(func(ctx context.Context, resp *http.Response, err error) (bool, error) { - if resp != nil { - 
log.Entry().Infoln("Vault retry: ", resp.Status, resp.StatusCode, err) - } else { - log.Entry().Infoln("Vault retry: ", err) - } - - retry, err := api.DefaultRetryPolicy(ctx, resp, err) - if err != nil || retry { - return true, nil - } - if resp != nil && resp.StatusCode >= 400 { - return true, nil - } - return false, nil - }) - if config.Namespace != "" { client.SetNamespace(config.Namespace) } - client.SetToken(token) - log.Entry().Debugf("Login to Vault %s in namespace %s successfull", config.Address, config.Namespace) return Client{client.Logical(), config}, nil } @@ -76,26 +55,51 @@ func NewClientWithAppRole(config *Config, roleID, secretID string) (Client, erro if config == nil { config = &Config{Config: api.DefaultConfig()} } - if config.AppRoleMountPoint == "" { config.AppRoleMountPoint = "auth/approle" } - client, err := api.NewClient(config.Config) if err != nil { return Client{}, err } + client.SetMinRetryWait(time.Second * 5) + client.SetMaxRetryWait(time.Second * 90) + client.SetMaxRetries(3) + client.SetCheckRetry(func(ctx context.Context, resp *http.Response, err error) (bool, error) { + if resp != nil { + log.Entry().Debugln("Vault response: ", resp.Status, resp.StatusCode, err) + } else { + log.Entry().Debugln("Vault response: ", err) + } + + isEOF := false + if err != nil && strings.Contains(err.Error(), "EOF") { + log.Entry().Infoln("isEOF is true") + isEOF = true + } + + if err == io.EOF { + log.Entry().Infoln("err = io.EOF is true") + } + + retry, err := api.DefaultRetryPolicy(ctx, resp, err) + + if err != nil || err == io.EOF || isEOF || retry { + log.Entry().Infoln("Retrying vault request...") + return true, nil + } + return false, nil + }) + if config.Namespace != "" { client.SetNamespace(config.Namespace) } - log.Entry().Debug("Using AppRole login") result, err := client.Logical().Write(path.Join(config.AppRoleMountPoint, "/login"), map[string]interface{}{ "role_id": roleID, "secret_id": secretID, }) - if err != nil { return Client{}, err } diff --git a/pkg/whitesource/reporting.go b/pkg/whitesource/reporting.go index b1c7cc8555..56e5cefa0d 100644 --- a/pkg/whitesource/reporting.go +++ b/pkg/whitesource/reporting.go @@ -40,7 +40,7 @@ func CreateCustomVulnerabilityReport(productName string, scan *Scan, alerts *[]A {Description: "Filtered project names", Details: strings.Join(projectNames, ", ")}, }, Overview: []reporting.OverviewRow{ - {Description: "Total number of vulnerabilities", Details: fmt.Sprint(len((*alerts)))}, + {Description: "Total number of vulnerabilities", Details: fmt.Sprint(len(*alerts))}, {Description: "Total number of high/critical vulnerabilities with CVSS score >= 7.0", Details: fmt.Sprint(severe)}, }, SuccessfulScan: severe == 0, @@ -295,9 +295,13 @@ func getAuditInformation(alert Alert) *format.SarifProperties { } return &format.SarifProperties{ - Audited: isAudited, - ToolAuditMessage: auditMessage, - UnifiedAuditState: unifiedAuditState, + Audited: isAudited, + ToolAuditMessage: auditMessage, + UnifiedAuditState: unifiedAuditState, + AuditRequirement: format.AUDIT_REQUIREMENT_GROUP_1_DESC, + AuditRequirementIndex: format.AUDIT_REQUIREMENT_GROUP_1_INDEX, + UnifiedSeverity: alert.Vulnerability.CVSS3Severity, + UnifiedCriticality: float32(alert.Vulnerability.CVSS3Score), } } diff --git a/pkg/whitesource/reporting_test.go b/pkg/whitesource/reporting_test.go index c859ab8bac..94abeb6d9d 100644 --- a/pkg/whitesource/reporting_test.go +++ b/pkg/whitesource/reporting_test.go @@ -349,9 +349,11 @@ func TestGetAuditInformation(t *testing.T) { Status: 
"OPEN", }, expected: &format.SarifProperties{ - Audited: false, - ToolAuditMessage: "", - UnifiedAuditState: "new", + Audited: false, + ToolAuditMessage: "", + UnifiedAuditState: "new", + AuditRequirement: format.AUDIT_REQUIREMENT_GROUP_1_DESC, + AuditRequirementIndex: format.AUDIT_REQUIREMENT_GROUP_1_INDEX, }, }, { @@ -359,11 +361,19 @@ func TestGetAuditInformation(t *testing.T) { alert: Alert{ Status: "IGNORE", Comments: "Not relevant alert", + Vulnerability: Vulnerability{ + CVSS3Score: 9.3, + CVSS3Severity: "critical", + }, }, expected: &format.SarifProperties{ - Audited: true, - ToolAuditMessage: "Not relevant alert", - UnifiedAuditState: "notRelevant", + Audited: true, + ToolAuditMessage: "Not relevant alert", + UnifiedAuditState: "notRelevant", + UnifiedSeverity: "critical", + UnifiedCriticality: 9.3, + AuditRequirement: format.AUDIT_REQUIREMENT_GROUP_1_DESC, + AuditRequirementIndex: format.AUDIT_REQUIREMENT_GROUP_1_INDEX, }, }, { @@ -373,13 +383,15 @@ func TestGetAuditInformation(t *testing.T) { Comments: "Some comment", }, expected: &format.SarifProperties{ - Audited: false, - ToolAuditMessage: "", - UnifiedAuditState: "new", + Audited: false, + ToolAuditMessage: "", + UnifiedAuditState: "new", + AuditRequirement: format.AUDIT_REQUIREMENT_GROUP_1_DESC, + AuditRequirementIndex: format.AUDIT_REQUIREMENT_GROUP_1_INDEX, }, }, { - name: "Audited alert", + name: "Not audited alert", alert: Alert{ Assessment: &format.Assessment{ Status: format.NotRelevant, @@ -389,16 +401,18 @@ func TestGetAuditInformation(t *testing.T) { Comments: "New alert", }, expected: &format.SarifProperties{ - Audited: true, - ToolAuditMessage: string(format.FixedByDevTeam), - UnifiedAuditState: "notRelevant", + Audited: true, + ToolAuditMessage: string(format.FixedByDevTeam), + UnifiedAuditState: "notRelevant", + AuditRequirement: format.AUDIT_REQUIREMENT_GROUP_1_DESC, + AuditRequirementIndex: format.AUDIT_REQUIREMENT_GROUP_1_INDEX, }, }, } for _, test := range tt { t.Run(test.name, func(t *testing.T) { - assert.Equal(t, getAuditInformation(test.alert), test.expected) + assert.Equal(t, test.expected, getAuditInformation(test.alert)) }) } } diff --git a/resources/com.sap.piper/pipeline/stageDefaults.yml b/resources/com.sap.piper/pipeline/stageDefaults.yml index 3a887d7972..7e2cdfa53b 100644 --- a/resources/com.sap.piper/pipeline/stageDefaults.yml +++ b/resources/com.sap.piper/pipeline/stageDefaults.yml @@ -112,6 +112,9 @@ spec: - name: tmsUpload conditions: - configKey: nodeName + - name: tmsExport + conditions: + - configKey: nodeName - name: healthExecuteCheck conditions: - configKey: 'testServerUrl' diff --git a/resources/default_pipeline_environment.yml b/resources/default_pipeline_environment.yml index 7e479dcb50..74825676d6 100644 --- a/resources/default_pipeline_environment.yml +++ b/resources/default_pipeline_environment.yml @@ -481,6 +481,7 @@ steps: active: false jmeter: pattern: '**/*.jtl' + filterRegex: '' errorFailedThreshold: 20 errorUnstableThreshold: 10 errorUnstableResponseTimeThreshold: '' diff --git a/resources/metadata/artifactPrepareVersion.yaml b/resources/metadata/artifactPrepareVersion.yaml index 98983be807..44abf0577d 100644 --- a/resources/metadata/artifactPrepareVersion.yaml +++ b/resources/metadata/artifactPrepareVersion.yaml @@ -337,6 +337,21 @@ spec: - cloud - cloud_noTag - library + - name: customTlsCertificateLinks + type: "[]string" + description: List containing download links of custom TLS certificates. 
This is required to ensure trusted connections to registries with custom certificates. + scope: + - GENERAL + - PARAMETERS + - STAGES + - STEPS + conditions: + - conditionRef: strings-equal + params: + - name: buildTool + value: maven + - name: buildTool + value: gradle outputs: resources: - name: commonPipelineEnvironment diff --git a/resources/metadata/cnbBuild.yaml b/resources/metadata/cnbBuild.yaml index ffd5c44fb8..947ebf5278 100644 --- a/resources/metadata/cnbBuild.yaml +++ b/resources/metadata/cnbBuild.yaml @@ -362,4 +362,7 @@ spec: - filePattern: "**/bom-*.xml" type: sbom containers: - - image: "paketobuildpacks/builder:base" + - image: "paketobuildpacks/builder-jammy-base:latest" + options: + - name: -u + value: "0" diff --git a/resources/metadata/codeqlExecuteScan.yaml b/resources/metadata/codeqlExecuteScan.yaml index a0e55fc7df..c6503dc267 100644 --- a/resources/metadata/codeqlExecuteScan.yaml +++ b/resources/metadata/codeqlExecuteScan.yaml @@ -120,6 +120,20 @@ spec: - STAGES - STEPS default: 30 + - name: targetGithubRepoURL + type: string + description: "Target GitHub repo URL. Only relevant if the project uses a combination of Piper and non-GitHub SCM." + scope: + - PARAMETERS + - STAGES + - STEPS + - name: targetGithubBranchName + type: string + description: "Target GitHub branch name. Only relevant if the project uses a combination of Piper and non-GitHub SCM." + scope: + - PARAMETERS + - STAGES + - STEPS - name: threads type: string description: "Use this many threads for the codeql operations." @@ -174,6 +188,26 @@ spec: - PARAMETERS - STAGES - STEPS + - name: projectSettingsFile + type: string + description: Path to the mvn settings file that should be used as project settings file. + scope: + - GENERAL + - STEPS + - STAGES + - PARAMETERS + aliases: + - name: maven/projectSettingsFile + - name: globalSettingsFile + type: string + description: Path to the mvn settings file that should be used as global settings file. + scope: + - GENERAL + - STEPS + - STAGES + - PARAMETERS + aliases: + - name: maven/globalSettingsFile containers: - image: "" outputs: diff --git a/resources/metadata/detectExecuteScan.yaml b/resources/metadata/detectExecuteScan.yaml index 35c10a9f90..dc0663c6bd 100644 --- a/resources/metadata/detectExecuteScan.yaml +++ b/resources/metadata/detectExecuteScan.yaml @@ -21,6 +21,9 @@ spec: - name: githubTokenCredentialsId description: Jenkins 'Secret text' credentials ID containing token to authenticate to GitHub. type: jenkins + - name: golangPrivateModulesGitTokenCredentialsId + description: Jenkins 'Username with password' credentials ID containing username/password for http access to your git repos where your go private modules are stored. + type: jenkins params: - name: token aliases: @@ -124,6 +127,7 @@ spec: description: Server URL to the Synopsis Detect (formerly BlackDuck) Server. aliases: - name: detect/serverUrl + - name: detectServerUrl type: string mandatory: true scope: @@ -489,6 +493,32 @@ spec: - PARAMETERS - STAGES - STEPS + - name: privateModules + type: "string" + description: Tells go which modules shall be considered to be private (by setting [GOPRIVATE](https://pkg.go.dev/cmd/go#hdr-Configuration_for_downloading_non_public_code)). + scope: + - GENERAL + - STEPS + - STAGES + - PARAMETERS + alias: + - goprivate + - name: privateModulesGitToken + description: GitHub personal access token as per https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line.
+ scope: + - GENERAL + - PARAMETERS + - STAGES + - STEPS + type: string + secret: true + resourceRef: + - name: golangPrivateModulesGitTokenCredentialsId + type: secret + param: password + - type: vaultSecret + name: golangPrivateModulesGitTokenVaultSecret + default: golang outputs: resources: - name: influx diff --git a/resources/metadata/gitopsUpdateDeployment.yaml b/resources/metadata/gitopsUpdateDeployment.yaml index 22bceee9bf..5349e2e74b 100644 --- a/resources/metadata/gitopsUpdateDeployment.yaml +++ b/resources/metadata/gitopsUpdateDeployment.yaml @@ -190,6 +190,13 @@ spec: - kubectl - helm - kustomize + - name: customTlsCertificateLinks + type: "[]string" + description: List containing download links of custom TLS certificates. This is required to ensure trusted connections to registries with custom certificates. + scope: + - PARAMETERS + - STAGES + - STEPS containers: - image: dtzar/helm-kubectl:3.8.0 workingDir: /config diff --git a/resources/metadata/golangBuild.yaml b/resources/metadata/golangBuild.yaml index 58bedb1f8f..6eb6ebe1c1 100644 --- a/resources/metadata/golangBuild.yaml +++ b/resources/metadata/golangBuild.yaml @@ -209,6 +209,7 @@ spec: type: "string" description: Tells go which modules shall be considered to be private (by setting [GOPRIVATE](https://pkg.go.dev/cmd/go#hdr-Configuration_for_downloading_non_public_code)). scope: + - GENERAL - STEPS - STAGES - PARAMETERS diff --git a/resources/metadata/helmExecute.yaml b/resources/metadata/helmExecute.yaml index f91c755a78..ccae717d7f 100644 --- a/resources/metadata/helmExecute.yaml +++ b/resources/metadata/helmExecute.yaml @@ -36,8 +36,11 @@ spec: - name: dockerConfigJsonCredentialsId description: Jenkins 'Secret file' credentials ID containing Docker config.json (with registry credential(s)). type: jenkins + - name: sourceRepositoryCredentialsId + description: Jenkins 'Username Password' credentials ID containing username and password for the Helm Repository authentication (source repo) + type: jenkins - name: targetRepositoryCredentialsId - description: Jenkins 'Username Password' credentials ID containing username and password for the Helm Repository authentication + description: Jenkins 'Username Password' credentials ID containing username and password for the Helm Repository authentication (target repo) type: jenkins resources: - name: deployDescriptor diff --git a/resources/metadata/imagePushToRegistry.yaml b/resources/metadata/imagePushToRegistry.yaml new file mode 100644 index 0000000000..dd944afcf3 --- /dev/null +++ b/resources/metadata/imagePushToRegistry.yaml @@ -0,0 +1,228 @@ +metadata: + name: imagePushToRegistry + description: Allows you to copy a Docker image from a source container registry to a destination container registry. + longDescription: |- + In case you want to pull an existing image from a remote container registry, a source image and source registry needs to be specified.
+ This makes it possible to move an image from one registry to another. + + The imagePushToRegistry is not similar in functionality to containerPushToRegistry (which is currently a groovy based step and only be used in jenkins). + Currently the imagePushToRegistry only supports copying a local image or image from source remote registry to destination registry. + +spec: + inputs: + resources: + - name: source + type: stash + params: + - name: targetImages + type: "map[string]interface{}" + description: | + Defines the names of the images that will be pushed to the target registry. If empty, names of sourceImages will be used. + Please ensure that targetImages and sourceImages correspond to each other: the first image in sourceImages should be mapped to the first image in the targetImages parameter. + + ```yaml + sourceImages: + - image-1 + - image-2 + targetImages: + image-1: target-image-1 + image-2: target-image-2 + ``` + scope: + - PARAMETERS + - STAGES + - STEPS + - name: sourceImages + type: "[]string" + mandatoryIf: + - name: pushLocalDockerImage + value: false + description: | + Defines the names of the images that will be pulled from source registry. This is helpful for moving images from one location to another. + Please ensure that targetImages and sourceImages correspond to each other: the first image in sourceImages should be mapped to the first image in the targetImages parameter. + + ```yaml + sourceImages: + - image-1 + - image-2 + targetImages: + image-1: target-image-1 + image-2: target-image-2 + ``` + scope: + - PARAMETERS + - STAGES + - STEPS + resourceRef: + - name: commonPipelineEnvironment + param: container/imageNames + - name: sourceImageTag + aliases: + - name: artifactVersion + - name: containerImageTag + description: Tag of the sourceImages + type: string + mandatoryIf: + - name: pushLocalDockerImage + value: false + scope: + - GENERAL + - PARAMETERS + - STAGES + - STEPS + resourceRef: + - name: commonPipelineEnvironment + param: artifactVersion + - name: sourceRegistryUrl + description: Defines a registry url from where the image should optionally be pulled from, incl. the protocol like `https://my.registry.com`*" + type: string + mandatoryIf: + - name: pushLocalDockerImage + value: false + scope: + - PARAMETERS + - STAGES + - STEPS + resourceRef: + - name: commonPipelineEnvironment + param: container/registryUrl + - name: sourceRegistryUser + type: string + mandatoryIf: + - name: pushLocalDockerImage + value: false + secret: true + description: Username of the source registry where the image should be pulled from. + scope: + - PARAMETERS + - STAGES + - STEPS + resourceRef: + - name: commonPipelineEnvironment + param: container/repositoryUsername + - type: vaultSecret + name: registryCredentialsVaultSecretName + default: docker-registry + - name: sourceRegistryPassword + type: string + mandatoryIf: + - name: pushLocalDockerImage + value: false + secret: true + description: Password of the source registry where the image should be pulled from. + scope: + - PARAMETERS + - STAGES + - STEPS + resourceRef: + - name: commonPipelineEnvironment + param: container/repositoryPassword + - type: vaultSecret + name: registryCredentialsVaultSecretName + default: docker-registry + - name: targetRegistryUrl + description: Defines a registry url from where the image should optionally be pushed to, incl. 
the protocol like `https://my.registry.com`*" + type: string + mandatory: true + scope: + - PARAMETERS + - STAGES + - STEPS + - name: targetRegistryUser + type: string + secret: true + mandatory: true + description: Username of the target registry where the image should be pushed to. + scope: + - PARAMETERS + - STAGES + - STEPS + resourceRef: + - type: vaultSecret + name: registryCredentialsVaultSecretName + default: docker-registry + - name: targetRegistryPassword + type: string + secret: true + description: Password of the target registry where the image should be pushed to. + mandatory: true + scope: + - PARAMETERS + - STAGES + - STEPS + resourceRef: + - type: vaultSecret + name: registryCredentialsVaultSecretName + default: docker-registry + - name: targetImageTag + aliases: + - name: artifactVersion + - name: containerImageTag + type: string + mandatoryIf: + - name: tagLatest + value: false + description: Tag of the targetImages + scope: + - GENERAL + - PARAMETERS + - STAGES + - STEPS + resourceRef: + - name: commonPipelineEnvironment + param: artifactVersion + - name: tagLatest + description: "Defines if the image should be tagged as `latest`. The parameter is true if targetImageTag is not specified." + type: bool + scope: + - PARAMETERS + - STAGES + - STEPS + - name: dockerConfigJSON + type: string + secret: true + description: Path to the file `.docker/config.json` - this is typically provided by your CI/CD system. You can find more details about the Docker credentials in the [Docker documentation](https://docs.docker.com/engine/reference/commandline/login/). + scope: + - PARAMETERS + - STAGES + - STEPS + resourceRef: + - type: vaultSecretFile + name: dockerConfigFileVaultSecretName + default: docker-config + - name: pushLocalDockerImage + description: "Defines if the local image should be pushed to registry" + type: bool + scope: + - PARAMETERS + - STAGES + - STEPS + - name: localDockerImagePath + description: "If the `localDockerImagePath` is a directory, it will be read as an OCI image layout. Otherwise, `localDockerImagePath` is assumed to be a docker-style tarball." + type: string + mandatoryIf: + - name: pushLocalDockerImage + value: true + scope: + - PARAMETERS + - STAGES + - STEPS + - name: targetArchitecture + type: string + description: Specifies the targetArchitecture in the form os/arch[/variant][:osversion] (e.g. linux/amd64). All OS and architectures of the specified image will be copied if it is a multi-platform image. To only push a single platform to the target registry use this parameter + scope: + - STEPS + - PARAMETERS + containers: + - image: gcr.io/go-containerregistry/crane:debug + command: + - /busybox/tail -f /dev/null + shell: /busybox/sh + options: + - name: -u + value: "0" + - name: --entrypoint + value: "" + env: + - name: container + value: docker diff --git a/resources/metadata/kanikoExecute.yaml b/resources/metadata/kanikoExecute.yaml index 18a241c42b..f8acefba0b 100644 --- a/resources/metadata/kanikoExecute.yaml +++ b/resources/metadata/kanikoExecute.yaml @@ -158,6 +158,7 @@ spec: Array keys: contextSubPath - Set a context subpath. + dockerfilePath - Dockerfile path (optional). If empty, root will be used. containerImageName - Name of the container which will be built. containerImageTag - Tag of the container which will be built. If empty - root containerImageTag will be used. containerImage - Defines the full name of the Docker image to be created including registry. 
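Stepping back to the imagePushToRegistry step introduced above: its metadata runs the step inside a gcr.io/go-containerregistry/crane container. For orientation only, here is a rough sketch of the kind of copy operation this boils down to, expressed with the go-containerregistry Go API; this is an illustration under that assumption, not the step's actual implementation, and the registry hosts and tags are placeholders:

```go
// Illustration: copying an image between registries with the
// go-containerregistry crane package.
package main

import (
	"log"

	"github.com/google/go-containerregistry/pkg/crane"
)

func main() {
	src := "my.source.registry.com/my-image:1.0.0"
	dst := "my.target.registry.com/my-image:1.0.0"

	// crane.Copy pulls the (possibly multi-platform) image from src and
	// pushes it to dst, using credentials from the local Docker config.
	if err := crane.Copy(src, dst); err != nil {
		log.Fatalf("copying image failed: %v", err)
	}
}
```

By default such a copy carries over all platforms of a multi-platform image, which matches the targetArchitecture description above that restricting to a single platform is opt-in.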
diff --git a/resources/metadata/mavenBuild.yaml b/resources/metadata/mavenBuild.yaml index 9fa1693757..e2ccfd7c7b 100644 --- a/resources/metadata/mavenBuild.yaml +++ b/resources/metadata/mavenBuild.yaml @@ -231,6 +231,19 @@ spec: resourceRef: - name: commonPipelineEnvironment param: custom/buildSettingsInfo + - name: deployFlags + type: "[]string" + description: Maven deploy flags that will be used when publish is detected. + scope: + - STEPS + - STAGES + - PARAMETERS + default: + - -Dmaven.main.skip=true + - -Dmaven.test.skip=true + - -Dmaven.install.skip=true + resources: + - type: stash outputs: resources: - name: commonPipelineEnvironment diff --git a/resources/metadata/mtaBuild.yaml b/resources/metadata/mtaBuild.yaml index 2c8d3de223..b69bb6a8e0 100644 --- a/resources/metadata/mtaBuild.yaml +++ b/resources/metadata/mtaBuild.yaml @@ -227,6 +227,15 @@ spec: resourceRef: - name: commonPipelineEnvironment param: custom/buildSettingsInfo + - name: createBOM + type: bool + description: Creates the bill of materials (BOM) using the CycloneDX plugin. + scope: + - GENERAL + - STEPS + - STAGES + - PARAMETERS + default: false outputs: resources: - name: commonPipelineEnvironment diff --git a/resources/metadata/npmExecuteScripts.yaml b/resources/metadata/npmExecuteScripts.yaml index d76490da94..2dd39b941a 100644 --- a/resources/metadata/npmExecuteScripts.yaml +++ b/resources/metadata/npmExecuteScripts.yaml @@ -150,7 +150,15 @@ spec: - name: packBeforePublish type: bool default: false - description: used for executing npm pack first, followed by npm publish. This two step maybe required when you are building a scoped packages and have npm dependencies from the same scope + description: used for executing npm pack first, followed by npm publish. These two steps may be required in two cases. 1) when building multiple npm packages (multiple package.json files), keep this parameter true and use `buildDescriptorList` or `buildDescriptorExcludeList` to choose which package(s) to publish; 2) when building one or more scoped npm packages (single or multiple `package.json` files) that have npm dependencies from the same scope. + scope: + - STEPS + - STAGES + - PARAMETERS + - name: production + type: bool + default: false + description: used for omitting installation of dev dependencies if true scope: - STEPS - STAGES - PARAMETERS diff --git a/resources/metadata/tmsExport.yaml b/resources/metadata/tmsExport.yaml index 4472f37851..83d561bbae 100644 --- a/resources/metadata/tmsExport.yaml +++ b/resources/metadata/tmsExport.yaml @@ -9,12 +9,12 @@ metadata: !!! note "Prerequisites" * You have subscribed to and set up TMS, as described in [Initial Setup](https://help.sap.com/viewer/7f7160ec0d8546c6b3eab72fb5ad6fd8/Cloud/en-US/66fd7283c62f48adb23c56fb48c84a60.html), which includes the configuration of your transport landscape. - * A corresponding service key has been created, as described in [Set Up the Environment to Transport Content Archives directly in an Application](https://help.sap.com/viewer/7f7160ec0d8546c6b3eab72fb5ad6fd8/Cloud/en-US/8d9490792ed14f1bbf8a6ac08a6bca64.html). This service key (JSON) must be stored as a secret text within the Jenkins secure store or provided as value of tmsServiceKey parameter. + * A corresponding service key has been created, as described in [Set Up the Environment to Transport Content Archives directly in an Application](https://help.sap.com/viewer/7f7160ec0d8546c6b3eab72fb5ad6fd8/Cloud/en-US/8d9490792ed14f1bbf8a6ac08a6bca64.html).
This service key (JSON) must be stored as a secret text within the Jenkins secure store or provided as the value of the serviceKey parameter. spec: inputs: secrets: - name: credentialsId - description: Jenkins 'Secret text' credentials ID containing service key for SAP Cloud Transport Management service. + description: Jenkins 'Secret text' credentials ID containing the service key for TMS (SAP Cloud Transport Management service) or CALM (SAP Cloud Application Lifecycle Management) service. type: jenkins resources: - name: buildResult @@ -22,7 +22,25 @@ spec: params: - name: tmsServiceKey type: string - description: Service key JSON string to access the SAP Cloud Transport Management service instance APIs. If not specified and if pipeline is running on Jenkins, service key, stored under ID provided with credentialsId parameter, is used. + description: > + DEPRECATION WARNING: This parameter has been deprecated. Please use the serviceKey parameter instead, + which supports both a service key for TMS (SAP Cloud Transport Management service) + and a service key for CALM (SAP Cloud Application Lifecycle Management) service. + + Service key JSON string to access the SAP Cloud Transport Management service instance APIs. + scope: + - PARAMETERS + - STEPS + - STAGES + mandatory: false + secret: true + - name: serviceKey + type: string + description: > + Service key JSON string to access TMS (SAP Cloud Transport Management service) instance APIs. + This can be a service key for TMS + or a service key for CALM (SAP Cloud Application Lifecycle Management) service. + If not specified and the pipeline is running on Jenkins, the service key stored under the ID provided with the credentialsId parameter is used. scope: - PARAMETERS - STEPS @@ -32,7 +50,7 @@ spec: resourceRef: - name: credentialsId type: secret - param: tmsServiceKey + param: serviceKey - name: customDescription type: string description: Can be used as the description of a transport request. Will overwrite the default, which is corresponding Git commit ID. diff --git a/resources/metadata/tmsUpload.yaml b/resources/metadata/tmsUpload.yaml index afe53f9717..7d61e4c31b 100644 --- a/resources/metadata/tmsUpload.yaml +++ b/resources/metadata/tmsUpload.yaml @@ -9,12 +9,12 @@ metadata: !!! note "Prerequisites" * You have subscribed to and set up TMS, as described in [Initial Setup](https://help.sap.com/viewer/7f7160ec0d8546c6b3eab72fb5ad6fd8/Cloud/en-US/66fd7283c62f48adb23c56fb48c84a60.html), which includes the configuration of a node to be used for uploading an MTA file. - * A corresponding service key has been created, as described in [Set Up the Environment to Transport Content Archives directly in an Application](https://help.sap.com/viewer/7f7160ec0d8546c6b3eab72fb5ad6fd8/Cloud/en-US/8d9490792ed14f1bbf8a6ac08a6bca64.html). This service key (JSON) must be stored as a secret text within the Jenkins secure store or provided as value of tmsServiceKey parameter. + * A corresponding service key has been created, as described in [Set Up the Environment to Transport Content Archives directly in an Application](https://help.sap.com/viewer/7f7160ec0d8546c6b3eab72fb5ad6fd8/Cloud/en-US/8d9490792ed14f1bbf8a6ac08a6bca64.html). This service key (JSON) must be stored as a secret text within the Jenkins secure store or provided as the value of the serviceKey parameter. spec: inputs: secrets: - name: credentialsId - description: Jenkins 'Secret text' credentials ID containing service key for SAP Cloud Transport Management service.
+ description: Jenkins 'Secret text' credentials ID containing the service key for TMS (SAP Cloud Transport Management service) or CALM (SAP Cloud Application Lifecycle Management) service. type: jenkins resources: - name: buildResult @@ -22,7 +22,25 @@ spec: params: - name: tmsServiceKey type: string - description: Service key JSON string to access the SAP Cloud Transport Management service instance APIs. If not specified and if pipeline is running on Jenkins, service key, stored under ID provided with credentialsId parameter, is used. + description: > + DEPRECATION WARNING: This parameter has been deprecated. Please use the serviceKey parameter instead, + which supports both a service key for TMS (SAP Cloud Transport Management service) + and a service key for CALM (SAP Cloud Application Lifecycle Management) service. + + Service key JSON string to access the SAP Cloud Transport Management service instance APIs. + scope: + - PARAMETERS + - STEPS + - STAGES + mandatory: false + secret: true + - name: serviceKey + type: string + description: > + Service key JSON string to access TMS (SAP Cloud Transport Management service) instance APIs. + This can be a service key for TMS + or a service key for CALM (SAP Cloud Application Lifecycle Management) service. + If not specified and the pipeline is running on Jenkins, the service key stored under the ID provided with the credentialsId parameter is used. scope: - PARAMETERS - STEPS @@ -32,7 +50,7 @@ spec: resourceRef: - name: credentialsId type: secret - param: tmsServiceKey + param: serviceKey - name: customDescription type: string description: Can be used as the description of a transport request. Will overwrite the default, which is corresponding Git commit ID. diff --git a/resources/metadata/whitesourceExecuteScan.yaml b/resources/metadata/whitesourceExecuteScan.yaml index 1b450e3ffa..2be99d7ecb 100644 --- a/resources/metadata/whitesourceExecuteScan.yaml +++ b/resources/metadata/whitesourceExecuteScan.yaml @@ -38,6 +38,9 @@ spec: - name: githubTokenCredentialsId description: Jenkins 'Secret text' credentials ID containing token to authenticate to GitHub. type: jenkins + - name: golangPrivateModulesGitTokenCredentialsId + description: Jenkins 'Username with password' credentials ID containing username/password for HTTP access to the Git repositories where your private Go modules are stored. + type: jenkins params: - name: agentDownloadUrl type: string @@ -597,6 +600,32 @@ spec: - PARAMETERS - STAGES - STEPS + - name: privateModules + type: "string" + description: Tells Go which modules shall be considered private (by setting [GOPRIVATE](https://pkg.go.dev/cmd/go#hdr-Configuration_for_downloading_non_public_code)). + scope: + - GENERAL + - STEPS + - STAGES + - PARAMETERS + alias: + - goprivate + - name: privateModulesGitToken + description: GitHub personal access token as per https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line.
+ scope: + - GENERAL + - PARAMETERS + - STAGES + - STEPS + type: string + secret: true + resourceRef: + - name: golangPrivateModulesGitTokenCredentialsId + type: secret + param: password + - type: vaultSecret + name: golangPrivateModulesGitTokenVaultSecret + default: golang resources: - name: buildDescriptor type: stash diff --git a/test/groovy/CheckmarxExecuteScanTest.groovy b/test/groovy/CheckmarxExecuteScanTest.groovy index 3289c451ea..a8de577400 100644 --- a/test/groovy/CheckmarxExecuteScanTest.groovy +++ b/test/groovy/CheckmarxExecuteScanTest.groovy @@ -75,21 +75,4 @@ class CheckmarxExecuteScanTest extends BasePiperTest { assertThat(withEnvArgs[0], allOf(startsWith('PIPER_parametersJSON'), containsString('"testParam":"This is test content"'))) assertThat(shellCallRule.shell[2], is('./piper checkmarxExecuteScan')) } - - @Test - void testCheckmarxExecuteScanNoReports() { - helper.registerAllowedMethod('fileExists', [Map], { - return false - }) - - exception.expect(AbortException) - exception.expectMessage("Expected to find checkmarxExecuteScan_reports.json in workspace but it is not there") - - stepRule.step.checkmarxExecuteScan( - juStabUtils: utils, - jenkinsUtilsStub: jenkinsUtils, - testParam: "This is test content", - script: nullScript - ) - } } diff --git a/test/groovy/CommonStepsTest.groovy b/test/groovy/CommonStepsTest.groovy index 470f59fbbf..46519c0482 100644 --- a/test/groovy/CommonStepsTest.groovy +++ b/test/groovy/CommonStepsTest.groovy @@ -229,6 +229,7 @@ public class CommonStepsTest extends BasePiperTest{ 'apiProviderList', //implementing new golang pattern without fields 'tmsUpload', 'tmsExport', + 'imagePushToRegistry', ] @Test diff --git a/test/groovy/TmsUploadTest.groovy b/test/groovy/TmsUploadTest.groovy index ecc2f30dad..54cec2e8c7 100644 --- a/test/groovy/TmsUploadTest.groovy +++ b/test/groovy/TmsUploadTest.groovy @@ -115,7 +115,7 @@ public class TmsUploadTest extends BasePiperTest { // contains assertion does not work apparently when comparing a list of lists against an expected list boolean found = false credInfo.each { entry -> - if (entry == [type: 'token', id: 'credentialsId', env: ['PIPER_tmsServiceKey']]) { + if (entry == [type: 'token', id: 'credentialsId', env: ['PIPER_serviceKey']]) { found = true } } diff --git a/vars/checkmarxExecuteScan.groovy b/vars/checkmarxExecuteScan.groovy index dab67463f3..3da0544c35 100644 --- a/vars/checkmarxExecuteScan.groovy +++ b/vars/checkmarxExecuteScan.groovy @@ -7,5 +7,5 @@ import groovy.transform.Field void call(Map parameters = [:]) { List credentials = [[type: 'usernamePassword', id: 'checkmarxCredentialsId', env: ['PIPER_username', 'PIPER_password']], [type: 'token', id: 'githubTokenCredentialsId', env: ['PIPER_githubToken']]] - piperExecuteBin(parameters, STEP_NAME, METADATA_FILE, credentials, true) + piperExecuteBin(parameters, STEP_NAME, METADATA_FILE, credentials) } diff --git a/vars/checkmarxOneExecuteScan.groovy b/vars/checkmarxOneExecuteScan.groovy index 57f318926c..88a6911e19 100644 --- a/vars/checkmarxOneExecuteScan.groovy +++ b/vars/checkmarxOneExecuteScan.groovy @@ -8,5 +8,5 @@ import groovy.transform.Field void call(Map parameters = [:]) { List credentials = [[type: 'usernamePassword', id: 'checkmarxOneCredentialsId', env: ['PIPER_clientId', 'PIPER_clientSecret']], [type: 'token', id: 'checkmarxOneAPIKey', env: ['PIPER_APIKey']]] - piperExecuteBin(parameters, STEP_NAME, METADATA_FILE, credentials, true) + piperExecuteBin(parameters, STEP_NAME, METADATA_FILE, credentials) } diff --git 
a/vars/detectExecuteScan.groovy b/vars/detectExecuteScan.groovy index 6587c18ba7..3bb58fb3d4 100644 --- a/vars/detectExecuteScan.groovy +++ b/vars/detectExecuteScan.groovy @@ -12,7 +12,8 @@ void call(Map parameters = [:]) { parameters = DownloadCacheUtils.injectDownloadCacheInParameters(script, parameters, BuildTool.MAVEN) List credentials = [ [type: 'token', id: 'detectTokenCredentialsId', env: ['PIPER_token']], - [type: 'token', id: 'githubTokenCredentialsId', env: ['PIPER_githubToken']] + [type: 'token', id: 'githubTokenCredentialsId', env: ['PIPER_githubToken']], + [type: 'usernamePassword', id: 'golangPrivateModulesGitTokenCredentialsId', env: ['PIPER_privateModulesGitUsername', 'PIPER_privateModulesGitToken']] ] piperExecuteBin(parameters, STEP_NAME, METADATA_FILE, credentials) } diff --git a/vars/dockerExecute.groovy b/vars/dockerExecute.groovy index 1d7a2e9407..4dc56499a6 100644 --- a/vars/dockerExecute.groovy +++ b/vars/dockerExecute.groovy @@ -183,10 +183,11 @@ void call(Map parameters = [:], body) { } def securityContext = securityContextFromOptions(config.dockerOptions) + def containerMountPath = containerMountPathFromVolumeBind(config.dockerVolumeBind) if (env.POD_NAME && isContainerDefined(config)) { container(getContainerDefined(config)) { withEnv(dockerEnvVars) { - echo "[INFO][${STEP_NAME}] Executing inside a Kubernetes Container." + echo "[INFO][${STEP_NAME}] Executing inside a Kubernetes Container. Docker image: ${config.dockerImage}" body() sh "chown -R 1000:1000 ." } @@ -208,6 +209,7 @@ void call(Map parameters = [:], body) { stashContent: config.stashContent, stashNoDefaultExcludes: config.stashNoDefaultExcludes, securityContext: securityContext, + containerMountPath: containerMountPath, ] if (config.sidecarImage) { @@ -222,7 +224,7 @@ void call(Map parameters = [:], body) { } dockerExecuteOnKubernetes(dockerExecuteOnKubernetesParams) { - echo "[INFO][${STEP_NAME}] Executing inside a Kubernetes Pod" + echo "[INFO][${STEP_NAME}] Executing inside a Kubernetes Pod. 
Docker image: ${config.dockerImage}" body() } } @@ -379,6 +381,17 @@ def securityContextFromOptions(dockerOptions) { return securityContext } +/* + * Picks the first volumeBind option and translates it into containerMountPath, currently only one fix volume is supported + */ +@NonCPS +def containerMountPathFromVolumeBind(dockerVolumeBind) { + if (dockerVolumeBind) { + return dockerVolumeBind[0].split(":")[1] + } + return "" +} + boolean isContainerDefined(config) { Map containerMap = ContainerMap.instance.getMap() diff --git a/vars/dockerExecuteOnKubernetes.groovy b/vars/dockerExecuteOnKubernetes.groovy index dcd4de1b76..f80eaea3f5 100644 --- a/vars/dockerExecuteOnKubernetes.groovy +++ b/vars/dockerExecuteOnKubernetes.groovy @@ -582,9 +582,12 @@ private List getContainerList(config) { command : [] ] def resources = getResources(sideCarContainerName, config) - if(resources) { + if (resources) { containerSpec.resources = resources } + if (config.containerMountPath) { + containerSpec.volumeMounts = [[name: "volume", mountPath: config.containerMountPath]] + } result.push(containerSpec) } return result diff --git a/vars/helmExecute.groovy b/vars/helmExecute.groovy index ef729cd237..89c8ee202a 100644 --- a/vars/helmExecute.groovy +++ b/vars/helmExecute.groovy @@ -7,6 +7,7 @@ void call(Map parameters = [:]) { List credentials = [ [type: 'file', id: 'kubeConfigFileCredentialsId', env: ['PIPER_kubeConfig']], [type: 'file', id: 'dockerConfigJsonCredentialsId', env: ['PIPER_dockerConfigJSON']], + [type: 'usernamePassword', id: 'sourceRepositoryCredentialsId', env: ['PIPER_sourceRepositoryUser', 'PIPER_sourceRepositoryPassword']], [type: 'usernamePassword', id: 'targetRepositoryCredentialsId', env: ['PIPER_targetRepositoryUser', 'PIPER_targetRepositoryPassword']], ] piperExecuteBin(parameters, STEP_NAME, METADATA_FILE, credentials) diff --git a/vars/imagePushToRegistry.groovy b/vars/imagePushToRegistry.groovy new file mode 100644 index 0000000000..dc73ec1bc6 --- /dev/null +++ b/vars/imagePushToRegistry.groovy @@ -0,0 +1,9 @@ +import groovy.transform.Field + +@Field String STEP_NAME = getClass().getName() +@Field String METADATA_FILE = 'metadata/imagePushToRegistry.yaml' + +void call(Map parameters = [:]) { + List credentials = [] + piperExecuteBin(parameters, STEP_NAME, METADATA_FILE, credentials) +} diff --git a/vars/piperPipelineStageRelease.groovy b/vars/piperPipelineStageRelease.groovy index cdc9c0b3d3..1f64d065db 100644 --- a/vars/piperPipelineStageRelease.groovy +++ b/vars/piperPipelineStageRelease.groovy @@ -23,6 +23,8 @@ import static com.sap.piper.Prerequisites.checkScript 'kubernetesDeploy', /** For TMS use-cases: Performs upload to Transport Management Service node*/ 'tmsUpload', + /** For TMS use-cases: Performs export to Transport Management Service node*/ + 'tmsExport', /** Publishes release information to GitHub. */ 'githubPublishRelease', /** Executes smoke tests by running the npm script 'ci-smoke' defined in the project's package.json file. 
*/ @@ -93,6 +95,10 @@ void call(Map parameters = [:]) { durationMeasure(script: script, measurementName: 'upload_release_tms_duration') { tmsUpload script: script } + } else if(config.tmsExport){ + durationMeasure(script: script, measurementName: 'export_release_tms_duration') { + tmsExport script: script + } } if (config.healthExecuteCheck) { diff --git a/vars/testsPublishResults.groovy b/vars/testsPublishResults.groovy index 501b7e9260..025c7a0ead 100644 --- a/vars/testsPublishResults.groovy +++ b/vars/testsPublishResults.groovy @@ -163,7 +163,8 @@ def publishJMeterReport(Map settings = [:]){ nthBuildNumber: settings.get('nthBuildNumber'), configType: settings.get('configType'), failBuildIfNoResultFile: settings.get('failBuildIfNoResultFile'), - compareBuildPrevious: settings.get('compareBuildPrevious') + compareBuildPrevious: settings.get('compareBuildPrevious'), + filterRegex: settings.get('filterRegex') ) archiveResults(settings.get('archive'), pattern, settings.get('allowEmptyResults')) } diff --git a/vars/tmsExport.groovy b/vars/tmsExport.groovy index f486429d01..303fa6cc63 100644 --- a/vars/tmsExport.groovy +++ b/vars/tmsExport.groovy @@ -6,7 +6,7 @@ import com.sap.piper.JenkinsUtils void call(Map parameters = [:]) { List credentials = [ - [type: 'token', id: 'credentialsId', env: ['PIPER_tmsServiceKey']] + [type: 'token', id: 'credentialsId', env: ['PIPER_serviceKey']] ] piperExecuteBin(parameters, STEP_NAME, METADATA_FILE, credentials, false, false, true) diff --git a/vars/tmsUpload.groovy b/vars/tmsUpload.groovy index d89203c922..4c586d40af 100644 --- a/vars/tmsUpload.groovy +++ b/vars/tmsUpload.groovy @@ -96,7 +96,7 @@ void call(Map parameters = [:]) { if (config.useGoStep != false) { List credentials = [ - [type: 'token', id: 'credentialsId', env: ['PIPER_tmsServiceKey']] + [type: 'token', id: 'credentialsId', env: ['PIPER_serviceKey']] ] if (namedUser) { diff --git a/vars/whitesourceExecuteScan.groovy b/vars/whitesourceExecuteScan.groovy index 818c4004a6..36cf2321ba 100644 --- a/vars/whitesourceExecuteScan.groovy +++ b/vars/whitesourceExecuteScan.groovy @@ -18,6 +18,7 @@ void call(Map parameters = [:]) { [type: 'token', id: 'userTokenCredentialsId', env: ['PIPER_userToken']], [type: 'token', id: 'githubTokenCredentialsId', env: ['PIPER_githubToken']], [type: 'file', id: 'dockerConfigJsonCredentialsId', env: ['PIPER_dockerConfigJSON']], + [type: 'usernamePassword', id: 'golangPrivateModulesGitTokenCredentialsId', env: ['PIPER_privateModulesGitUsername', 'PIPER_privateModulesGitToken']] ] piperExecuteBin(parameters, STEP_NAME, METADATA_FILE, credentials) }
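Taken together, the serviceKey migration and the Go private-module credentials introduced above would be consumed from a pipeline configuration roughly as follows. This is an illustrative sketch only: the credential IDs and the GOPRIVATE pattern are placeholders, tmsUpload accepts the same credentialsId/serviceKey pair, and detectExecuteScan wires the same golangPrivateModulesGitTokenCredentialsId credential as whitesourceExecuteScan.

steps:
  tmsExport:
    credentialsId: tms-or-calm-service-key    # Jenkins 'Secret text' credential holding the service key JSON (TMS or CALM)
    # serviceKey: '{...}'                     # alternatively, pass the service key JSON directly; tmsServiceKey is deprecated
  whitesourceExecuteScan:
    privateModules: "*.corp.example.com/*"                           # forwarded to GOPRIVATE
    golangPrivateModulesGitTokenCredentialsId: go-git-credentials    # Jenkins 'Username with password' credential for private Go modules

With tmsExport configured, the Release stage extension above runs the step inside its own durationMeasure block, analogous to the existing tmsUpload handling; since the new branch is an else-if, tmsUpload takes precedence when both are configured.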