From 5ab432b8047f173dd67f9a621c88757c2cee48d9 Mon Sep 17 00:00:00 2001
From: thtri
Date: Tue, 30 May 2023 11:06:14 +0200
Subject: [PATCH 01/36] fix(whitesource): add stash for checkmarxOne (#4383)

---
 cmd/whitesourceExecuteScan_generated.go        | 1 +
 resources/metadata/whitesourceExecuteScan.yaml | 2 ++
 2 files changed, 3 insertions(+)

diff --git a/cmd/whitesourceExecuteScan_generated.go b/cmd/whitesourceExecuteScan_generated.go
index da5dbee8b0..280827b409 100644
--- a/cmd/whitesourceExecuteScan_generated.go
+++ b/cmd/whitesourceExecuteScan_generated.go
@@ -386,6 +386,7 @@ func whitesourceExecuteScanMetadata() config.StepData {
 					{Name: "buildDescriptor", Type: "stash"},
 					{Name: "opensourceConfiguration", Type: "stash"},
 					{Name: "checkmarx", Type: "stash"},
+					{Name: "checkmarxOne", Type: "stash"},
 				},
 				Parameters: []config.StepParameters{
 					{
diff --git a/resources/metadata/whitesourceExecuteScan.yaml b/resources/metadata/whitesourceExecuteScan.yaml
index 4adc63e7fc..93d8480faa 100644
--- a/resources/metadata/whitesourceExecuteScan.yaml
+++ b/resources/metadata/whitesourceExecuteScan.yaml
@@ -604,6 +604,8 @@ spec:
         type: stash
       - name: checkmarx
         type: stash
+      - name: checkmarxOne
+        type: stash
   outputs:
     resources:
       - name: commonPipelineEnvironment

From 7f2e58b2113c64292390570aa0feec90ed1d16f8 Mon Sep 17 00:00:00 2001
From: Jk1484 <35270240+Jk1484@users.noreply.github.com>
Date: Tue, 30 May 2023 15:06:34 +0500
Subject: [PATCH 02/36] fix(golangBuild): pin version of cyclonedx (#4368)

* output version pin for cyclonedx

* test fix

---------

Co-authored-by: Vyacheslav Starostin <32613074+vstarostin@users.noreply.github.com>
---
 cmd/golangBuild.go      | 2 +-
 cmd/golangBuild_test.go | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/cmd/golangBuild.go b/cmd/golangBuild.go
index 6328f02f99..0b831a394e 100644
--- a/cmd/golangBuild.go
+++ b/cmd/golangBuild.go
@@ -570,7 +570,7 @@ func lookupGolangPrivateModulesRepositories(goModFile *modfile.File, globPattern
 }
 
 func runBOMCreation(utils golangBuildUtils, outputFilename string) error {
-	if err := utils.RunExecutable("cyclonedx-gomod", "mod", "-licenses", "-test", "-output", outputFilename); err != nil {
+	if err := utils.RunExecutable("cyclonedx-gomod", "mod", "-licenses", "-test", "-output", outputFilename, "-output-version", "1.4"); err != nil {
 		return fmt.Errorf("BOM creation failed: %w", err)
 	}
 	return nil
diff --git a/cmd/golangBuild_test.go b/cmd/golangBuild_test.go
index e582608d88..1f53435d5d 100644
--- a/cmd/golangBuild_test.go
+++ b/cmd/golangBuild_test.go
@@ -286,7 +286,7 @@ go 1.17`
 		assert.Equal(t, "go", utils.ExecMockRunner.Calls[0].Exec)
 		assert.Equal(t, []string{"install", "github.com/CycloneDX/cyclonedx-gomod/cmd/cyclonedx-gomod@latest"}, utils.ExecMockRunner.Calls[0].Params)
 		assert.Equal(t, "cyclonedx-gomod", utils.ExecMockRunner.Calls[1].Exec)
-		assert.Equal(t, []string{"mod", "-licenses", "-test", "-output", "bom-golang.xml"}, utils.ExecMockRunner.Calls[1].Params)
+		assert.Equal(t, []string{"mod", "-licenses", "-test", "-output", "bom-golang.xml", "-output-version", "1.4"}, utils.ExecMockRunner.Calls[1].Params)
 		assert.Equal(t, "go", utils.ExecMockRunner.Calls[2].Exec)
 		assert.Equal(t, []string{"build", "-trimpath"}, utils.ExecMockRunner.Calls[2].Params)
 	})

From a2109c59b5ac6815316ef5af9cbae318b27a968d Mon Sep 17 00:00:00 2001
From: Jk1484 <35270240+Jk1484@users.noreply.github.com>
Date: Tue, 30 May 2023 15:21:16 +0500
Subject: [PATCH 03/36] fix(gradle): Pin schema version of cyclonedx (#4367)

Co-authored-by: Ashly Mathew
Co-authored-by: 
Vyacheslav Starostin <32613074+vstarostin@users.noreply.github.com> --- cmd/gradleExecuteBuild.go | 2 +- .../java-project-with-bom-plugin/build.gradle | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/cmd/gradleExecuteBuild.go b/cmd/gradleExecuteBuild.go index 6535e4d5b1..8a635adf2b 100644 --- a/cmd/gradleExecuteBuild.go +++ b/cmd/gradleExecuteBuild.go @@ -94,7 +94,7 @@ allprojects { cyclonedxBom { outputName = "` + gradleBomFilename + `" outputFormat = "xml" - schemaVersion = "1.2" + schemaVersion = "1.4" includeConfigs = ["runtimeClasspath"] skipConfigs = ["compileClasspath", "testCompileClasspath"] } diff --git a/integration/testdata/TestGradleIntegration/java-project-with-bom-plugin/build.gradle b/integration/testdata/TestGradleIntegration/java-project-with-bom-plugin/build.gradle index ba09a09345..db7fa946b9 100644 --- a/integration/testdata/TestGradleIntegration/java-project-with-bom-plugin/build.gradle +++ b/integration/testdata/TestGradleIntegration/java-project-with-bom-plugin/build.gradle @@ -29,5 +29,5 @@ tasks.named('test') { cyclonedxBom { outputName = "bom-gradle" outputFormat = "xml" - schemaVersion = "1.2" + schemaVersion = "1.4" } From c15448b4e0690a718718a77fc46ce3bc59813448 Mon Sep 17 00:00:00 2001 From: Leonard Heilos Date: Tue, 30 May 2023 16:00:02 +0200 Subject: [PATCH 04/36] feat(whitesourceExecuteScan): allow to specify InstallCommand (#4376) * feat(whitesourceExecuteScan) allow to specify InstallCommand * reorder imports --------- Co-authored-by: sumeet patil Co-authored-by: Andrei Kireev --- cmd/whitesourceExecuteScan.go | 9 +++ cmd/whitesourceExecuteScan_generated.go | 2 +- cmd/whitesourceExecuteScan_test.go | 61 +++++++++++++++++++ pkg/whitesource/scanOptions.go | 2 + .../metadata/whitesourceExecuteScan.yaml | 2 +- 5 files changed, 74 insertions(+), 2 deletions(-) diff --git a/cmd/whitesourceExecuteScan.go b/cmd/whitesourceExecuteScan.go index 802f126b6f..740e1ff0c6 100644 --- a/cmd/whitesourceExecuteScan.go +++ b/cmd/whitesourceExecuteScan.go @@ -478,6 +478,7 @@ func wsScanOptions(config *ScanOptions) *ws.ScanOptions { AgentURL: config.AgentURL, ServiceURL: config.ServiceURL, ScanPath: config.ScanPath, + InstallCommand: config.InstallCommand, Verbose: GeneralConfig.Verbose, } } @@ -487,6 +488,14 @@ func wsScanOptions(config *ScanOptions) *ws.ScanOptions { func executeScan(config *ScanOptions, scan *ws.Scan, utils whitesourceUtils) error { options := wsScanOptions(config) + if options.InstallCommand != "" { + installCommandTokens := strings.Split(config.InstallCommand, " ") + if err := utils.RunExecutable(installCommandTokens[0], installCommandTokens[1:]...); err != nil { + log.SetErrorCategory(log.ErrorCustom) + return errors.Wrapf(err, "failed to execute install command: %v", config.InstallCommand) + } + } + // Execute scan with Unified Agent jar file if err := scan.ExecuteUAScan(options, utils); err != nil { return errors.Wrapf(err, "failed to execute Unified Agent scan") diff --git a/cmd/whitesourceExecuteScan_generated.go b/cmd/whitesourceExecuteScan_generated.go index 280827b409..5b0c246a77 100644 --- a/cmd/whitesourceExecuteScan_generated.go +++ b/cmd/whitesourceExecuteScan_generated.go @@ -329,7 +329,7 @@ func addWhitesourceExecuteScanFlags(cmd *cobra.Command, stepConfig *whitesourceE cmd.Flags().StringSliceVar(&stepConfig.Excludes, "excludes", []string{}, "List of file path patterns to exclude in the scan.") cmd.Flags().BoolVar(&stepConfig.FailOnSevereVulnerabilities, "failOnSevereVulnerabilities", true, "Whether to fail the step on 
severe vulnerabilties or not") cmd.Flags().StringSliceVar(&stepConfig.Includes, "includes", []string{}, "List of file path patterns to include in the scan.") - cmd.Flags().StringVar(&stepConfig.InstallCommand, "installCommand", os.Getenv("PIPER_installCommand"), "[NOT IMPLEMENTED] Install command that can be used to populate the default docker image for some scenarios.") + cmd.Flags().StringVar(&stepConfig.InstallCommand, "installCommand", os.Getenv("PIPER_installCommand"), "Install command that can be used to populate the default docker image for some scenarios.") cmd.Flags().StringVar(&stepConfig.JreDownloadURL, "jreDownloadUrl", `https://github.com/SAP/SapMachine/releases/download/sapmachine-11.0.2/sapmachine-jre-11.0.2_linux-x64_bin.tar.gz`, "URL used for downloading the Java Runtime Environment (JRE) required to run the WhiteSource Unified Agent.") cmd.Flags().BoolVar(&stepConfig.LicensingVulnerabilities, "licensingVulnerabilities", true, "[NOT IMPLEMENTED] Whether license compliance is considered and reported as part of the assessment.") cmd.Flags().StringVar(&stepConfig.OrgToken, "orgToken", os.Getenv("PIPER_orgToken"), "WhiteSource token identifying your organization.") diff --git a/cmd/whitesourceExecuteScan_test.go b/cmd/whitesourceExecuteScan_test.go index 98bbd24bf8..77c724902e 100644 --- a/cmd/whitesourceExecuteScan_test.go +++ b/cmd/whitesourceExecuteScan_test.go @@ -16,6 +16,7 @@ import ( "github.com/SAP/jenkins-library/pkg/reporting" "github.com/SAP/jenkins-library/pkg/versioning" ws "github.com/SAP/jenkins-library/pkg/whitesource" + "github.com/pkg/errors" "github.com/stretchr/testify/assert" "github.com/google/go-github/v45/github" @@ -143,6 +144,66 @@ func TestRunWhitesourceExecuteScan(t *testing.T) { } assert.True(t, utilsMock.HasWrittenFile(filepath.Join(ws.ReportsDirectory, "mock-project - 1-vulnerability-report.pdf"))) assert.True(t, utilsMock.HasWrittenFile(filepath.Join(ws.ReportsDirectory, "mock-project - 1-vulnerability-report.pdf"))) + assert.Equal(t, 3, len(utilsMock.ExecMockRunner.Calls), "no InstallCommand must be executed") + }) + t.Run("executes the InstallCommand prior to the scan", func(t *testing.T) { + ctx := context.Background() + // init + config := ScanOptions{ + BuildDescriptorFile: "my-mta.yml", + VersioningModel: "major", + AgentDownloadURL: "https://whitesource.com/agent.jar", + VulnerabilityReportFormat: "pdf", + Reporting: true, + AgentFileName: "ua.jar", + ProductName: "mock-product", + ProjectToken: "mock-project-token", + InstallCommand: "echo hello world", + } + utilsMock := newWhitesourceUtilsMock() + utilsMock.AddFile("wss-generated-file.config", []byte("key=value")) + lastUpdatedDate := time.Now().Format(ws.DateTimeLayout) + systemMock := ws.NewSystemMock(lastUpdatedDate) + systemMock.Alerts = []ws.Alert{} + scan := newWhitesourceScan(&config) + cpe := whitesourceExecuteScanCommonPipelineEnvironment{} + influx := whitesourceExecuteScanInflux{} + // test + err := runWhitesourceExecuteScan(ctx, &config, scan, utilsMock, systemMock, &cpe, &influx) + // assert + assert.NoError(t, err) + assert.Equal(t, 4, len(utilsMock.ExecMockRunner.Calls), "InstallCommand not executed") + assert.Equal(t, mock.ExecCall{Exec: "echo", Params: []string{"hello", "world"}}, utilsMock.ExecMockRunner.Calls[0], "run command/params of InstallCommand incorrect") + }) + t.Run("fails if the InstallCommand fails", func(t *testing.T) { + ctx := context.Background() + // init + config := ScanOptions{ + BuildDescriptorFile: "my-mta.yml", + VersioningModel: "major", + 
AgentDownloadURL: "https://whitesource.com/agent.jar", + VulnerabilityReportFormat: "pdf", + Reporting: true, + AgentFileName: "ua.jar", + ProductName: "mock-product", + ProjectToken: "mock-project-token", + InstallCommand: "echo this-will-fail", + } + utilsMock := newWhitesourceUtilsMock() + utilsMock.AddFile("wss-generated-file.config", []byte("key=value")) + lastUpdatedDate := time.Now().Format(ws.DateTimeLayout) + systemMock := ws.NewSystemMock(lastUpdatedDate) + systemMock.Alerts = []ws.Alert{} + scan := newWhitesourceScan(&config) + cpe := whitesourceExecuteScanCommonPipelineEnvironment{} + influx := whitesourceExecuteScanInflux{} + utilsMock.ExecMockRunner.ShouldFailOnCommand = map[string]error{ + "echo this-will-fail": errors.New("error case"), + } + // test + err := runWhitesourceExecuteScan(ctx, &config, scan, utilsMock, systemMock, &cpe, &influx) + // assert + assert.EqualError(t, err, "failed to execute WhiteSource scan: failed to execute Scan: failed to execute install command: echo this-will-fail: error case") }) } diff --git a/pkg/whitesource/scanOptions.go b/pkg/whitesource/scanOptions.go index ff9475b07d..6cdcd29e5d 100644 --- a/pkg/whitesource/scanOptions.go +++ b/pkg/whitesource/scanOptions.go @@ -44,5 +44,7 @@ type ScanOptions struct { ScanPath string + InstallCommand string + Verbose bool } diff --git a/resources/metadata/whitesourceExecuteScan.yaml b/resources/metadata/whitesourceExecuteScan.yaml index 93d8480faa..1b450e3ffa 100644 --- a/resources/metadata/whitesourceExecuteScan.yaml +++ b/resources/metadata/whitesourceExecuteScan.yaml @@ -234,7 +234,7 @@ spec: - STEPS - name: installCommand type: string - description: "[NOT IMPLEMENTED] Install command that can be used to populate the default docker image for some scenarios." + description: "Install command that can be used to populate the default docker image for some scenarios." 
scope: - PARAMETERS - STAGES From cd71282f006d9d4e7a0db7c72a1470dddfbb9275 Mon Sep 17 00:00:00 2001 From: Daria Kuznetsova Date: Wed, 31 May 2023 11:37:09 +0300 Subject: [PATCH 05/36] fix(codeqlExecuteScan): pagination call for getting codescanning results (#4370) pagination call for getting code scanning results --------- Co-authored-by: sumeet patil --- cmd/codeqlExecuteScan.go | 24 ++++++------- pkg/codeql/codeql.go | 72 ++++++++++++++++++++++++++++--------- pkg/codeql/codeql_test.go | 76 ++++++++++++++++++++++++++++++++++++--- 3 files changed, 139 insertions(+), 33 deletions(-) diff --git a/cmd/codeqlExecuteScan.go b/cmd/codeqlExecuteScan.go index 7c645f99c4..9ac2d7f0ee 100644 --- a/cmd/codeqlExecuteScan.go +++ b/cmd/codeqlExecuteScan.go @@ -280,25 +280,25 @@ func runCodeqlExecuteScan(config *codeqlExecuteScanOptions, telemetryData *telem return reports, err } - if config.CheckForCompliance { - codeqlScanAuditInstance := codeql.NewCodeqlScanAuditInstance(repoInfo.serverUrl, repoInfo.owner, repoInfo.repo, token, []string{}) - scanResults, err := codeqlScanAuditInstance.GetVulnerabilities(repoInfo.ref) - if err != nil { - return reports, errors.Wrap(err, "failed to get scan results") - } + codeqlScanAuditInstance := codeql.NewCodeqlScanAuditInstance(repoInfo.serverUrl, repoInfo.owner, repoInfo.repo, token, []string{}) + scanResults, err := codeqlScanAuditInstance.GetVulnerabilities(repoInfo.ref) + if err != nil { + return reports, errors.Wrap(err, "failed to get scan results") + } + + codeqlAudit := codeql.CodeqlAudit{ToolName: "codeql", RepositoryUrl: repoUrl, CodeScanningLink: repoCodeqlScanUrl, RepositoryReferenceUrl: repoReference, ScanResults: scanResults} + paths, err := codeql.WriteJSONReport(codeqlAudit, config.ModulePath) + if err != nil { + return reports, errors.Wrap(err, "failed to write json compliance report") + } + if config.CheckForCompliance { unaudited := (scanResults.Total - scanResults.Audited) if unaudited > config.VulnerabilityThresholdTotal { msg := fmt.Sprintf("Your repository %v with ref %v is not compliant. Total unaudited issues are %v which is greater than the VulnerabilityThresholdTotal count %v", repoUrl, repoInfo.ref, unaudited, config.VulnerabilityThresholdTotal) return reports, errors.Errorf(msg) } - codeqlAudit := codeql.CodeqlAudit{ToolName: "codeql", RepositoryUrl: repoUrl, CodeScanningLink: repoCodeqlScanUrl, RepositoryReferenceUrl: repoReference, ScanResults: scanResults} - paths, err := codeql.WriteJSONReport(codeqlAudit, config.ModulePath) - if err != nil { - return reports, errors.Wrap(err, "failed to write json compliance report") - } - reports = append(reports, paths...) 
} } diff --git a/pkg/codeql/codeql.go b/pkg/codeql/codeql.go index 3fe877513f..b64bdffd5b 100644 --- a/pkg/codeql/codeql.go +++ b/pkg/codeql/codeql.go @@ -2,6 +2,7 @@ package codeql import ( "context" + "errors" sapgithub "github.com/SAP/jenkins-library/pkg/github" "github.com/google/go-github/v45/github" @@ -13,9 +14,11 @@ type CodeqlScanAudit interface { type githubCodeqlScanningService interface { ListAlertsForRepo(ctx context.Context, owner, repo string, opts *github.AlertListOptions) ([]*github.Alert, *github.Response, error) + ListAnalysesForRepo(ctx context.Context, owner, repo string, opts *github.AnalysesListOptions) ([]*github.ScanningAnalysis, *github.Response, error) } -const auditStateOpen = "open" +const auditStateOpen string = "open" +const perPageCount int = 100 func NewCodeqlScanAuditInstance(serverUrl, owner, repository, token string, trustedCerts []string) CodeqlScanAuditInstance { return CodeqlScanAuditInstance{serverUrl: serverUrl, owner: owner, repository: repository, token: token, trustedCerts: trustedCerts} @@ -36,32 +39,67 @@ func (codeqlScanAudit *CodeqlScanAuditInstance) GetVulnerabilities(analyzedRef s if err != nil { return CodeqlScanning{}, err } + totalAlerts, err := getTotalAlertsFromClient(ctx, client.CodeScanning, analyzedRef, codeqlScanAudit) - return getVulnerabilitiesFromClient(ctx, client.CodeScanning, analyzedRef, codeqlScanAudit) + return getVulnerabilitiesFromClient(ctx, client.CodeScanning, analyzedRef, codeqlScanAudit, totalAlerts) } -func getVulnerabilitiesFromClient(ctx context.Context, codeScanning githubCodeqlScanningService, analyzedRef string, codeqlScanAudit *CodeqlScanAuditInstance) (CodeqlScanning, error) { - alertOptions := github.AlertListOptions{ - State: "", - Ref: analyzedRef, - ListOptions: github.ListOptions{}, +func getTotalAlertsFromClient(ctx context.Context, codeScannning githubCodeqlScanningService, analyzedRef string, codeqlScanAudit *CodeqlScanAuditInstance) (int, error) { + analysesOptions := github.AnalysesListOptions{ + Ref: &analyzedRef, } - - alerts, _, err := codeScanning.ListAlertsForRepo(ctx, codeqlScanAudit.owner, codeqlScanAudit.repository, &alertOptions) + analyses, _, err := codeScannning.ListAnalysesForRepo(ctx, codeqlScanAudit.owner, codeqlScanAudit.repository, &analysesOptions) if err != nil { - return CodeqlScanning{}, err + return 0, err + } + if len(analyses) < 1 { + return 0, errors.New("analyses for ref not found") } + return *analyses[0].ResultsCount, nil +} - openStateCount := 0 - for _, alert := range alerts { - if *alert.State == auditStateOpen { - openStateCount = openStateCount + 1 - } +func getVulnerabilitiesFromClient(ctx context.Context, codeScanning githubCodeqlScanningService, analyzedRef string, codeqlScanAudit *CodeqlScanAuditInstance, totalAlerts int) (CodeqlScanning, error) { + pages := totalAlerts/perPageCount + 1 + errChan := make(chan error) + openStateCountChan := make(chan int) + for page := 1; page <= pages; page++ { + go func(i int) { + alertOptions := github.AlertListOptions{ + State: "", + Ref: analyzedRef, + ListOptions: github.ListOptions{ + Page: i, + PerPage: perPageCount, + }, + } + + alerts, _, err := codeScanning.ListAlertsForRepo(ctx, codeqlScanAudit.owner, codeqlScanAudit.repository, &alertOptions) + if err != nil { + errChan <- err + return + } + + openStateCount := 0 + for _, alert := range alerts { + if *alert.State == auditStateOpen { + openStateCount = openStateCount + 1 + } + } + openStateCountChan <- len(alerts) - openStateCount + }(page) } codeqlScanning := 
CodeqlScanning{} - codeqlScanning.Total = len(alerts) - codeqlScanning.Audited = (codeqlScanning.Total - openStateCount) + codeqlScanning.Total = totalAlerts + for i := 0; i < pages; i++ { + select { + case openStateCount := <-openStateCountChan: + codeqlScanning.Audited += openStateCount + case err := <-errChan: + return CodeqlScanning{}, err + } + } + return codeqlScanning, nil } diff --git a/pkg/codeql/codeql_test.go b/pkg/codeql/codeql_test.go index 9d2798c247..11c7e7b816 100644 --- a/pkg/codeql/codeql_test.go +++ b/pkg/codeql/codeql_test.go @@ -18,10 +18,43 @@ type githubCodeqlScanningMock struct { func (g *githubCodeqlScanningMock) ListAlertsForRepo(ctx context.Context, owner, repo string, opts *github.AlertListOptions) ([]*github.Alert, *github.Response, error) { openState := "open" closedState := "closed" - alerts := []*github.Alert{{State: &openState}, {State: &openState}, {State: &closedState}} + alerts := []*github.Alert{} + + if repo == "testRepo1" { + alerts = append(alerts, &github.Alert{State: &openState}) + alerts = append(alerts, &github.Alert{State: &openState}) + alerts = append(alerts, &github.Alert{State: &closedState}) + } + + if repo == "testRepo2" { + if opts.Page == 1 { + for i := 0; i < 50; i++ { + alerts = append(alerts, &github.Alert{State: &openState}) + } + for i := 0; i < 50; i++ { + alerts = append(alerts, &github.Alert{State: &closedState}) + } + } + + if opts.Page == 2 { + for i := 0; i < 10; i++ { + alerts = append(alerts, &github.Alert{State: &openState}) + } + for i := 0; i < 30; i++ { + alerts = append(alerts, &github.Alert{State: &closedState}) + } + } + } + return alerts, nil, nil } +func (g *githubCodeqlScanningMock) ListAnalysesForRepo(ctx context.Context, owner, repo string, opts *github.AnalysesListOptions) ([]*github.ScanningAnalysis, *github.Response, error) { + resultsCount := 3 + analysis := []*github.ScanningAnalysis{{ResultsCount: &resultsCount}} + return analysis, nil, nil +} + type githubCodeqlScanningErrorMock struct { } @@ -29,22 +62,38 @@ func (g *githubCodeqlScanningErrorMock) ListAlertsForRepo(ctx context.Context, o return []*github.Alert{}, nil, errors.New("Some error") } +func (g *githubCodeqlScanningErrorMock) ListAnalysesForRepo(ctx context.Context, owner, repo string, opts *github.AnalysesListOptions) ([]*github.ScanningAnalysis, *github.Response, error) { + return []*github.ScanningAnalysis{}, nil, errors.New("Some error") +} + func TestGetVulnerabilitiesFromClient(t *testing.T) { ctx := context.Background() t.Parallel() t.Run("Success", func(t *testing.T) { ghCodeqlScanningMock := githubCodeqlScanningMock{} - codeqlScanAuditInstance := NewCodeqlScanAuditInstance("", "", "", "", []string{}) - codeScanning, err := getVulnerabilitiesFromClient(ctx, &ghCodeqlScanningMock, "ref", &codeqlScanAuditInstance) + totalAlerts := 3 + codeqlScanAuditInstance := NewCodeqlScanAuditInstance("", "", "testRepo1", "", []string{}) + codeScanning, err := getVulnerabilitiesFromClient(ctx, &ghCodeqlScanningMock, "ref", &codeqlScanAuditInstance, totalAlerts) assert.NoError(t, err) assert.Equal(t, 3, codeScanning.Total) assert.Equal(t, 1, codeScanning.Audited) }) + t.Run("Success with pagination results", func(t *testing.T) { + ghCodeqlScanningMock := githubCodeqlScanningMock{} + totalAlerts := 120 + codeqlScanAuditInstance := NewCodeqlScanAuditInstance("", "", "testRepo2", "", []string{}) + codeScanning, err := getVulnerabilitiesFromClient(ctx, &ghCodeqlScanningMock, "ref", &codeqlScanAuditInstance, totalAlerts) + assert.NoError(t, err) + 
assert.Equal(t, 120, codeScanning.Total) + assert.Equal(t, 80, codeScanning.Audited) + }) + t.Run("Error", func(t *testing.T) { ghCodeqlScanningErrorMock := githubCodeqlScanningErrorMock{} + totalAlerts := 3 codeqlScanAuditInstance := NewCodeqlScanAuditInstance("", "", "", "", []string{}) - _, err := getVulnerabilitiesFromClient(ctx, &ghCodeqlScanningErrorMock, "ref", &codeqlScanAuditInstance) + _, err := getVulnerabilitiesFromClient(ctx, &ghCodeqlScanningErrorMock, "ref", &codeqlScanAuditInstance, totalAlerts) assert.Error(t, err) }) } @@ -58,3 +107,22 @@ func TestGetApiUrl(t *testing.T) { assert.Equal(t, "https://github.test.org/api/v3", getApiUrl("https://github.test.org")) }) } + +func TestGetTotalAnalysesFromClient(t *testing.T) { + ctx := context.Background() + t.Parallel() + t.Run("Success", func(t *testing.T) { + ghCodeqlScanningMock := githubCodeqlScanningMock{} + codeqlScanAuditInstance := NewCodeqlScanAuditInstance("", "", "", "", []string{}) + total, err := getTotalAlertsFromClient(ctx, &ghCodeqlScanningMock, "ref", &codeqlScanAuditInstance) + assert.NoError(t, err) + assert.Equal(t, 3, total) + }) + + t.Run("Error", func(t *testing.T) { + ghCodeqlScanningErrorMock := githubCodeqlScanningErrorMock{} + codeqlScanAuditInstance := NewCodeqlScanAuditInstance("", "", "", "", []string{}) + _, err := getTotalAlertsFromClient(ctx, &ghCodeqlScanningErrorMock, "ref", &codeqlScanAuditInstance) + assert.Error(t, err) + }) +} From 072378bb837ba24267d645e0471cc875ca769cf4 Mon Sep 17 00:00:00 2001 From: michaelkubiaczyk <48311127+michaelkubiaczyk@users.noreply.github.com> Date: Thu, 1 Jun 2023 11:03:01 +0200 Subject: [PATCH 06/36] Cxone release - Fixes for 0-result scans, better preset handling (#4387) * Initial in progress * compiling but not yet functional * Missed file * updated checkmarxone step * Working up to fetching a project then breaks * Missed file * Breaks when retrieving projects+proxy set * Create project & run scan working, now polling * Fixed polling * added back the zipfile remove command * Fixed polling again * Generates and downloads PDF report * Updated and working, prep for refactor * Added compliance steps * Cleanup, reporting, added groovy connector * fixed groovy file * checkmarxone to checkmarxOne * checkmarxone to checkmarxOne * split credentials (id+secret, apikey), renamed pullrequestname to branch, groovy fix * Fixed filenames & yaml * missed the metadata_generated.go * added json to sarif conversion * fix:type in new checkmarxone package * fix:type in new checkmarxone package * removed test logs, added temp error log for creds * extra debugging to fix crash * improved auth logging, fixed query parse issue * fixed bug with group fetch when using oauth user * CWE can be -1 if not defined, can't be uint * Query also had CweID * Disabled predicates-fetch in sarif generation * Removing leftover info log message * Better error handling * fixed default preset configuration * removing .bat files - sorry * Cleanup per initial review * refactoring per Gist, fixed project find, add apps * small fix - sorry for commit noise while testing * Fixing issues with incremental scans. * removing maxretries * Updated per PR feedback, further changes todo toda * JSON Report changes and reporting cleanup * removing .bat (again?) 
* adding docs, groovy unit test, linter fixes * Started adding tests maybe 15% covered * fix(checkmarxOne): test cases for pkg and reporting * fix(checkmarxOne):fix formatting * feat(checkmarxone): update interface with missing method * feat(checkmarxone):change runStep signature to be able to inject dependency * feat(checkmarxone): add tests for step (wip) * Adding a bit more coverage * feat(checkmarxOne): fix code review * feat(checkmarxOne): fix code review * feat(checkmarxOne): fix code review * feat(checkmarxOne): fix integration test PR * adding scan-summary bug workaround, reportgen fail * enforceThresholds fix when no results passed in * fixed gap when preset empty in yaml & project conf * fixed another gap in preset selection * fix 0-result panic * fail when no preset is set anywhere * removed comment --------- Co-authored-by: thtri Co-authored-by: Thanh-Hai Trinh --- cmd/checkmarxOneExecuteScan.go | 25 +++++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/cmd/checkmarxOneExecuteScan.go b/cmd/checkmarxOneExecuteScan.go index 9b6efdc889..30f6023e31 100644 --- a/cmd/checkmarxOneExecuteScan.go +++ b/cmd/checkmarxOneExecuteScan.go @@ -272,13 +272,17 @@ func (c *checkmarxOneExecuteScanHelper) SetProjectPreset() error { } if c.config.Preset == "" { - log.Entry().Infof("Pipeline yaml does not specify a preset, will use project configuration (%v).", currentPreset) + if currentPreset == "" { + return fmt.Errorf("must specify the preset in either the pipeline yaml or in the CheckmarxOne project configuration") + } else { + log.Entry().Infof("Pipeline yaml does not specify a preset, will use project configuration (%v).", currentPreset) + } c.config.Preset = currentPreset } else if currentPreset != c.config.Preset { log.Entry().Infof("Project configured preset (%v) does not match pipeline yaml (%v) - updating project configuration.", currentPreset, c.config.Preset) c.sys.SetProjectPreset(c.Project.ProjectID, c.config.Preset, true) } else { - log.Entry().Infof("Project is configured to use preset %v", currentPreset) + log.Entry().Infof("Project is already configured to use pipeline preset %v", currentPreset) } return nil } @@ -532,12 +536,17 @@ func (c *checkmarxOneExecuteScanHelper) ParseResults(scan *checkmarxOne.Scan) (m return detailedResults, fmt.Errorf("Unable to fetch scan metadata for scan %v: %s", scan.ScanID, err) } + totalResultCount := uint64(0) + scansummary, err := c.sys.GetScanSummary(scan.ScanID) if err != nil { - return detailedResults, fmt.Errorf("Unable to fetch scan summary for scan %v: %s", scan.ScanID, err) + /* TODO: scansummary throws a 404 for 0-result scans, once the bug is fixed put this code back. 
*/ + // return detailedResults, fmt.Errorf("Unable to fetch scan summary for scan %v: %s", scan.ScanID, err) + } else { + totalResultCount = scansummary.TotalCount() } - results, err := c.sys.GetScanResults(scan.ScanID, scansummary.TotalCount()) + results, err := c.sys.GetScanResults(scan.ScanID, totalResultCount) if err != nil { return detailedResults, fmt.Errorf("Unable to fetch scan results for scan %v: %s", scan.ScanID, err) } @@ -606,12 +615,15 @@ func (c *checkmarxOneExecuteScanHelper) generateAndDownloadReport(scan *checkmar if finalStatus.Status == "completed" { break + } else if finalStatus.Status == "failed" { + return []byte{}, fmt.Errorf("report generation failed") } time.Sleep(10 * time.Second) } if finalStatus.Status == "completed" { return c.sys.DownloadReport(finalStatus.ReportURL) } + return []byte{}, fmt.Errorf("unexpected status %v recieved", finalStatus.Status) } @@ -954,8 +966,9 @@ func (c *checkmarxOneExecuteScanHelper) enforceThresholds(results *map[string]in } // if the flag is switched on, calculate the Low findings threshold per query if cxLowThresholdPerQuery { - lowPerQueryMap := (*results)["LowPerQuery"].(map[string]map[string]int) - if lowPerQueryMap != nil { + if (*results)["LowPerQuery"] != nil { + lowPerQueryMap := (*results)["LowPerQuery"].(map[string]map[string]int) + for lowQuery, resultsLowQuery := range lowPerQueryMap { lowAuditedPerQuery := resultsLowQuery["Confirmed"] + resultsLowQuery["NotExploitable"] lowOverallPerQuery := resultsLowQuery["Issues"] From 83519eb7719dffdf46dd647954d133733894ddc6 Mon Sep 17 00:00:00 2001 From: Marcus Holl Date: Thu, 1 Jun 2023 15:22:57 +0200 Subject: [PATCH 07/36] fix misleading/wrong comment (#4295) --- vars/piperExecuteBin.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vars/piperExecuteBin.groovy b/vars/piperExecuteBin.groovy index 6f4d42360b..f9ae3b1d9f 100644 --- a/vars/piperExecuteBin.groovy +++ b/vars/piperExecuteBin.groovy @@ -54,7 +54,7 @@ void call(Map parameters = [:], String stepName, String metadataFile, List crede config += ["ansHookServiceKeyCredentialsId": ansHookServiceKeyCredentialsId] // prepare stashes - // first eliminate empty stashes + // first eliminate non existing stashes config.stashContent = utils.unstashAll(config.stashContent) // then make sure that commonPipelineEnvironment, config, ... 
is also available when step stashing is active if (config.stashContent?.size() > 0) { From de7027df4035e77c70f647a5953e37510e1aa041 Mon Sep 17 00:00:00 2001 From: Marcus Holl Date: Thu, 1 Jun 2023 15:49:07 +0200 Subject: [PATCH 08/36] stashing tests (#4379) Co-authored-by: Alexander Link <33052602+alxsap@users.noreply.github.com> Co-authored-by: Alexander Link <33052602+alxsap@users.noreply.github.com> --- test/groovy/com/sap/piper/UtilsTest.groovy | 188 +++++++++++++++++++++ 1 file changed, 188 insertions(+) diff --git a/test/groovy/com/sap/piper/UtilsTest.groovy b/test/groovy/com/sap/piper/UtilsTest.groovy index 5a0bb17948..ce46d6ff7c 100644 --- a/test/groovy/com/sap/piper/UtilsTest.groovy +++ b/test/groovy/com/sap/piper/UtilsTest.groovy @@ -10,6 +10,9 @@ import org.junit.rules.ExpectedException import org.junit.rules.RuleChain import static org.hamcrest.Matchers.is +import static org.hamcrest.Matchers.hasItem +import static org.hamcrest.Matchers.hasItems +import static org.hamcrest.Matchers.hasSize import util.JenkinsLoggingRule import util.JenkinsShellCallRule @@ -43,12 +46,197 @@ class UtilsTest extends BasePiperTest { assertThat(result, is('0dad6c33b6246702132454f604dee80740f399ad')) } + @Test + void testStashWithDefaults() { + Map stashProperties + + def examinee = newExaminee( + stashClosure: { Map stashProps -> + stashProperties = stashProps + } + ) + examinee.stash('foo') + + assertThat(stashProperties, is([name: 'foo', includes: '**/*.*', excludes: ''])) + } + + @Test + void testStashWithIncludesAndExcludes() { + Map stashProperties + + def examinee = newExaminee( + stashClosure: { Map stashProps -> + stashProperties = stashProps + } + ) + + examinee.stash('foo', '**/*.mtar', '**/target') + + assert(stashProperties == [name: 'foo', includes: '**/*.mtar', excludes: '**/target']) + } + + @Test + void testStashListStashesAllStashes() { + def stashes = [] as Set + def examinee = newExaminee( + stashClosure: { Map stash -> + stashes << stash + } + ) + + examinee.stashList(nullScript, [ + [ + name: 'foo', + includes: '*.foo', + excludes: 'target/foo/*' + ], + [ + name: 'bar', + includes: '*.bar', + excludes: 'target/bar/*' + ] + ]) + + assert stashes == [ + [name: 'foo', includes: '*.foo', excludes: 'target/foo/*', allowEmpty: true], + [name: 'bar', includes: '*.bar', excludes: 'target/bar/*', allowEmpty: true] + ] as Set + } + + @Test + void testStashListDoesNotSwallowException() { + + thrown.expect(RuntimeException.class) + thrown.expectMessage('something went wrong') + + def examinee = newExaminee( + stashClosure: { Map stash -> + throw new RuntimeException('something went wrong') + } + ) + + examinee.stashList(nullScript, [ + [ + name: 'fail', + includes: '*.fail', + excludes: 'target/fail/*' + ], + ]) + } + + @Test + void testUnstashStageFilesUnstashesAllUnstashableStashes() { + + // We do not fail in case a stash cannot be unstashed + // That might be barely OK for non-existing stashes, but there might also be + // real issues, e.g. related to permission issues when overwriting existing files + // maybe also from other stashes unstashed earlier. + // The behaviour wrt unstashable stashes should be improved. 
In case of issues + // with unstashing, we should throw an exception + + boolean deleteDirCalled = false + def unstashed = [] + def examinee = newExaminee( + unstashClosure: { def stashName -> + if(stashName == 'fail') { + throw new RuntimeException('something went wrong') + } + unstashed << stashName + } + ) + + nullScript.commonPipelineEnvironment.configuration.stageStashes = [ + foo : [ + unstash: ['stash-1', 'stash-2', 'fail', 'duplicate'] + ] + ] + + nullScript.metaClass.deleteDir = { deleteDirCalled = true } + + def stashResult = examinee.unstashStageFiles(nullScript, 'foo', ['additional-stash', 'duplicate']) + + assertThat(deleteDirCalled, is(true)) + + assertThat(unstashed, hasSize(5)) // should be 4 since we should not unstash 'duplicate' twice + assertThat(unstashed, hasItems('stash-1', 'stash-2', 'additional-stash', 'duplicate')) + + // This is inconsistent. Above we can see only four different stashes has been unstashed ('duplicate' twice), + // but here we see that the stashResult contains six entries, also the 'fail' entry + // for which we throw an exception (... and duplicate twice). + // We should fix that and adjust the test accordingly with the fix. + assertThat(stashResult, hasSize(6)) + assertThat(stashResult, hasItems('stash-1', 'stash-2', 'additional-stash', 'fail', 'duplicate')) + + // cleanup the deleteDir method + nullScript.metaClass = null + } + @Test void testUnstashAllSkipNull() { def stashResult = utils.unstashAll(['a', null, 'b']) assert stashResult == ['a', 'b'] } + @Test + void testUnstashSkipsFailedUnstashes() { + + def examinee = newExaminee( + unstashClosure: { def stashName -> + if(stashName == 'fail') { + throw new RuntimeException('something went wrong') + } + } + ) + + def stashResult = examinee.unstashAll(['a', 'fail', 'b']) + + assert stashResult == ['a', 'b'] + } + + + @Test + void testUnstashAllSucceeds() { + def unstashed = [] as Set + def examinee = newExaminee(unstashClosure: { def stashName -> unstashed << stashName}) + + examinee.unstashAll(['a', 'b']) + + assert(unstashed == ['a', 'b'] as Set) + } + + @Test + void testUnstashFails() { + def logMessages = [] + def examinee = newExaminee( + unstashClosure: { + def stashName -> throw new RuntimeException('something went wrong') + }, + echoClosure: { + // coerce to java.lang.String, we might have GStrings. + // comparism with java.lang.String might fail. + message -> logMessages << message.toString() + } + ) + def stashResult = examinee.unstash('a') + + // in case unstash fails (maybe the stash does not exist, or we cannot unstash due to + // some colliding files in conjunction with file permissions) we emit a log message + // and continue silently instead of failing. In that case we get an empty array back + // instead an array containing the name of the unstashed stash. 
+ assertThat(logMessages, hasItem('Unstash failed: a (something went wrong)')) + assert(stashResult == []) + } + + private Utils newExaminee(Map parameters) { + def examinee = new Utils() + examinee.steps = [ + stash: parameters.stashClosure ?: {}, + unstash: parameters.unstashClosure ?: {}, + ] + examinee.echo = parameters.echoClosure ?: {} + return examinee + } + @Test void testAppendNonExistingParameterToStringList() { Map parameters = [:] From 416cb1d327583df4d880454873683bd7545f080c Mon Sep 17 00:00:00 2001 From: sumeet patil Date: Fri, 2 Jun 2023 18:31:52 +0530 Subject: [PATCH 09/36] fix(codeqlExecuteScan): added report file to output resources (#4388) --- cmd/codeqlExecuteScan.go | 22 +++++++++++----------- cmd/codeqlExecuteScan_generated.go | 2 ++ resources/metadata/codeqlExecuteScan.yaml | 2 ++ 3 files changed, 15 insertions(+), 11 deletions(-) diff --git a/cmd/codeqlExecuteScan.go b/cmd/codeqlExecuteScan.go index 9ac2d7f0ee..5471ed9173 100644 --- a/cmd/codeqlExecuteScan.go +++ b/cmd/codeqlExecuteScan.go @@ -280,19 +280,19 @@ func runCodeqlExecuteScan(config *codeqlExecuteScanOptions, telemetryData *telem return reports, err } - codeqlScanAuditInstance := codeql.NewCodeqlScanAuditInstance(repoInfo.serverUrl, repoInfo.owner, repoInfo.repo, token, []string{}) - scanResults, err := codeqlScanAuditInstance.GetVulnerabilities(repoInfo.ref) - if err != nil { - return reports, errors.Wrap(err, "failed to get scan results") - } + if config.CheckForCompliance { + codeqlScanAuditInstance := codeql.NewCodeqlScanAuditInstance(repoInfo.serverUrl, repoInfo.owner, repoInfo.repo, token, []string{}) + scanResults, err := codeqlScanAuditInstance.GetVulnerabilities(repoInfo.ref) + if err != nil { + return reports, errors.Wrap(err, "failed to get scan results") + } - codeqlAudit := codeql.CodeqlAudit{ToolName: "codeql", RepositoryUrl: repoUrl, CodeScanningLink: repoCodeqlScanUrl, RepositoryReferenceUrl: repoReference, ScanResults: scanResults} - paths, err := codeql.WriteJSONReport(codeqlAudit, config.ModulePath) - if err != nil { - return reports, errors.Wrap(err, "failed to write json compliance report") - } + codeqlAudit := codeql.CodeqlAudit{ToolName: "codeql", RepositoryUrl: repoUrl, CodeScanningLink: repoCodeqlScanUrl, RepositoryReferenceUrl: repoReference, ScanResults: scanResults} + paths, err := codeql.WriteJSONReport(codeqlAudit, config.ModulePath) + if err != nil { + return reports, errors.Wrap(err, "failed to write json compliance report") + } - if config.CheckForCompliance { unaudited := (scanResults.Total - scanResults.Audited) if unaudited > config.VulnerabilityThresholdTotal { msg := fmt.Sprintf("Your repository %v with ref %v is not compliant. 
Total unaudited issues are %v which is greater than the VulnerabilityThresholdTotal count %v", repoUrl, repoInfo.ref, unaudited, config.VulnerabilityThresholdTotal) diff --git a/cmd/codeqlExecuteScan_generated.go b/cmd/codeqlExecuteScan_generated.go index 2487c60888..650d42cb3e 100644 --- a/cmd/codeqlExecuteScan_generated.go +++ b/cmd/codeqlExecuteScan_generated.go @@ -50,6 +50,7 @@ func (p *codeqlExecuteScanReports) persist(stepConfig codeqlExecuteScanOptions, {FilePattern: "**/*.csv", ParamRef: "", StepResultType: "codeql"}, {FilePattern: "**/*.sarif", ParamRef: "", StepResultType: "codeql"}, {FilePattern: "**/toolrun_codeql_*.json", ParamRef: "", StepResultType: "codeql"}, + {FilePattern: "**/piper_codeql_report.json", ParamRef: "", StepResultType: "codeql"}, } envVars := []gcs.EnvVar{ {Name: "GOOGLE_APPLICATION_CREDENTIALS", Value: gcpJsonKeyFilePath, Modified: false}, @@ -387,6 +388,7 @@ func codeqlExecuteScanMetadata() config.StepData { {"filePattern": "**/*.csv", "type": "codeql"}, {"filePattern": "**/*.sarif", "type": "codeql"}, {"filePattern": "**/toolrun_codeql_*.json", "type": "codeql"}, + {"filePattern": "**/piper_codeql_report.json", "type": "codeql"}, }, }, }, diff --git a/resources/metadata/codeqlExecuteScan.yaml b/resources/metadata/codeqlExecuteScan.yaml index 6d0efc34c1..c6fddd78dd 100644 --- a/resources/metadata/codeqlExecuteScan.yaml +++ b/resources/metadata/codeqlExecuteScan.yaml @@ -171,3 +171,5 @@ spec: type: codeql - filePattern: "**/toolrun_codeql_*.json" type: codeql + - filePattern: "**/piper_codeql_report.json" + type: codeql From 97495fd18bc5a9676378980cb3e185f49ed282b5 Mon Sep 17 00:00:00 2001 From: Marcus Holl Date: Wed, 7 Jun 2023 14:58:44 +0200 Subject: [PATCH 10/36] fix: resolve lint files (#4392) Fix glob pattern for resolving eslint files Do not swallow exception when resolving lint files --- cmd/npmExecuteLint.go | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/cmd/npmExecuteLint.go b/cmd/npmExecuteLint.go index c7c3979dac..237eadcf00 100644 --- a/cmd/npmExecuteLint.go +++ b/cmd/npmExecuteLint.go @@ -167,8 +167,10 @@ func runDefaultLint(npmExecutor npm.Executor, utils lintUtils, failOnError bool) } func findEslintConfigs(utils lintUtils) []string { - unfilteredListOfEslintConfigs, _ := utils.Glob("**/.eslintrc.*") - + unfilteredListOfEslintConfigs, err := utils.Glob("**/.eslintrc*") + if err != nil { + log.Entry().Warnf("Error during resolving lint config files: %v", err) + } var eslintConfigs []string for _, config := range unfilteredListOfEslintConfigs { From 39d52a2123380a4a1570c2882d401f25968e0e50 Mon Sep 17 00:00:00 2001 From: Anil Keshav Date: Wed, 14 Jun 2023 09:11:33 +0200 Subject: [PATCH 11/36] feat (protecodeExecuteScan) enhancing protecode step with registry credentials (#4378) * enhancing protecode with registry credentials * Use protecodeUtils instead of separate package * Add target path for docker config to be created * Fix tests * Fix build flags --------- Co-authored-by: Vyacheslav Starostin --- cmd/protecodeExecuteScan.go | 25 ++++++++--- cmd/protecodeExecuteScan_generated.go | 44 ++++++++++++++++++++ cmd/protecodeExecuteScan_test.go | 9 +++- resources/metadata/protecodeExecuteScan.yaml | 26 ++++++++++++ 4 files changed, 97 insertions(+), 7 deletions(-) diff --git a/cmd/protecodeExecuteScan.go b/cmd/protecodeExecuteScan.go index 52139c43a6..13ad76defc 100644 --- a/cmd/protecodeExecuteScan.go +++ b/cmd/protecodeExecuteScan.go @@ -15,6 +15,7 @@ import ( "github.com/pkg/errors" 
"github.com/SAP/jenkins-library/pkg/command" + "github.com/SAP/jenkins-library/pkg/docker" piperDocker "github.com/SAP/jenkins-library/pkg/docker" "github.com/SAP/jenkins-library/pkg/log" "github.com/SAP/jenkins-library/pkg/piperutils" @@ -25,9 +26,10 @@ import ( ) const ( - webReportPath = "%s/#/product/%v/" - scanResultFile = "protecodescan_vulns.json" - stepResultFile = "protecodeExecuteScan.json" + webReportPath = "%s/#/product/%v/" + scanResultFile = "protecodescan_vulns.json" + stepResultFile = "protecodeExecuteScan.json" + dockerConfigFile = ".pipeline/docker/config.json" ) type protecodeUtils interface { @@ -72,7 +74,9 @@ func runProtecodeScan(config *protecodeExecuteScanOptions, influx *protecodeExec return err } - correctDockerConfigEnvVar(config) + if err := correctDockerConfigEnvVar(config, utils); err != nil { + return err + } var fileName, filePath string var err error @@ -372,8 +376,18 @@ func uploadFile(utils protecodeUtils, config protecodeExecuteScanOptions, produc return productID } -func correctDockerConfigEnvVar(config *protecodeExecuteScanOptions) { +func correctDockerConfigEnvVar(config *protecodeExecuteScanOptions, utils protecodeUtils) error { + var err error path := config.DockerConfigJSON + + if len(config.DockerConfigJSON) > 0 && len(config.DockerRegistryURL) > 0 && len(config.ContainerRegistryPassword) > 0 && len(config.ContainerRegistryUser) > 0 { + path, err = docker.CreateDockerConfigJSON(config.DockerRegistryURL, config.ContainerRegistryUser, config.ContainerRegistryPassword, dockerConfigFile, config.DockerConfigJSON, utils) + } + + if err != nil { + return errors.Wrapf(err, "failed to create / update docker config json file") + } + if len(path) > 0 { log.Entry().Infof("Docker credentials configuration: %v", path) path, _ := filepath.Abs(path) @@ -383,6 +397,7 @@ func correctDockerConfigEnvVar(config *protecodeExecuteScanOptions) { } else { log.Entry().Info("Docker credentials configuration: NONE") } + return nil } // Calculate version based on versioning model and artifact version or return custom scan version provided by user diff --git a/cmd/protecodeExecuteScan_generated.go b/cmd/protecodeExecuteScan_generated.go index 90c343a879..d93672871e 100644 --- a/cmd/protecodeExecuteScan_generated.go +++ b/cmd/protecodeExecuteScan_generated.go @@ -26,6 +26,8 @@ type protecodeExecuteScanOptions struct { FailOnSevereVulnerabilities bool `json:"failOnSevereVulnerabilities,omitempty"` ScanImage string `json:"scanImage,omitempty"` DockerRegistryURL string `json:"dockerRegistryUrl,omitempty"` + ContainerRegistryPassword string `json:"containerRegistryPassword,omitempty"` + ContainerRegistryUser string `json:"containerRegistryUser,omitempty"` DockerConfigJSON string `json:"dockerConfigJSON,omitempty"` CleanupMode string `json:"cleanupMode,omitempty" validate:"possible-values=none binary complete"` FilePath string `json:"filePath,omitempty"` @@ -173,6 +175,8 @@ BDBA (Protecode) uses a combination of static binary analysis techniques to X-ra log.SetErrorCategory(log.ErrorConfiguration) return err } + log.RegisterSecret(stepConfig.ContainerRegistryPassword) + log.RegisterSecret(stepConfig.ContainerRegistryUser) log.RegisterSecret(stepConfig.DockerConfigJSON) log.RegisterSecret(stepConfig.Username) log.RegisterSecret(stepConfig.Password) @@ -245,6 +249,8 @@ func addProtecodeExecuteScanFlags(cmd *cobra.Command, stepConfig *protecodeExecu cmd.Flags().BoolVar(&stepConfig.FailOnSevereVulnerabilities, "failOnSevereVulnerabilities", true, "Whether to fail the step on severe 
vulnerabilties or not") cmd.Flags().StringVar(&stepConfig.ScanImage, "scanImage", os.Getenv("PIPER_scanImage"), "The reference to the docker image to scan with Protecode. Note: If possible please also check [fetchUrl](https://www.project-piper.io/steps/protecodeExecuteScan/#fetchurl) parameter, which might help you to optimize upload time.") cmd.Flags().StringVar(&stepConfig.DockerRegistryURL, "dockerRegistryUrl", os.Getenv("PIPER_dockerRegistryUrl"), "The reference to the docker registry to scan with Protecode") + cmd.Flags().StringVar(&stepConfig.ContainerRegistryPassword, "containerRegistryPassword", os.Getenv("PIPER_containerRegistryPassword"), "For `buildTool: docker`: Password for container registry access - typically provided by the CI/CD environment.") + cmd.Flags().StringVar(&stepConfig.ContainerRegistryUser, "containerRegistryUser", os.Getenv("PIPER_containerRegistryUser"), "For `buildTool: docker`: Username for container registry access - typically provided by the CI/CD environment.") cmd.Flags().StringVar(&stepConfig.DockerConfigJSON, "dockerConfigJSON", os.Getenv("PIPER_dockerConfigJSON"), "Path to the file `.docker/config.json` - this is typically provided by your CI/CD system. You can find more details about the Docker credentials in the [Docker documentation](https://docs.docker.com/engine/reference/commandline/login/).") cmd.Flags().StringVar(&stepConfig.CleanupMode, "cleanupMode", `binary`, "Decides which parts are removed from the Protecode backend after the scan") cmd.Flags().StringVar(&stepConfig.FilePath, "filePath", os.Getenv("PIPER_filePath"), "The path to the file from local workspace to scan with Protecode") @@ -332,6 +338,44 @@ func protecodeExecuteScanMetadata() config.StepData { Aliases: []config.Alias{}, Default: os.Getenv("PIPER_dockerRegistryUrl"), }, + { + Name: "containerRegistryPassword", + ResourceRef: []config.ResourceReference{ + { + Name: "commonPipelineEnvironment", + Param: "container/repositoryPassword", + }, + + { + Name: "commonPipelineEnvironment", + Param: "custom/repositoryPassword", + }, + }, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_containerRegistryPassword"), + }, + { + Name: "containerRegistryUser", + ResourceRef: []config.ResourceReference{ + { + Name: "commonPipelineEnvironment", + Param: "container/repositoryUsername", + }, + + { + Name: "commonPipelineEnvironment", + Param: "custom/repositoryUsername", + }, + }, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_containerRegistryUser"), + }, { Name: "dockerConfigJSON", ResourceRef: []config.ResourceReference{ diff --git a/cmd/protecodeExecuteScan_test.go b/cmd/protecodeExecuteScan_test.go index 61570920a4..a960313855 100644 --- a/cmd/protecodeExecuteScan_test.go +++ b/cmd/protecodeExecuteScan_test.go @@ -348,6 +348,11 @@ func TestExecuteProtecodeScan(t *testing.T) { } func TestCorrectDockerConfigEnvVar(t *testing.T) { + utils := protecodeTestUtilsBundle{ + FilesMock: &mock.FilesMock{}, + DownloadMock: &mock.DownloadMock{}, + } + t.Run("with credentials", func(t *testing.T) { // init testDirectory := t.TempDir() @@ -366,7 +371,7 @@ func TestCorrectDockerConfigEnvVar(t *testing.T) { resetValue := os.Getenv("DOCKER_CONFIG") defer os.Setenv("DOCKER_CONFIG", resetValue) // test - correctDockerConfigEnvVar(&protecodeExecuteScanOptions{DockerConfigJSON: dockerConfigFile}) + 
correctDockerConfigEnvVar(&protecodeExecuteScanOptions{DockerConfigJSON: dockerConfigFile}, utils) // assert absolutePath, _ := filepath.Abs(dockerConfigDir) assert.Equal(t, absolutePath, os.Getenv("DOCKER_CONFIG")) @@ -376,7 +381,7 @@ func TestCorrectDockerConfigEnvVar(t *testing.T) { resetValue := os.Getenv("DOCKER_CONFIG") defer os.Setenv("DOCKER_CONFIG", resetValue) // test - correctDockerConfigEnvVar(&protecodeExecuteScanOptions{}) + correctDockerConfigEnvVar(&protecodeExecuteScanOptions{}, utils) // assert assert.Equal(t, resetValue, os.Getenv("DOCKER_CONFIG")) }) diff --git a/resources/metadata/protecodeExecuteScan.yaml b/resources/metadata/protecodeExecuteScan.yaml index d3cbd52599..3c13c97ed0 100644 --- a/resources/metadata/protecodeExecuteScan.yaml +++ b/resources/metadata/protecodeExecuteScan.yaml @@ -67,6 +67,32 @@ spec: - PARAMETERS - STAGES - STEPS + - name: containerRegistryPassword + description: "For `buildTool: docker`: Password for container registry access - typically provided by the CI/CD environment." + type: string + scope: + - PARAMETERS + - STAGES + - STEPS + secret: true + resourceRef: + - name: commonPipelineEnvironment + param: container/repositoryPassword + - name: commonPipelineEnvironment + param: custom/repositoryPassword + - name: containerRegistryUser + description: "For `buildTool: docker`: Username for container registry access - typically provided by the CI/CD environment." + type: string + scope: + - PARAMETERS + - STAGES + - STEPS + secret: true + resourceRef: + - name: commonPipelineEnvironment + param: container/repositoryUsername + - name: commonPipelineEnvironment + param: custom/repositoryUsername - name: dockerConfigJSON type: string description: Path to the file `.docker/config.json` - this is typically provided by your CI/CD system. You can find more details about the Docker credentials in the [Docker documentation](https://docs.docker.com/engine/reference/commandline/login/). From 9b60fcf5067c851e8538b2eb6ddafb57ebad80e9 Mon Sep 17 00:00:00 2001 From: sumeet patil Date: Wed, 14 Jun 2023 16:59:01 +0530 Subject: [PATCH 12/36] fix(codeqlExecuteScan): fixed logic for getting code-scanning alerts (#4393) --- cmd/codeqlExecuteScan.go | 1 + pkg/codeql/codeql.go | 83 ++++++++++++++++----------------------- pkg/codeql/codeql_test.go | 55 ++++++++++---------------- 3 files changed, 55 insertions(+), 84 deletions(-) diff --git a/cmd/codeqlExecuteScan.go b/cmd/codeqlExecuteScan.go index 5471ed9173..b82ec51b10 100644 --- a/cmd/codeqlExecuteScan.go +++ b/cmd/codeqlExecuteScan.go @@ -71,6 +71,7 @@ func execute(utils codeqlExecuteScanUtils, cmd []string, isVerbose bool) error { if isVerbose { cmd = append(cmd, "-v") } + return utils.RunExecutable("codeql", cmd...) 
} diff --git a/pkg/codeql/codeql.go b/pkg/codeql/codeql.go index b64bdffd5b..dbab245054 100644 --- a/pkg/codeql/codeql.go +++ b/pkg/codeql/codeql.go @@ -2,7 +2,6 @@ package codeql import ( "context" - "errors" sapgithub "github.com/SAP/jenkins-library/pkg/github" "github.com/google/go-github/v45/github" @@ -14,10 +13,11 @@ type CodeqlScanAudit interface { type githubCodeqlScanningService interface { ListAlertsForRepo(ctx context.Context, owner, repo string, opts *github.AlertListOptions) ([]*github.Alert, *github.Response, error) - ListAnalysesForRepo(ctx context.Context, owner, repo string, opts *github.AnalysesListOptions) ([]*github.ScanningAnalysis, *github.Response, error) } const auditStateOpen string = "open" +const auditStateDismissed string = "dismissed" +const codeqlToolName string = "CodeQL" const perPageCount int = 100 func NewCodeqlScanAuditInstance(serverUrl, owner, repository, token string, trustedCerts []string) CodeqlScanAuditInstance { @@ -39,66 +39,51 @@ func (codeqlScanAudit *CodeqlScanAuditInstance) GetVulnerabilities(analyzedRef s if err != nil { return CodeqlScanning{}, err } - totalAlerts, err := getTotalAlertsFromClient(ctx, client.CodeScanning, analyzedRef, codeqlScanAudit) - return getVulnerabilitiesFromClient(ctx, client.CodeScanning, analyzedRef, codeqlScanAudit, totalAlerts) + return getVulnerabilitiesFromClient(ctx, client.CodeScanning, analyzedRef, codeqlScanAudit) } -func getTotalAlertsFromClient(ctx context.Context, codeScannning githubCodeqlScanningService, analyzedRef string, codeqlScanAudit *CodeqlScanAuditInstance) (int, error) { - analysesOptions := github.AnalysesListOptions{ - Ref: &analyzedRef, - } - analyses, _, err := codeScannning.ListAnalysesForRepo(ctx, codeqlScanAudit.owner, codeqlScanAudit.repository, &analysesOptions) - if err != nil { - return 0, err - } - if len(analyses) < 1 { - return 0, errors.New("analyses for ref not found") - } - return *analyses[0].ResultsCount, nil -} +func getVulnerabilitiesFromClient(ctx context.Context, codeScanning githubCodeqlScanningService, analyzedRef string, codeqlScanAudit *CodeqlScanAuditInstance) (CodeqlScanning, error) { + page := 1 + audited := 0 + totalAlerts := 0 + + for page != 0 { + alertOptions := github.AlertListOptions{ + State: "", + Ref: analyzedRef, + ListOptions: github.ListOptions{ + Page: page, + PerPage: perPageCount, + }, + } + + alerts, response, err := codeScanning.ListAlertsForRepo(ctx, codeqlScanAudit.owner, codeqlScanAudit.repository, &alertOptions) + if err != nil { + return CodeqlScanning{}, err + } -func getVulnerabilitiesFromClient(ctx context.Context, codeScanning githubCodeqlScanningService, analyzedRef string, codeqlScanAudit *CodeqlScanAuditInstance, totalAlerts int) (CodeqlScanning, error) { - pages := totalAlerts/perPageCount + 1 - errChan := make(chan error) - openStateCountChan := make(chan int) - for page := 1; page <= pages; page++ { - go func(i int) { - alertOptions := github.AlertListOptions{ - State: "", - Ref: analyzedRef, - ListOptions: github.ListOptions{ - Page: i, - PerPage: perPageCount, - }, + page = response.NextPage + + for _, alert := range alerts { + if *alert.Tool.Name != codeqlToolName { + continue } - alerts, _, err := codeScanning.ListAlertsForRepo(ctx, codeqlScanAudit.owner, codeqlScanAudit.repository, &alertOptions) - if err != nil { - errChan <- err - return + if *alert.State == auditStateDismissed { + audited += 1 + totalAlerts += 1 } - openStateCount := 0 - for _, alert := range alerts { - if *alert.State == auditStateOpen { - openStateCount = 
openStateCount + 1 - } + if *alert.State == auditStateOpen { + totalAlerts += 1 } - openStateCountChan <- len(alerts) - openStateCount - }(page) + } } codeqlScanning := CodeqlScanning{} codeqlScanning.Total = totalAlerts - for i := 0; i < pages; i++ { - select { - case openStateCount := <-openStateCountChan: - codeqlScanning.Audited += openStateCount - case err := <-errChan: - return CodeqlScanning{}, err - } - } + codeqlScanning.Audited = audited return codeqlScanning, nil } diff --git a/pkg/codeql/codeql_test.go b/pkg/codeql/codeql_test.go index 11c7e7b816..07968d5856 100644 --- a/pkg/codeql/codeql_test.go +++ b/pkg/codeql/codeql_test.go @@ -17,36 +17,43 @@ type githubCodeqlScanningMock struct { func (g *githubCodeqlScanningMock) ListAlertsForRepo(ctx context.Context, owner, repo string, opts *github.AlertListOptions) ([]*github.Alert, *github.Response, error) { openState := "open" - closedState := "closed" + dismissedState := "dismissed" alerts := []*github.Alert{} + response := github.Response{} + codeqlToolName := "CodeQL" + testToolName := "Test" if repo == "testRepo1" { - alerts = append(alerts, &github.Alert{State: &openState}) - alerts = append(alerts, &github.Alert{State: &openState}) - alerts = append(alerts, &github.Alert{State: &closedState}) + alerts = append(alerts, &github.Alert{State: &openState, Tool: &github.Tool{Name: &codeqlToolName}}) + alerts = append(alerts, &github.Alert{State: &openState, Tool: &github.Tool{Name: &codeqlToolName}}) + alerts = append(alerts, &github.Alert{State: &dismissedState, Tool: &github.Tool{Name: &codeqlToolName}}) + alerts = append(alerts, &github.Alert{State: &dismissedState, Tool: &github.Tool{Name: &testToolName}}) + response.NextPage = 0 } if repo == "testRepo2" { if opts.Page == 1 { for i := 0; i < 50; i++ { - alerts = append(alerts, &github.Alert{State: &openState}) + alerts = append(alerts, &github.Alert{State: &openState, Tool: &github.Tool{Name: &codeqlToolName}}) } for i := 0; i < 50; i++ { - alerts = append(alerts, &github.Alert{State: &closedState}) + alerts = append(alerts, &github.Alert{State: &dismissedState, Tool: &github.Tool{Name: &codeqlToolName}}) } + response.NextPage = 2 } if opts.Page == 2 { for i := 0; i < 10; i++ { - alerts = append(alerts, &github.Alert{State: &openState}) + alerts = append(alerts, &github.Alert{State: &openState, Tool: &github.Tool{Name: &codeqlToolName}}) } for i := 0; i < 30; i++ { - alerts = append(alerts, &github.Alert{State: &closedState}) + alerts = append(alerts, &github.Alert{State: &dismissedState, Tool: &github.Tool{Name: &codeqlToolName}}) } + response.NextPage = 0 } } - return alerts, nil, nil + return alerts, &response, nil } func (g *githubCodeqlScanningMock) ListAnalysesForRepo(ctx context.Context, owner, repo string, opts *github.AnalysesListOptions) ([]*github.ScanningAnalysis, *github.Response, error) { @@ -71,9 +78,8 @@ func TestGetVulnerabilitiesFromClient(t *testing.T) { t.Parallel() t.Run("Success", func(t *testing.T) { ghCodeqlScanningMock := githubCodeqlScanningMock{} - totalAlerts := 3 codeqlScanAuditInstance := NewCodeqlScanAuditInstance("", "", "testRepo1", "", []string{}) - codeScanning, err := getVulnerabilitiesFromClient(ctx, &ghCodeqlScanningMock, "ref", &codeqlScanAuditInstance, totalAlerts) + codeScanning, err := getVulnerabilitiesFromClient(ctx, &ghCodeqlScanningMock, "ref", &codeqlScanAuditInstance) assert.NoError(t, err) assert.Equal(t, 3, codeScanning.Total) assert.Equal(t, 1, codeScanning.Audited) @@ -81,19 +87,17 @@ func TestGetVulnerabilitiesFromClient(t 
*testing.T) { t.Run("Success with pagination results", func(t *testing.T) { ghCodeqlScanningMock := githubCodeqlScanningMock{} - totalAlerts := 120 codeqlScanAuditInstance := NewCodeqlScanAuditInstance("", "", "testRepo2", "", []string{}) - codeScanning, err := getVulnerabilitiesFromClient(ctx, &ghCodeqlScanningMock, "ref", &codeqlScanAuditInstance, totalAlerts) + codeScanning, err := getVulnerabilitiesFromClient(ctx, &ghCodeqlScanningMock, "ref", &codeqlScanAuditInstance) assert.NoError(t, err) - assert.Equal(t, 120, codeScanning.Total) + assert.Equal(t, 140, codeScanning.Total) assert.Equal(t, 80, codeScanning.Audited) }) t.Run("Error", func(t *testing.T) { ghCodeqlScanningErrorMock := githubCodeqlScanningErrorMock{} - totalAlerts := 3 codeqlScanAuditInstance := NewCodeqlScanAuditInstance("", "", "", "", []string{}) - _, err := getVulnerabilitiesFromClient(ctx, &ghCodeqlScanningErrorMock, "ref", &codeqlScanAuditInstance, totalAlerts) + _, err := getVulnerabilitiesFromClient(ctx, &ghCodeqlScanningErrorMock, "ref", &codeqlScanAuditInstance) assert.Error(t, err) }) } @@ -107,22 +111,3 @@ func TestGetApiUrl(t *testing.T) { assert.Equal(t, "https://github.test.org/api/v3", getApiUrl("https://github.test.org")) }) } - -func TestGetTotalAnalysesFromClient(t *testing.T) { - ctx := context.Background() - t.Parallel() - t.Run("Success", func(t *testing.T) { - ghCodeqlScanningMock := githubCodeqlScanningMock{} - codeqlScanAuditInstance := NewCodeqlScanAuditInstance("", "", "", "", []string{}) - total, err := getTotalAlertsFromClient(ctx, &ghCodeqlScanningMock, "ref", &codeqlScanAuditInstance) - assert.NoError(t, err) - assert.Equal(t, 3, total) - }) - - t.Run("Error", func(t *testing.T) { - ghCodeqlScanningErrorMock := githubCodeqlScanningErrorMock{} - codeqlScanAuditInstance := NewCodeqlScanAuditInstance("", "", "", "", []string{}) - _, err := getTotalAlertsFromClient(ctx, &ghCodeqlScanningErrorMock, "ref", &codeqlScanAuditInstance) - assert.Error(t, err) - }) -} From 799853e7910db80e082916ff22e9dc518111734a Mon Sep 17 00:00:00 2001 From: Marcus Holl Date: Thu, 15 Jun 2023 12:27:38 +0200 Subject: [PATCH 13/36] [refactor] avoid code duplication when invoking eslint (#4401) --- cmd/npmExecuteLint.go | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/cmd/npmExecuteLint.go b/cmd/npmExecuteLint.go index 237eadcf00..d84160d1bd 100644 --- a/cmd/npmExecuteLint.go +++ b/cmd/npmExecuteLint.go @@ -140,13 +140,12 @@ func runDefaultLint(npmExecutor npm.Executor, utils lintUtils, failOnError bool) // i.e., .jsx, .ts, .tsx, since we can not be sure that the provided config enables parsing of these file types. if len(eslintConfigs) > 0 { for i, config := range eslintConfigs { + lintPattern := "." dir := filepath.Dir(config) - if dir == "." { - err = execRunner.RunExecutable("npx", "eslint", ".", "-f", "checkstyle", "-o", "./"+strconv.Itoa(i)+"_defaultlint.xml", "--ignore-pattern", "node_modules/", "--ignore-pattern", ".eslintrc.js") - } else { - lintPattern := dir + "/**/*.js" - err = execRunner.RunExecutable("npx", "eslint", lintPattern, "-f", "checkstyle", "-o", "./"+strconv.Itoa(i)+"_defaultlint.xml", "--ignore-pattern", "node_modules/", "--ignore-pattern", ".eslintrc.js") + if dir != "." 
{ + lintPattern = dir + "/**/*.js" } + err = execRunner.RunExecutable("npx", "eslint", lintPattern, "-f", "checkstyle", "-o", "./"+strconv.Itoa(i)+"_defaultlint.xml", "--ignore-pattern", "node_modules/", "--ignore-pattern", ".eslintrc.js") if err != nil { if failOnError { return fmt.Errorf("Lint execution failed. This might be the result of severe linting findings, problems with the provided ESLint configuration (%s), or another issue. Please examine the linting results in the UI or in %s, if available, or the log above. ", config, strconv.Itoa(i)+"_defaultlint.xml") From 8b36ae70e7704db73ad75800865c8037c87a15e7 Mon Sep 17 00:00:00 2001 From: Marcus Holl Date: Mon, 19 Jun 2023 12:04:37 +0200 Subject: [PATCH 14/36] Adjust npmExecuteLint (output-format, print output to console) (#4407) * Adjust npmExecuteLint (output-format, print output to console) Co-authored-by: Srinikitha Kondreddy --- cmd/npmExecuteLint.go | 37 ++++- cmd/npmExecuteLint_generated.go | 22 +++ cmd/npmExecuteLint_test.go | 186 ++++++++++++++++++++++++- resources/metadata/npmExecuteLint.yaml | 24 ++++ 4 files changed, 258 insertions(+), 11 deletions(-) diff --git a/cmd/npmExecuteLint.go b/cmd/npmExecuteLint.go index d84160d1bd..ee9e3da7b8 100644 --- a/cmd/npmExecuteLint.go +++ b/cmd/npmExecuteLint.go @@ -106,7 +106,8 @@ func runNpmExecuteLint(npmExecutor npm.Executor, utils lintUtils, config *npmExe } } - err := runDefaultLint(npmExecutor, utils, config.FailOnError) + err := runDefaultLint(npmExecutor, utils, config.FailOnError, config.OutputFormat, config.OutputFileName) + if err != nil { return err } @@ -127,7 +128,7 @@ func runLintScript(npmExecutor npm.Executor, runScript string, failOnError bool) return nil } -func runDefaultLint(npmExecutor npm.Executor, utils lintUtils, failOnError bool) error { +func runDefaultLint(npmExecutor npm.Executor, utils lintUtils, failOnError bool, outputFormat string, outputFileName string) error { execRunner := utils.getExecRunner() eslintConfigs := findEslintConfigs(utils) @@ -145,7 +146,16 @@ func runDefaultLint(npmExecutor npm.Executor, utils lintUtils, failOnError bool) if dir != "." { lintPattern = dir + "/**/*.js" } - err = execRunner.RunExecutable("npx", "eslint", lintPattern, "-f", "checkstyle", "-o", "./"+strconv.Itoa(i)+"_defaultlint.xml", "--ignore-pattern", "node_modules/", "--ignore-pattern", ".eslintrc.js") + + args := prepareArgs([]string{ + "eslint", + lintPattern, + "-f", outputFormat, + "--ignore-pattern", "node_modules/", + "--ignore-pattern", ".eslintrc.js", + }, fmt.Sprintf("./%s_%%s", strconv.Itoa(i)), outputFileName) + + err = execRunner.RunExecutable("npx", args...) if err != nil { if failOnError { return fmt.Errorf("Lint execution failed. This might be the result of severe linting findings, problems with the provided ESLint configuration (%s), or another issue. Please examine the linting results in the UI or in %s, if available, or the log above. 
", config, strconv.Itoa(i)+"_defaultlint.xml") @@ -160,7 +170,18 @@ func runDefaultLint(npmExecutor npm.Executor, utils lintUtils, failOnError bool) // Ignore possible errors when invoking ESLint to not fail the pipeline based on linting results _ = execRunner.RunExecutable("npm", "install", "eslint@^7.0.0", "typescript@^3.7.4", "@typescript-eslint/parser@^3.0.0", "@typescript-eslint/eslint-plugin@^3.0.0") - _ = execRunner.RunExecutable("npx", "--no-install", "eslint", ".", "--ext", ".js,.jsx,.ts,.tsx", "-c", ".pipeline/.eslintrc.json", "-f", "checkstyle", "-o", "./defaultlint.xml", "--ignore-pattern", ".eslintrc.js") + + args := prepareArgs([]string{ + "--no-install", + "eslint", + ".", + "--ext", ".js,.jsx,.ts,.tsx", + "-c", ".pipeline/.eslintrc.json", + "-f", outputFormat, + "--ignore-pattern", ".eslintrc.js", + }, "./%s", outputFileName) + + _ = execRunner.RunExecutable("npx", args...) } return nil } @@ -186,3 +207,11 @@ func findEslintConfigs(utils lintUtils) []string { } return eslintConfigs } + +func prepareArgs(defaultArgs []string, outputFileNamePattern, outputFileName string) []string { + if outputFileName != "" { // in this case we omit the -o flag and output will go to the log + defaultArgs = append(defaultArgs, "-o", fmt.Sprintf(outputFileNamePattern, outputFileName)) + } + return defaultArgs + +} diff --git a/cmd/npmExecuteLint_generated.go b/cmd/npmExecuteLint_generated.go index 7eb6f20d7a..d7aa4b60ba 100644 --- a/cmd/npmExecuteLint_generated.go +++ b/cmd/npmExecuteLint_generated.go @@ -20,6 +20,8 @@ type npmExecuteLintOptions struct { RunScript string `json:"runScript,omitempty"` FailOnError bool `json:"failOnError,omitempty"` DefaultNpmRegistry string `json:"defaultNpmRegistry,omitempty"` + OutputFormat string `json:"outputFormat,omitempty"` + OutputFileName string `json:"outputFileName,omitempty"` } // NpmExecuteLintCommand Execute ci-lint script on all npm packages in a project or execute default linting @@ -120,6 +122,8 @@ func addNpmExecuteLintFlags(cmd *cobra.Command, stepConfig *npmExecuteLintOption cmd.Flags().StringVar(&stepConfig.RunScript, "runScript", `ci-lint`, "List of additional run scripts to execute from package.json.") cmd.Flags().BoolVar(&stepConfig.FailOnError, "failOnError", false, "Defines the behavior in case linting errors are found.") cmd.Flags().StringVar(&stepConfig.DefaultNpmRegistry, "defaultNpmRegistry", os.Getenv("PIPER_defaultNpmRegistry"), "URL of the npm registry to use. Defaults to https://registry.npmjs.org/") + cmd.Flags().StringVar(&stepConfig.OutputFormat, "outputFormat", `checkstyle`, "eslint output format, e.g. stylish, checkstyle") + cmd.Flags().StringVar(&stepConfig.OutputFileName, "outputFileName", `defaultlint.xml`, "name of the output file. There might be a 'N_' prefix where 'N' is a number. 
When the empty string is provided, we will print to console") } @@ -170,6 +174,24 @@ func npmExecuteLintMetadata() config.StepData { Aliases: []config.Alias{{Name: "npm/defaultNpmRegistry"}}, Default: os.Getenv("PIPER_defaultNpmRegistry"), }, + { + Name: "outputFormat", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "GENERAL", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{{Name: "npm/outputFormat"}}, + Default: `checkstyle`, + }, + { + Name: "outputFileName", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "GENERAL", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{{Name: "npm/outputFormat"}}, + Default: `defaultlint.xml`, + }, }, }, Containers: []config.Container{ diff --git a/cmd/npmExecuteLint_test.go b/cmd/npmExecuteLint_test.go index 50f16120ef..8092a0b357 100644 --- a/cmd/npmExecuteLint_test.go +++ b/cmd/npmExecuteLint_test.go @@ -32,7 +32,7 @@ func newLintMockUtilsBundle() mockLintUtilsBundle { } func TestNpmExecuteLint(t *testing.T) { - defaultConfig := npmExecuteLintOptions{RunScript: "ci-lint"} + defaultConfig := npmExecuteLintOptions{RunScript: "ci-lint", OutputFormat: "checkstyle", OutputFileName: "defaultlint.xml"} t.Run("Call with ci-lint script and one package.json", func(t *testing.T) { lintUtils := newLintMockUtilsBundle() @@ -67,7 +67,42 @@ func TestNpmExecuteLint(t *testing.T) { if assert.NoError(t, err) { if assert.Equal(t, 2, len(lintUtils.execRunner.Calls)) { - assert.Equal(t, mock.ExecCall{Exec: "npx", Params: []string{"eslint", ".", "-f", "checkstyle", "-o", "./0_defaultlint.xml", "--ignore-pattern", "node_modules/", "--ignore-pattern", ".eslintrc.js"}}, lintUtils.execRunner.Calls[1]) + assert.Equal(t, mock.ExecCall{Exec: "npx", Params: []string{ + "eslint", + ".", + "-f", "checkstyle", + "--ignore-pattern", "node_modules/", + "--ignore-pattern", ".eslintrc.js", + "-o", "./0_defaultlint.xml"}}, lintUtils.execRunner.Calls[1]) + } + } + }) + + t.Run("Call default with ESLint config from user - no redirect to file, stylish format", func(t *testing.T) { + lintUtils := newLintMockUtilsBundle() + lintUtils.AddFile("package.json", []byte("{\"name\": \"Test\" }")) + lintUtils.AddFile(".eslintrc.json", []byte("{\"name\": \"Test\" }")) + + config := npmExecuteLintOptions{RunScript: "ci-lint", OutputFormat: "stylish", OutputFileName: ""} + config.DefaultNpmRegistry = "foo.bar" + + npmUtils := newNpmMockUtilsBundle() + npmUtils.execRunner = lintUtils.execRunner + npmExecutor := npm.Execute{Utils: &npmUtils, Options: npm.ExecutorOptions{}} + + err := runNpmExecuteLint(&npmExecutor, &lintUtils, &config) + + if assert.NoError(t, err) { + if assert.Equal(t, 2, len(lintUtils.execRunner.Calls)) { + assert.Equal(t, mock.ExecCall{Exec: "npx", Params: []string{ + "eslint", + ".", + "-f", "stylish", + "--ignore-pattern", + "node_modules/", + "--ignore-pattern", ".eslintrc.js", + // no -o, --output-file in this case. 
+ }}, lintUtils.execRunner.Calls[1]) } } }) @@ -89,8 +124,61 @@ func TestNpmExecuteLint(t *testing.T) { if assert.NoError(t, err) { if assert.Equal(t, 3, len(lintUtils.execRunner.Calls)) { - assert.Equal(t, mock.ExecCall{Exec: "npx", Params: []string{"eslint", ".", "-f", "checkstyle", "-o", "./0_defaultlint.xml", "--ignore-pattern", "node_modules/", "--ignore-pattern", ".eslintrc.js"}}, lintUtils.execRunner.Calls[1]) - assert.Equal(t, mock.ExecCall{Exec: "npx", Params: []string{"eslint", "src/**/*.js", "-f", "checkstyle", "-o", "./1_defaultlint.xml", "--ignore-pattern", "node_modules/", "--ignore-pattern", ".eslintrc.js"}}, lintUtils.execRunner.Calls[2]) + assert.Equal(t, mock.ExecCall{Exec: "npx", Params: []string{ + "eslint", + ".", + "-f", "checkstyle", + "--ignore-pattern", "node_modules/", + "--ignore-pattern", ".eslintrc.js", + "-o", "./0_defaultlint.xml", + }}, lintUtils.execRunner.Calls[1]) + assert.Equal(t, mock.ExecCall{Exec: "npx", Params: []string{ + "eslint", + "src/**/*.js", + "-f", "checkstyle", + "--ignore-pattern", "node_modules/", + "--ignore-pattern", ".eslintrc.js", + "-o", "./1_defaultlint.xml", + }}, lintUtils.execRunner.Calls[2]) + } + } + }) + + t.Run("Call default with two ESLint configs from user - no redirect to file, stylish format", func(t *testing.T) { + lintUtils := newLintMockUtilsBundle() + lintUtils.AddFile("package.json", []byte("{\"name\": \"Test\" }")) + lintUtils.AddFile(".eslintrc.json", []byte("{\"name\": \"Test\" }")) + lintUtils.AddFile(filepath.Join("src", ".eslintrc.json"), []byte("{\"name\": \"Test\" }")) + + config := defaultConfig + config.DefaultNpmRegistry = "foo.bar" + config.OutputFormat = "stylish" + config.OutputFileName = "" + + npmUtils := newNpmMockUtilsBundle() + npmUtils.execRunner = lintUtils.execRunner + npmExecutor := npm.Execute{Utils: &npmUtils, Options: npm.ExecutorOptions{}} + + err := runNpmExecuteLint(&npmExecutor, &lintUtils, &config) + + if assert.NoError(t, err) { + if assert.Equal(t, 3, len(lintUtils.execRunner.Calls)) { + assert.Equal(t, mock.ExecCall{Exec: "npx", Params: []string{ + "eslint", + ".", + "-f", "stylish", + "--ignore-pattern", "node_modules/", + "--ignore-pattern", ".eslintrc.js", + // no -o --output-file in this case. + }}, lintUtils.execRunner.Calls[1]) + assert.Equal(t, mock.ExecCall{Exec: "npx", Params: []string{ + "eslint", + "src/**/*.js", + "-f", "stylish", + "--ignore-pattern", "node_modules/", + "--ignore-pattern", ".eslintrc.js", + // no -o --output-file in this case. 
+ }}, lintUtils.execRunner.Calls[2]) } } }) @@ -111,7 +199,50 @@ func TestNpmExecuteLint(t *testing.T) { if assert.NoError(t, err) { if assert.Equal(t, 3, len(lintUtils.execRunner.Calls)) { assert.Equal(t, mock.ExecCall{Exec: "npm", Params: []string{"install", "eslint@^7.0.0", "typescript@^3.7.4", "@typescript-eslint/parser@^3.0.0", "@typescript-eslint/eslint-plugin@^3.0.0"}}, lintUtils.execRunner.Calls[1]) - assert.Equal(t, mock.ExecCall{Exec: "npx", Params: []string{"--no-install", "eslint", ".", "--ext", ".js,.jsx,.ts,.tsx", "-c", ".pipeline/.eslintrc.json", "-f", "checkstyle", "-o", "./defaultlint.xml", "--ignore-pattern", ".eslintrc.js"}}, lintUtils.execRunner.Calls[2]) + assert.Equal(t, mock.ExecCall{Exec: "npx", Params: []string{ + "--no-install", + "eslint", + ".", + "--ext", + ".js,.jsx,.ts,.tsx", + "-c", ".pipeline/.eslintrc.json", + "-f", "checkstyle", + "--ignore-pattern", ".eslintrc.js", + "-o", "./defaultlint.xml", + }}, lintUtils.execRunner.Calls[2]) + } + } + }) + + t.Run("Default without ESLint config - no redirect to file, stylish format", func(t *testing.T) { + lintUtils := newLintMockUtilsBundle() + lintUtils.AddFile("package.json", []byte("{\"name\": \"Test\" }")) + + config := defaultConfig + config.DefaultNpmRegistry = "foo.bar" + config.OutputFormat = "stylish" + config.OutputFileName = "" + + npmUtils := newNpmMockUtilsBundle() + npmUtils.execRunner = lintUtils.execRunner + npmExecutor := npm.Execute{Utils: &npmUtils, Options: npm.ExecutorOptions{}} + + err := runNpmExecuteLint(&npmExecutor, &lintUtils, &config) + + if assert.NoError(t, err) { + if assert.Equal(t, 3, len(lintUtils.execRunner.Calls)) { + assert.Equal(t, mock.ExecCall{Exec: "npm", Params: []string{"install", "eslint@^7.0.0", "typescript@^3.7.4", "@typescript-eslint/parser@^3.0.0", "@typescript-eslint/eslint-plugin@^3.0.0"}}, lintUtils.execRunner.Calls[1]) + assert.Equal(t, mock.ExecCall{Exec: "npx", Params: []string{ + "--no-install", + "eslint", + ".", + "--ext", + ".js,.jsx,.ts,.tsx", + "-c", ".pipeline/.eslintrc.json", + "-f", "stylish", + "--ignore-pattern", ".eslintrc.js", + // no -o --output-file in this case. + }}, lintUtils.execRunner.Calls[2]) } } }) @@ -143,10 +274,44 @@ func TestNpmExecuteLint(t *testing.T) { lintUtils := newLintMockUtilsBundle() lintUtils.AddFile("package.json", []byte("{\"name\": \"Test\" }")) lintUtils.AddFile(".eslintrc.json", []byte("{\"name\": \"Test\" }")) - lintUtils.execRunner = &mock.ExecMockRunner{ShouldFailOnCommand: map[string]error{"eslint . -f checkstyle -o ./0_defaultlint.xml --ignore-pattern node_modules/ --ignore-pattern .eslintrc.js": errors.New("exit 1")}} + lintUtils.execRunner = &mock.ExecMockRunner{ShouldFailOnCommand: map[string]error{ + "eslint . -f checkstyle --ignore-pattern node_modules/ --ignore-pattern .eslintrc.js -o ./0_defaultlint.xml": errors.New("exit 1")}} + + config := defaultConfig + config.FailOnError = true + config.DefaultNpmRegistry = "foo.bar" + + npmUtils := newNpmMockUtilsBundle() + npmUtils.execRunner = lintUtils.execRunner + npmExecutor := npm.Execute{Utils: &npmUtils, Options: npm.ExecutorOptions{}} + + err := runNpmExecuteLint(&npmExecutor, &lintUtils, &config) + + if assert.EqualError(t, err, "Lint execution failed. This might be the result of severe linting findings, problems with the provided ESLint configuration (.eslintrc.json), or another issue. Please examine the linting results in the UI or in 0_defaultlint.xml, if available, or the log above. 
") { + if assert.Equal(t, 2, len(lintUtils.execRunner.Calls)) { + assert.Equal(t, mock.ExecCall{Exec: "npx", Params: []string{ + "eslint", + ".", + "-f", "checkstyle", + "--ignore-pattern", "node_modules/", + "--ignore-pattern", ".eslintrc.js", + "-o", "./0_defaultlint.xml", + }}, lintUtils.execRunner.Calls[1]) + } + } + }) + + t.Run("Call default with ESLint config from user and failOnError - no redirect to file, stylish format", func(t *testing.T) { + lintUtils := newLintMockUtilsBundle() + lintUtils.AddFile("package.json", []byte("{\"name\": \"Test\" }")) + lintUtils.AddFile(".eslintrc.json", []byte("{\"name\": \"Test\" }")) + lintUtils.execRunner = &mock.ExecMockRunner{ShouldFailOnCommand: map[string]error{ + "eslint . -f stylish --ignore-pattern node_modules/ --ignore-pattern .eslintrc.js": errors.New("exit 1")}} config := defaultConfig config.FailOnError = true + config.OutputFormat = "stylish" + config.OutputFileName = "" config.DefaultNpmRegistry = "foo.bar" npmUtils := newNpmMockUtilsBundle() @@ -157,7 +322,14 @@ func TestNpmExecuteLint(t *testing.T) { if assert.EqualError(t, err, "Lint execution failed. This might be the result of severe linting findings, problems with the provided ESLint configuration (.eslintrc.json), or another issue. Please examine the linting results in the UI or in 0_defaultlint.xml, if available, or the log above. ") { if assert.Equal(t, 2, len(lintUtils.execRunner.Calls)) { - assert.Equal(t, mock.ExecCall{Exec: "npx", Params: []string{"eslint", ".", "-f", "checkstyle", "-o", "./0_defaultlint.xml", "--ignore-pattern", "node_modules/", "--ignore-pattern", ".eslintrc.js"}}, lintUtils.execRunner.Calls[1]) + assert.Equal(t, mock.ExecCall{Exec: "npx", Params: []string{ + "eslint", + ".", + "-f", "stylish", + "--ignore-pattern", "node_modules/", + "--ignore-pattern", ".eslintrc.js", + // no -o, --output-file in this case. + }}, lintUtils.execRunner.Calls[1]) } } }) diff --git a/resources/metadata/npmExecuteLint.yaml b/resources/metadata/npmExecuteLint.yaml index aa1822271f..960890f2d5 100644 --- a/resources/metadata/npmExecuteLint.yaml +++ b/resources/metadata/npmExecuteLint.yaml @@ -45,6 +45,30 @@ spec: mandatory: false aliases: - name: npm/defaultNpmRegistry + - name: outputFormat + type: string + description: eslint output format, e.g. stylish, checkstyle + scope: + - PARAMETERS + - GENERAL + - STAGES + - STEPS + mandatory: false + default: checkstyle + aliases: + - name: npm/outputFormat + - name: outputFileName + type: string + description: name of the output file. There might be a 'N_' prefix where 'N' is a number. 
When the empty string is provided, we will print to console + scope: + - PARAMETERS + - GENERAL + - STAGES + - STEPS + mandatory: false + default: defaultlint.xml + aliases: + - name: npm/outputFormat containers: - name: node image: node:lts-buster From 6eb4c2e72d45ef3a6fe2a3f394e6bd927d1b83b7 Mon Sep 17 00:00:00 2001 From: Daria Kuznetsova Date: Tue, 20 Jun 2023 14:50:28 +0200 Subject: [PATCH 15/36] fix(codeqlExecuteScan): added waiting for the SARIF file upload (#4409) * added waiting for the sarif file uploaded & tests * increased polling time, added timeout for waiting response from server & tests * fixed handling error while waiting sarif uploaded * added params for checking sarif uploaded & refactor * added test logs * fixed logs and test * added returning missed error * changed params descriptions and server response error processing processing * fixed retrying logic * increased polling timeout params & refactored --- cmd/codeqlExecuteScan.go | 57 ++++++++++++-- cmd/codeqlExecuteScan_generated.go | 22 ++++++ cmd/codeqlExecuteScan_test.go | 93 +++++++++++++++++++++-- pkg/codeql/codeql_test.go | 10 --- pkg/codeql/sarif_upload.go | 68 +++++++++++++++++ resources/metadata/codeqlExecuteScan.yaml | 16 ++++ 6 files changed, 244 insertions(+), 22 deletions(-) create mode 100644 pkg/codeql/sarif_upload.go diff --git a/cmd/codeqlExecuteScan.go b/cmd/codeqlExecuteScan.go index b82ec51b10..271b8d65a7 100644 --- a/cmd/codeqlExecuteScan.go +++ b/cmd/codeqlExecuteScan.go @@ -1,11 +1,13 @@ package cmd import ( + "bytes" "fmt" "os" "path/filepath" "regexp" "strings" + "time" "github.com/SAP/jenkins-library/pkg/codeql" "github.com/SAP/jenkins-library/pkg/command" @@ -36,6 +38,9 @@ type codeqlExecuteScanUtilsBundle struct { *piperutils.Files } +const sarifUploadComplete = "complete" +const sarifUploadFailed = "failed" + func newCodeqlExecuteScanUtils() codeqlExecuteScanUtils { utils := codeqlExecuteScanUtilsBundle{ Command: &command.Command{}, @@ -160,7 +165,7 @@ func getToken(config *codeqlExecuteScanOptions) (bool, string) { return false, "" } -func uploadResults(config *codeqlExecuteScanOptions, repoInfo RepoInfo, token string, utils codeqlExecuteScanUtils) error { +func uploadResults(config *codeqlExecuteScanOptions, repoInfo RepoInfo, token string, utils codeqlExecuteScanUtils) (string, error) { cmd := []string{"github", "upload-results", "--sarif=" + filepath.Join(config.ModulePath, "target", "codeqlReport.sarif")} if config.GithubToken != "" { @@ -185,13 +190,49 @@ func uploadResults(config *codeqlExecuteScanOptions, repoInfo RepoInfo, token st //if no git pramas are passed(commitId, reference, serverUrl, repository), then codeql tries to auto populate it based on git information of the checkout repository. //It also depends on the orchestrator. Some orchestrator keep git information and some not. 
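For orientation, the SARIF polling introduced below is bounded by the two new step parameters `sarifCheckMaxRetries` and `sarifCheckRetryInterval`; with the defaults from this patch (10 retries, 30 seconds) the step waits at most about 10 × 30 s = 5 minutes for GitHub to finish processing the uploaded SARIF file. A minimal sketch of how this could be tuned, assuming the usual Piper `.pipeline/config.yml` layout (the file path and the `steps:` structure are assumptions, the parameter names and defaults are taken from this patch):

```yaml
steps:
  codeqlExecuteScan:
    uploadResults: true
    checkForCompliance: true
    sarifCheckMaxRetries: 10     # default from this patch; number of status polls
    sarifCheckRetryInterval: 30  # default from this patch; seconds between polls
```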
+ + var buffer bytes.Buffer + utils.Stdout(&buffer) err := execute(utils, cmd, GeneralConfig.Verbose) if err != nil { log.Entry().Error("failed to upload sarif results") - return err + return "", err } + utils.Stdout(log.Writer()) - return nil + url := buffer.String() + return strings.TrimSpace(url), nil +} + +func waitSarifUploaded(config *codeqlExecuteScanOptions, codeqlSarifUploader codeql.CodeqlSarifUploader) error { + maxRetries := config.SarifCheckMaxRetries + retryInterval := time.Duration(config.SarifCheckRetryInterval) * time.Second + + log.Entry().Info("waiting for the SARIF to upload") + i := 1 + for { + sarifStatus, err := codeqlSarifUploader.GetSarifStatus() + if err != nil { + return err + } + log.Entry().Infof("the SARIF processing status: %s", sarifStatus.ProcessingStatus) + if sarifStatus.ProcessingStatus == sarifUploadComplete { + return nil + } + if sarifStatus.ProcessingStatus == sarifUploadFailed { + for e := range sarifStatus.Errors { + log.Entry().Error(e) + } + return errors.New("failed to upload sarif file") + } + if i <= maxRetries { + log.Entry().Infof("still waiting for the SARIF to upload: retrying in %d seconds... (retry %d/%d)", config.SarifCheckRetryInterval, i, maxRetries) + time.Sleep(retryInterval) + i++ + continue + } + return errors.New("failed to check sarif uploading status: max retries reached") + } } func runCodeqlExecuteScan(config *codeqlExecuteScanOptions, telemetryData *telemetry.CustomData, utils codeqlExecuteScanUtils) ([]piperutils.Path, error) { @@ -275,11 +316,15 @@ func runCodeqlExecuteScan(config *codeqlExecuteScanOptions, telemetryData *telem return reports, errors.New("failed running upload-results as githubToken was not specified") } - err = uploadResults(config, repoInfo, token, utils) + sarifUrl, err := uploadResults(config, repoInfo, token, utils) if err != nil { - return reports, err } + codeqlSarifUploader := codeql.NewCodeqlSarifUploaderInstance(sarifUrl, token) + err = waitSarifUploaded(config, &codeqlSarifUploader) + if err != nil { + return reports, errors.Wrap(err, "failed to upload sarif") + } if config.CheckForCompliance { codeqlScanAuditInstance := codeql.NewCodeqlScanAuditInstance(repoInfo.serverUrl, repoInfo.owner, repoInfo.repo, token, []string{}) @@ -294,7 +339,7 @@ func runCodeqlExecuteScan(config *codeqlExecuteScanOptions, telemetryData *telem return reports, errors.Wrap(err, "failed to write json compliance report") } - unaudited := (scanResults.Total - scanResults.Audited) + unaudited := scanResults.Total - scanResults.Audited if unaudited > config.VulnerabilityThresholdTotal { msg := fmt.Sprintf("Your repository %v with ref %v is not compliant. 
Total unaudited issues are %v which is greater than the VulnerabilityThresholdTotal count %v", repoUrl, repoInfo.ref, unaudited, config.VulnerabilityThresholdTotal) return reports, errors.Errorf(msg) diff --git a/cmd/codeqlExecuteScan_generated.go b/cmd/codeqlExecuteScan_generated.go index 650d42cb3e..618c416c49 100644 --- a/cmd/codeqlExecuteScan_generated.go +++ b/cmd/codeqlExecuteScan_generated.go @@ -28,6 +28,8 @@ type codeqlExecuteScanOptions struct { Database string `json:"database,omitempty"` QuerySuite string `json:"querySuite,omitempty"` UploadResults bool `json:"uploadResults,omitempty"` + SarifCheckMaxRetries int `json:"sarifCheckMaxRetries,omitempty"` + SarifCheckRetryInterval int `json:"sarifCheckRetryInterval,omitempty"` Threads string `json:"threads,omitempty"` Ram string `json:"ram,omitempty"` AnalyzedRef string `json:"analyzedRef,omitempty"` @@ -183,6 +185,8 @@ func addCodeqlExecuteScanFlags(cmd *cobra.Command, stepConfig *codeqlExecuteScan cmd.Flags().StringVar(&stepConfig.Database, "database", `codeqlDB`, "Path to the CodeQL database to create. This directory will be created, and must not already exist.") cmd.Flags().StringVar(&stepConfig.QuerySuite, "querySuite", os.Getenv("PIPER_querySuite"), "The name of a CodeQL query suite. If omitted, the default query suite for the language of the database being analyzed will be used.") cmd.Flags().BoolVar(&stepConfig.UploadResults, "uploadResults", false, "Allows you to upload codeql SARIF results to your github project. You will need to set githubToken for this.") + cmd.Flags().IntVar(&stepConfig.SarifCheckMaxRetries, "sarifCheckMaxRetries", 10, "Maximum number of retries when waiting for the server to finish processing the SARIF upload. Only relevant, if checkForCompliance is enabled.") + cmd.Flags().IntVar(&stepConfig.SarifCheckRetryInterval, "sarifCheckRetryInterval", 30, "") cmd.Flags().StringVar(&stepConfig.Threads, "threads", `0`, "Use this many threads for the codeql operations.") cmd.Flags().StringVar(&stepConfig.Ram, "ram", os.Getenv("PIPER_ram"), "Use this much ram (MB) for the codeql operations.") cmd.Flags().StringVar(&stepConfig.AnalyzedRef, "analyzedRef", os.Getenv("PIPER_analyzedRef"), "Name of the ref that was analyzed.") @@ -296,6 +300,24 @@ func codeqlExecuteScanMetadata() config.StepData { Aliases: []config.Alias{}, Default: false, }, + { + Name: "sarifCheckMaxRetries", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "int", + Mandatory: false, + Aliases: []config.Alias{}, + Default: 10, + }, + { + Name: "sarifCheckRetryInterval", + ResourceRef: []config.ResourceReference{}, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "int", + Mandatory: false, + Aliases: []config.Alias{}, + Default: 30, + }, { Name: "threads", ResourceRef: []config.ResourceReference{}, diff --git a/cmd/codeqlExecuteScan_test.go b/cmd/codeqlExecuteScan_test.go index a19e53243b..b0784a5158 100644 --- a/cmd/codeqlExecuteScan_test.go +++ b/cmd/codeqlExecuteScan_test.go @@ -6,9 +6,12 @@ package cmd import ( "fmt" "testing" + "time" + "github.com/SAP/jenkins-library/pkg/codeql" "github.com/SAP/jenkins-library/pkg/mock" "github.com/SAP/jenkins-library/pkg/orchestrator" + "github.com/pkg/errors" "github.com/stretchr/testify/assert" ) @@ -45,12 +48,6 @@ func TestRunCodeqlExecuteScan(t *testing.T) { assert.Error(t, err) }) - t.Run("Check for compliace fails as repository not specified", func(t *testing.T) { - config := codeqlExecuteScanOptions{BuildTool: "maven", ModulePath: "./", 
UploadResults: true, GithubToken: "test", CheckForCompliance: true} - _, err := runCodeqlExecuteScan(&config, nil, newCodeqlExecuteScanTestsUtils()) - assert.Error(t, err) - }) - t.Run("Custom buildtool", func(t *testing.T) { config := codeqlExecuteScanOptions{BuildTool: "custom", Language: "javascript", ModulePath: "./"} _, err := runCodeqlExecuteScan(&config, nil, newCodeqlExecuteScanTestsUtils()) @@ -339,3 +336,87 @@ func TestCreateToolRecordCodeql(t *testing.T) { assert.Error(t, err) }) } + +func TestWaitSarifUploaded(t *testing.T) { + t.Parallel() + config := codeqlExecuteScanOptions{SarifCheckRetryInterval: 1, SarifCheckMaxRetries: 5} + t.Run("Fast complete upload", func(t *testing.T) { + codeqlScanAuditMock := CodeqlSarifUploaderMock{counter: 0} + timerStart := time.Now() + err := waitSarifUploaded(&config, &codeqlScanAuditMock) + assert.Less(t, time.Now().Sub(timerStart), time.Second) + assert.NoError(t, err) + }) + t.Run("Long completed upload", func(t *testing.T) { + codeqlScanAuditMock := CodeqlSarifUploaderMock{counter: 2} + timerStart := time.Now() + err := waitSarifUploaded(&config, &codeqlScanAuditMock) + assert.GreaterOrEqual(t, time.Now().Sub(timerStart), time.Second*2) + assert.NoError(t, err) + }) + t.Run("Failed upload", func(t *testing.T) { + codeqlScanAuditMock := CodeqlSarifUploaderMock{counter: -1} + err := waitSarifUploaded(&config, &codeqlScanAuditMock) + assert.Error(t, err) + assert.ErrorContains(t, err, "failed to upload sarif file") + }) + t.Run("Error while checking sarif uploading", func(t *testing.T) { + codeqlScanAuditErrorMock := CodeqlSarifUploaderErrorMock{counter: -1} + err := waitSarifUploaded(&config, &codeqlScanAuditErrorMock) + assert.Error(t, err) + assert.ErrorContains(t, err, "test error") + }) + t.Run("Completed upload after getting errors from server", func(t *testing.T) { + codeqlScanAuditErrorMock := CodeqlSarifUploaderErrorMock{counter: 3} + err := waitSarifUploaded(&config, &codeqlScanAuditErrorMock) + assert.NoError(t, err) + }) + t.Run("Max retries reached", func(t *testing.T) { + codeqlScanAuditErrorMock := CodeqlSarifUploaderErrorMock{counter: 6} + err := waitSarifUploaded(&config, &codeqlScanAuditErrorMock) + assert.Error(t, err) + assert.ErrorContains(t, err, "max retries reached") + }) +} + +type CodeqlSarifUploaderMock struct { + counter int +} + +func (c *CodeqlSarifUploaderMock) GetSarifStatus() (codeql.SarifFileInfo, error) { + if c.counter == 0 { + return codeql.SarifFileInfo{ + ProcessingStatus: "complete", + Errors: nil, + }, nil + } + if c.counter == -1 { + return codeql.SarifFileInfo{ + ProcessingStatus: "failed", + Errors: []string{"upload error"}, + }, nil + } + c.counter-- + return codeql.SarifFileInfo{ + ProcessingStatus: "pending", + Errors: nil, + }, nil +} + +type CodeqlSarifUploaderErrorMock struct { + counter int +} + +func (c *CodeqlSarifUploaderErrorMock) GetSarifStatus() (codeql.SarifFileInfo, error) { + if c.counter == -1 { + return codeql.SarifFileInfo{}, errors.New("test error") + } + if c.counter == 0 { + return codeql.SarifFileInfo{ + ProcessingStatus: "complete", + Errors: nil, + }, nil + } + c.counter-- + return codeql.SarifFileInfo{ProcessingStatus: "Service unavailable"}, nil +} diff --git a/pkg/codeql/codeql_test.go b/pkg/codeql/codeql_test.go index 07968d5856..ee548973dd 100644 --- a/pkg/codeql/codeql_test.go +++ b/pkg/codeql/codeql_test.go @@ -56,12 +56,6 @@ func (g *githubCodeqlScanningMock) ListAlertsForRepo(ctx context.Context, owner, return alerts, &response, nil } -func (g 
*githubCodeqlScanningMock) ListAnalysesForRepo(ctx context.Context, owner, repo string, opts *github.AnalysesListOptions) ([]*github.ScanningAnalysis, *github.Response, error) { - resultsCount := 3 - analysis := []*github.ScanningAnalysis{{ResultsCount: &resultsCount}} - return analysis, nil, nil -} - type githubCodeqlScanningErrorMock struct { } @@ -69,10 +63,6 @@ func (g *githubCodeqlScanningErrorMock) ListAlertsForRepo(ctx context.Context, o return []*github.Alert{}, nil, errors.New("Some error") } -func (g *githubCodeqlScanningErrorMock) ListAnalysesForRepo(ctx context.Context, owner, repo string, opts *github.AnalysesListOptions) ([]*github.ScanningAnalysis, *github.Response, error) { - return []*github.ScanningAnalysis{}, nil, errors.New("Some error") -} - func TestGetVulnerabilitiesFromClient(t *testing.T) { ctx := context.Background() t.Parallel() diff --git a/pkg/codeql/sarif_upload.go b/pkg/codeql/sarif_upload.go new file mode 100644 index 0000000000..3b241b7f38 --- /dev/null +++ b/pkg/codeql/sarif_upload.go @@ -0,0 +1,68 @@ +package codeql + +import ( + "encoding/json" + "io" + "net/http" +) + +type CodeqlSarifUploader interface { + GetSarifStatus() (SarifFileInfo, error) +} + +func NewCodeqlSarifUploaderInstance(url, token string) CodeqlSarifUploaderInstance { + return CodeqlSarifUploaderInstance{ + url: url, + token: token, + } +} + +type CodeqlSarifUploaderInstance struct { + url string + token string +} + +func (codeqlSarifUploader *CodeqlSarifUploaderInstance) GetSarifStatus() (SarifFileInfo, error) { + return getSarifUploadingStatus(codeqlSarifUploader.url, codeqlSarifUploader.token) +} + +type SarifFileInfo struct { + ProcessingStatus string `json:"processing_status"` + Errors []string `json:"errors"` +} + +const internalServerError = "Internal server error" + +func getSarifUploadingStatus(sarifURL, token string) (SarifFileInfo, error) { + client := http.Client{} + req, err := http.NewRequest("GET", sarifURL, nil) + if err != nil { + return SarifFileInfo{}, err + } + req.Header.Add("Authorization", "Bearer "+token) + req.Header.Add("Accept", "application/vnd.github+json") + req.Header.Add("X-GitHub-Api-Version", "2022-11-28") + + resp, err := client.Do(req) + if err != nil { + return SarifFileInfo{}, err + } + defer resp.Body.Close() + + if resp.StatusCode == http.StatusServiceUnavailable || resp.StatusCode == http.StatusBadGateway || + resp.StatusCode == http.StatusGatewayTimeout { + return SarifFileInfo{ProcessingStatus: internalServerError}, nil + } + + body, err := io.ReadAll(resp.Body) + if err != nil { + return SarifFileInfo{}, err + } + + sarifInfo := SarifFileInfo{} + err = json.Unmarshal(body, &sarifInfo) + if err != nil { + return SarifFileInfo{}, err + } + return sarifInfo, nil +} diff --git a/resources/metadata/codeqlExecuteScan.yaml b/resources/metadata/codeqlExecuteScan.yaml index c6fddd78dd..ad593ada09 100644 --- a/resources/metadata/codeqlExecuteScan.yaml +++ b/resources/metadata/codeqlExecuteScan.yaml @@ -104,6 +104,22 @@ spec: - STAGES - STEPS default: false + - name: sarifCheckMaxRetries + type: int + description: "Maximum number of retries when waiting for the server to finish processing the SARIF upload. Only relevant, if checkForCompliance is enabled." + scope: + - PARAMETERS + - STAGES + - STEPS + default: 10 + - name: sarifCheckRetryInterval + type: int + description: "Interval in seconds between retries when waiting for the server to finish processing the SARIF upload. Only relevant, if checkForCompliance is enabled." 
+ scope: + - PARAMETERS + - STAGES + - STEPS + default: 30 - name: threads type: string description: "Use this many threads for the codeql operations." From f81b47372393cb8005f61f0c6350f0c40882df04 Mon Sep 17 00:00:00 2001 From: Dmitrii Pavlukhin Date: Wed, 21 Jun 2023 09:18:00 +0300 Subject: [PATCH 16/36] fix(detectExecuteScan): Fix license incorrect fail with FailOn parameter (#4415) Co-authored-by: Andrei Kireev --- cmd/detectExecuteScan.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/detectExecuteScan.go b/cmd/detectExecuteScan.go index 5c8610db33..845eb2b6ee 100644 --- a/cmd/detectExecuteScan.go +++ b/cmd/detectExecuteScan.go @@ -194,7 +194,7 @@ func runDetect(ctx context.Context, config detectExecuteScanOptions, utils detec if strings.Contains(reportingErr.Error(), "License Policy Violations found") { log.Entry().Errorf("License Policy Violations found") log.SetErrorCategory(log.ErrorCompliance) - if err == nil && !piperutils.ContainsStringPart(config.FailOn, "NONE") { + if err == nil && piperutils.ContainsStringPart(config.FailOn, "CRITICAL") { err = errors.New("License Policy Violations found") } } else { From ae4550d0dd3c20f11838f7c2290f2cb1f62c51fd Mon Sep 17 00:00:00 2001 From: Andrei Kireev Date: Fri, 23 Jun 2023 15:24:03 +0300 Subject: [PATCH 17/36] fix(detectExecuteScan) Fixed problem with duplication of maven command (#4412) * Fixed problem with duplication of maven command * go fmt --- cmd/detectExecuteScan.go | 2 +- cmd/detectExecuteScan_test.go | 40 +++++++++++++++++++++++++++++++++++ 2 files changed, 41 insertions(+), 1 deletion(-) diff --git a/cmd/detectExecuteScan.go b/cmd/detectExecuteScan.go index 845eb2b6ee..e80e724918 100644 --- a/cmd/detectExecuteScan.go +++ b/cmd/detectExecuteScan.go @@ -412,7 +412,7 @@ func addDetectArgs(args []string, config detectExecuteScanOptions, utils detectU mavenArgs = append(mavenArgs, fmt.Sprintf("-Dmaven.repo.local=%v", absolutePath)) } - if len(mavenArgs) > 0 { + if len(mavenArgs) > 0 && !checkIfArgumentIsInScanProperties(config, "detect.maven.build.command") { args = append(args, fmt.Sprintf("\"--detect.maven.build.command='%v'\"", strings.Join(mavenArgs, " "))) } diff --git a/cmd/detectExecuteScan_test.go b/cmd/detectExecuteScan_test.go index f88fc805d3..fe24e60dd2 100644 --- a/cmd/detectExecuteScan_test.go +++ b/cmd/detectExecuteScan_test.go @@ -748,6 +748,46 @@ func TestAddDetectArgs(t *testing.T) { "--detect.output.path='report'", }, }, + { + args: []string{"--testProp1=1"}, + options: detectExecuteScanOptions{ + ServerURL: "https://server.url", + BuildTool: "maven", + Token: "apiToken", + ProjectName: "Rapid_scan_on_PRs", + Version: "2.0", + VersioningModel: "major-minor", + CodeLocation: "", + ScanPaths: []string{"path1", "path2"}, + M2Path: "./m2", + GlobalSettingsFile: "pipeline/settings.xml", + ScanProperties: []string{ + "--detect.maven.build.command= --settings .pipeline/settings.xml -DskipTests install", + }, + MinScanInterval: 4, + CustomScanVersion: "2.0", + }, + isPullRequest: true, + expected: []string{ + "--testProp1=1", + "--detect.blackduck.signature.scanner.arguments='--min-scan-interval=4'", + "--detect.maven.build.command=", + "--settings", + ".pipeline/settings.xml", + "-DskipTests", + "install", + "--blackduck.url=https://server.url", + "--blackduck.api.token=apiToken", + "\"--detect.project.name='Rapid_scan_on_PRs'\"", + "\"--detect.project.version.name='2.0'\"", + "\"--detect.code.location.name='Rapid_scan_on_PRs/2.0'\"", + "--detect.blackduck.signature.scanner.paths=path1,path2", + 
"--detect.source.path='.'", + "--detect.blackduck.scan.mode='RAPID'", + "--detect.cleanup=false", + "--detect.output.path='report'", + }, + }, } for k, v := range testData { From a9bab48557ed611bd83347055f678dd036ea5c17 Mon Sep 17 00:00:00 2001 From: Anil Keshav Date: Mon, 26 Jun 2023 08:47:11 +0200 Subject: [PATCH 18/36] feat (url-logger) Implement logic for a selection classifier (#4411) * forcing the urls finder to relaxed * adding a classifier map * passing the stepName to the kaniko command executor bundle * pass stepName to maven utils for mavenBuild * improve enabling of Maven access log generation * Revert "improve enabling of Maven access log generation" This reverts commit 80b77223cdc674e843b3df3f710e3536153a79a9. * Revert "pass stepName to maven utils for mavenBuild" This reverts commit a4f99ae16048a693f3e4ad4c043a58a49bf33de3. * use reflection to update command stepName for mavenBuild * Revert "use reflection to update command stepName for mavenBuild" This reverts commit ef85c78669b65d608e723f18b453607f03fc2c77. --------- Co-authored-by: I557621 Co-authored-by: Jordi van Liempt <35920075+jliempt@users.noreply.github.com> --- cmd/kanikoExecute.go | 1 + pkg/log/url.go | 26 ++++++++++++++++++++++---- 2 files changed, 23 insertions(+), 4 deletions(-) diff --git a/cmd/kanikoExecute.go b/cmd/kanikoExecute.go index 26bf039e5b..03c5ace1d2 100644 --- a/cmd/kanikoExecute.go +++ b/cmd/kanikoExecute.go @@ -25,6 +25,7 @@ func kanikoExecute(config kanikoExecuteOptions, telemetryData *telemetry.CustomD "unsupported status code 401", }, }, + StepName: "kanikoExecute", } // reroute command output to logging framework diff --git a/pkg/log/url.go b/pkg/log/url.go index ace09cbbe0..3cb58e2d79 100644 --- a/pkg/log/url.go +++ b/pkg/log/url.go @@ -5,9 +5,10 @@ import ( "encoding/json" "fmt" "io/ioutil" - "mvdan.cc/xurls/v2" "os" "sync" + + "mvdan.cc/xurls/v2" ) type ( @@ -89,9 +90,26 @@ func (cl *URLLogger) WriteURLsLogToJSON() error { func (cl *URLLogger) Parse(buf bytes.Buffer) { cl.buf.Lock() defer cl.buf.Unlock() - cl.buf.data = append(cl.buf.data, parseURLs(buf.Bytes())...) + classifier := returnURLStrictClassifier(cl.stepName) + cl.buf.data = append(cl.buf.data, parseURLs(buf.Bytes(), classifier)...) 
+} + +func parseURLs(src []byte, classifier string) [][]byte { + if classifier == "Strict" { + return xurls.Strict().FindAll(src, -1) + } else { + return xurls.Relaxed().FindAll(src, -1) + } } -func parseURLs(src []byte) [][]byte { - return xurls.Strict().FindAll(src, -1) +func returnURLStrictClassifier(stepName string) string { + + switch stepName { + // golang cli output urls without the http protocol hence making the search less strict + //ToDo: other cases where the url is without protocol + case "golangBuild": + return "Relaxed" + default: + return "Strict" + } } From 244beccb5951ec8c19bb5656299bd9a4a5e960fa Mon Sep 17 00:00:00 2001 From: Marcus Holl Date: Mon, 26 Jun 2023 09:56:18 +0200 Subject: [PATCH 19/36] refactor(dockerExecuteOnKubernetes ): dedicated method for invalidating stashes (#4284) Co-authored-by: Christopher Fenner <26137398+CCFenner@users.noreply.github.com> --- vars/dockerExecuteOnKubernetes.groovy | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/vars/dockerExecuteOnKubernetes.groovy b/vars/dockerExecuteOnKubernetes.groovy index a58c6278f6..9ee5233863 100644 --- a/vars/dockerExecuteOnKubernetes.groovy +++ b/vars/dockerExecuteOnKubernetes.groovy @@ -328,8 +328,7 @@ void executeOnPod(Map config, utils, Closure body, Script script) { lsDir('Directory content before body execution') } if (defaultStashCreated) { - echo "invalidate stash workspace-${config.uniqueId}" - stash name: "workspace-${config.uniqueId}", excludes: '**/*', allowEmpty: true + invalidateStash(config, 'workspace') } def result = body() if (config.verbose) { @@ -464,8 +463,7 @@ private void unstashWorkspace(config, prefix) { echo "Unstash workspace failed with throwable ${e.getMessage()}" throw e } finally { - echo "invalidate stash ${prefix}-${config.uniqueId}" - stash name: "${prefix}-${config.uniqueId}", excludes: '**/*', allowEmpty: true + invalidateStash(config, prefix) } } @@ -640,3 +638,9 @@ private List getContainerEnvs(config, imageName, defaultEnvVars, defaultConfig) return containerEnv } + +private void invalidateStash(def config, String prefix) { + String name = "${prefix}-${config.uniqueId}" + echo "invalidate stash ${name}" + stash name: name, excludes: '**/*', allowEmpty: true +} From 4d182e951645cf2ddc09200b66ea249ca1fbaa17 Mon Sep 17 00:00:00 2001 From: Jk1484 <35270240+Jk1484@users.noreply.github.com> Date: Mon, 26 Jun 2023 20:19:36 +0500 Subject: [PATCH 20/36] adding tag for running of unit tests (#4416) --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 7c708ade25..b0fcbb459c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,7 +3,7 @@ COPY . /build WORKDIR /build # execute tests -RUN go test ./... -cover +RUN go test ./... 
-tags=unit -cover ## ONLY tests so far, building to be added later # execute build From 9c23cb3820427a75c47ab009b87fb9900062d55e Mon Sep 17 00:00:00 2001 From: Christopher Fenner <26137398+CCFenner@users.noreply.github.com> Date: Tue, 27 Jun 2023 17:29:59 +0200 Subject: [PATCH 21/36] Update verify-go.yml (#4425) --- .github/workflows/verify-go.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/verify-go.yml b/.github/workflows/verify-go.yml index 82798a4f2c..469a2489d7 100644 --- a/.github/workflows/verify-go.yml +++ b/.github/workflows/verify-go.yml @@ -26,7 +26,7 @@ jobs: - name: checkout uses: actions/checkout@v2 - name: unit-test - uses: paambaati/codeclimate-action@v2.6.0 + uses: paambaati/codeclimate-action@v4 env: CC_TEST_REPORTER_ID: ${{ secrets.CODE_CLIMATE_REPORTER_ID }} with: From bc2cdd18b841d76bdd8733f9ac6a85b0e0260f7c Mon Sep 17 00:00:00 2001 From: Jordi van Liempt <35920075+jliempt@users.noreply.github.com> Date: Wed, 28 Jun 2023 09:27:08 +0200 Subject: [PATCH 22/36] fix(url-logger): Enable access log generation for mavenBuild (#4421) Co-authored-by: I557621 --- cmd/mavenBuild.go | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/cmd/mavenBuild.go b/cmd/mavenBuild.go index 39c34b04ae..e242deff2d 100644 --- a/cmd/mavenBuild.go +++ b/cmd/mavenBuild.go @@ -5,6 +5,7 @@ import ( "os" "path" "path/filepath" + "reflect" "strings" "github.com/SAP/jenkins-library/pkg/buildsettings" @@ -25,6 +26,12 @@ const ( func mavenBuild(config mavenBuildOptions, telemetryData *telemetry.CustomData, commonPipelineEnvironment *mavenBuildCommonPipelineEnvironment) { utils := maven.NewUtilsBundle() + // enables url-log.json creation + cmd := reflect.ValueOf(utils).Elem().FieldByName("Command") + if cmd.IsValid() { + reflect.Indirect(cmd).FieldByName("StepName").SetString("mavenBuild") + } + err := runMavenBuild(&config, telemetryData, utils, commonPipelineEnvironment) if err != nil { log.Entry().WithError(err).Fatal("step execution failed") From 30d82e920d57101c3780bc6f7f9d38004789c1b3 Mon Sep 17 00:00:00 2001 From: Jk1484 <35270240+Jk1484@users.noreply.github.com> Date: Thu, 29 Jun 2023 13:11:34 +0500 Subject: [PATCH 23/36] fix(kaniko): Update documentation (#4405) * replacing mandatory if to mandatory. * Revert "replacing mandatory if to mandatory." This reverts commit f98ab5f0ff56ed24bc4be7e460f3c4cf4ef7761d. * Update documentation * go generate * Update documentation * go generate --------- Co-authored-by: Ashly Mathew Co-authored-by: Vyacheslav Starostin --- cmd/kanikoExecute_generated.go | 8 ++++++-- resources/metadata/kanikoExecute.yaml | 8 ++++++-- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/cmd/kanikoExecute_generated.go b/cmd/kanikoExecute_generated.go index db10dbd8be..4e309da214 100644 --- a/cmd/kanikoExecute_generated.go +++ b/cmd/kanikoExecute_generated.go @@ -140,6 +140,10 @@ func KanikoExecuteCommand() *cobra.Command { Short: "Executes a [Kaniko](https://github.com/GoogleContainerTools/kaniko) build for creating a Docker container.", Long: `Executes a [Kaniko](https://github.com/GoogleContainerTools/kaniko) build for creating a Docker container. +### Building one container image + +For building one container image the step expects that one of the containerImage, containerImageName or --destination (via buildOptions) is set. + ### Building multiple container images The step allows you to build multiple container images with one run. 
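As an illustration of the single-image expectation described above, a minimal configuration sketch, assuming the usual Piper `.pipeline/config.yml` layout (the file path and `steps:` structure are assumptions; the parameter names and the example image reference are taken from the step documentation):

```yaml
steps:
  kanikoExecute:
    # either provide the full image reference (registry, name and tag) ...
    containerImage: my.docker.registry/path/myImageName:myTag
    # ... or, alternatively, set containerImageName plus containerImageTag,
    # or pass a --destination entry via buildOptions
```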
@@ -286,8 +290,8 @@ func addKanikoExecuteFlags(cmd *cobra.Command, stepConfig *kanikoExecuteOptions) cmd.Flags().StringSliceVar(&stepConfig.BuildOptions, "buildOptions", []string{`--skip-tls-verify-pull`, `--ignore-path=/workspace`, `--ignore-path=/busybox`}, "Defines a list of build options for the [kaniko](https://github.com/GoogleContainerTools/kaniko) build.") cmd.Flags().StringVar(&stepConfig.BuildSettingsInfo, "buildSettingsInfo", os.Getenv("PIPER_buildSettingsInfo"), "Build settings info is typically filled by the step automatically to create information about the build settings that were used during the mta build. This information is typically used for compliance related processes.") cmd.Flags().StringVar(&stepConfig.ContainerBuildOptions, "containerBuildOptions", os.Getenv("PIPER_containerBuildOptions"), "Deprected, please use buildOptions. Defines the build options for the [kaniko](https://github.com/GoogleContainerTools/kaniko) build.") - cmd.Flags().StringVar(&stepConfig.ContainerImage, "containerImage", os.Getenv("PIPER_containerImage"), "Defines the full name of the Docker image to be created including registry, image name and tag like `my.docker.registry/path/myImageName:myTag`. If left empty, image will not be pushed.") - cmd.Flags().StringVar(&stepConfig.ContainerImageName, "containerImageName", os.Getenv("PIPER_containerImageName"), "Name of the container which will be built - will be used instead of parameter `containerImage`") + cmd.Flags().StringVar(&stepConfig.ContainerImage, "containerImage", os.Getenv("PIPER_containerImage"), "Defines the full name of the Docker image to be created including registry, image name and tag like `my.docker.registry/path/myImageName:myTag`. If `containerImage` is not provided, then `containerImageName` or `--destination` (via buildOptions) should be provided.") + cmd.Flags().StringVar(&stepConfig.ContainerImageName, "containerImageName", os.Getenv("PIPER_containerImageName"), "Name of the container which will be built - will be used instead of parameter `containerImage`. If `containerImageName` is not provided, then `containerImage` or `--destination` (via buildOptions) should be provided.") cmd.Flags().StringVar(&stepConfig.ContainerImageTag, "containerImageTag", os.Getenv("PIPER_containerImageTag"), "Tag of the container which will be built - will be used instead of parameter `containerImage`") cmd.Flags().BoolVar(&stepConfig.ContainerMultiImageBuild, "containerMultiImageBuild", false, "Defines if multiple containers should be build. Dockerfiles are used using the pattern **/Dockerfile*. Excludes can be defined via [`containerMultiImageBuildExcludes`](#containermultiimagebuildexscludes).") cmd.Flags().StringSliceVar(&stepConfig.ContainerMultiImageBuildExcludes, "containerMultiImageBuildExcludes", []string{}, "Defines a list of Dockerfile paths to exclude from the build when using [`containerMultiImageBuild`](#containermultiimagebuild).") diff --git a/resources/metadata/kanikoExecute.yaml b/resources/metadata/kanikoExecute.yaml index 9da817ee88..0894cbd1ca 100644 --- a/resources/metadata/kanikoExecute.yaml +++ b/resources/metadata/kanikoExecute.yaml @@ -4,6 +4,10 @@ metadata: longDescription: | Executes a [Kaniko](https://github.com/GoogleContainerTools/kaniko) build for creating a Docker container. + ### Building one container image + + For building one container image the step expects that one of the containerImage, containerImageName or --destination (via buildOptions) is set. 
+ ### Building multiple container images The step allows you to build multiple container images with one run. @@ -109,7 +113,7 @@ spec: - name: containerImageNameAndTag deprecated: true type: string - description: Defines the full name of the Docker image to be created including registry, image name and tag like `my.docker.registry/path/myImageName:myTag`. If left empty, image will not be pushed. + description: Defines the full name of the Docker image to be created including registry, image name and tag like `my.docker.registry/path/myImageName:myTag`. If `containerImage` is not provided, then `containerImageName` or `--destination` (via buildOptions) should be provided. scope: - PARAMETERS - STAGES @@ -118,7 +122,7 @@ spec: aliases: - name: dockerImageName type: string - description: Name of the container which will be built - will be used instead of parameter `containerImage` + description: Name of the container which will be built - will be used instead of parameter `containerImage`. If `containerImageName` is not provided, then `containerImage` or `--destination` (via buildOptions) should be provided. mandatoryIf: - name: containerMultiImageBuild value: true From a614923e180a5e8b0575270ab3d863716818b338 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 30 Jun 2023 13:32:32 +0200 Subject: [PATCH 24/36] chore: Configure Renovate (#4351) * Add renovate.json * Update and rename renovate.json to .github/renovate.json * Update renovate.json * Update renovate.json --------- Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Co-authored-by: Christopher Fenner <26137398+CCFenner@users.noreply.github.com> --- .github/renovate.json | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 .github/renovate.json diff --git a/.github/renovate.json b/.github/renovate.json new file mode 100644 index 0000000000..85197a4fc2 --- /dev/null +++ b/.github/renovate.json @@ -0,0 +1,10 @@ +{ + "$schema": "https://docs.renovatebot.com/renovate-schema.json", + "extends": [ + "config:base" + ], + "ignorePaths": [ + "integration/testdata/**", + "test/resources/**" + ] +} From d8dacda12120f644f231dc46084678db371974e3 Mon Sep 17 00:00:00 2001 From: Pavel Busko Date: Fri, 30 Jun 2023 14:02:35 +0200 Subject: [PATCH 25/36] feat(cnbBuild): support setting registry username and password via parameters (#4426) * feat(cnbBuild): support setting registry username and password via parameters * fix gitops integration test assertion Co-authored-by: Pavel Busko * Update integration/integration_gitops_test.go --------- Co-authored-by: Ralf Pannemans --- cmd/cnbBuild.go | 18 ++++++++- cmd/cnbBuild_generated.go | 37 ++++++++++++++++--- integration/docker_test_executor.go | 4 +- integration/integration_cnb_test.go | 28 +++++++------- integration/integration_gitops_test.go | 3 +- .../testdata/TestCnbIntegration/config.json | 5 +++ resources/metadata/cnbBuild.yaml | 28 +++++++++++++- 7 files changed, 97 insertions(+), 26 deletions(-) create mode 100644 integration/testdata/TestCnbIntegration/config.json diff --git a/cmd/cnbBuild.go b/cmd/cnbBuild.go index 74bb30301b..4b49b51a76 100644 --- a/cmd/cnbBuild.go +++ b/cmd/cnbBuild.go @@ -385,10 +385,26 @@ func callCnbBuild(config *cnbBuildOptions, telemetryData *telemetry.CustomData, err = renameDockerConfig(config, utils) if err != nil { log.SetErrorCategory(log.ErrorConfiguration) - return errors.Wrapf(err, "failed to rename DockerConfigJSON file '%v'", config.DockerConfigJSON) + return 
errors.Wrapf(err, "failed to rename DockerConfigJSON file '%s'", config.DockerConfigJSON) } } + if config.ContainerRegistryUser != "" && config.ContainerRegistryPassword != "" { + log.Entry().Debug("enhancing docker config with the provided credentials") + if config.DockerConfigJSON == "" { + config.DockerConfigJSON = "/tmp/config.json" + } + log.Entry().Debugf("using docker config file %q", config.DockerConfigJSON) + + _, err = docker.CreateDockerConfigJSON(config.ContainerRegistryURL, config.ContainerRegistryUser, config.ContainerRegistryPassword, "", config.DockerConfigJSON, utils) + if err != nil { + log.SetErrorCategory(log.ErrorBuild) + return errors.Wrapf(err, "failed to update DockerConfigJSON file %q", config.DockerConfigJSON) + } + + log.Entry().Debugf("docker config %q has been updated", config.DockerConfigJSON) + } + mergedConfigs, err := processConfigs(*config, config.MultipleImages) if err != nil { return errors.Wrap(err, "failed to process config") } diff --git a/cmd/cnbBuild_generated.go b/cmd/cnbBuild_generated.go index 45c8f7ec05..c714123ed9 100644 --- a/cmd/cnbBuild_generated.go +++ b/cmd/cnbBuild_generated.go @@ -26,6 +26,8 @@ type cnbBuildOptions struct { ContainerImageAlias string `json:"containerImageAlias,omitempty"` ContainerImageTag string `json:"containerImageTag,omitempty"` ContainerRegistryURL string `json:"containerRegistryUrl,omitempty"` + ContainerRegistryUser string `json:"containerRegistryUser,omitempty"` + ContainerRegistryPassword string `json:"containerRegistryPassword,omitempty"` Buildpacks []string `json:"buildpacks,omitempty"` BuildEnvVars map[string]interface{} `json:"buildEnvVars,omitempty"` Path string `json:"path,omitempty"` @@ -224,6 +226,8 @@ func addCnbBuildFlags(cmd *cobra.Command, stepConfig *cnbBuildOptions) { cmd.Flags().StringVar(&stepConfig.ContainerImageAlias, "containerImageAlias", os.Getenv("PIPER_containerImageAlias"), "Logical name used for this image.\n") cmd.Flags().StringVar(&stepConfig.ContainerImageTag, "containerImageTag", os.Getenv("PIPER_containerImageTag"), "Tag of the container which will be built") cmd.Flags().StringVar(&stepConfig.ContainerRegistryURL, "containerRegistryUrl", os.Getenv("PIPER_containerRegistryUrl"), "Container registry where the image should be pushed to.\n\n**Note**: `containerRegistryUrl` should include only the domain. If you want to publish an image under `docker.io/example/my-image`, you must set `containerRegistryUrl: \"docker.io\"` and `containerImageName: \"example/my-image\"`.\n") + cmd.Flags().StringVar(&stepConfig.ContainerRegistryUser, "containerRegistryUser", os.Getenv("PIPER_containerRegistryUser"), "Username of the container registry where the image should be pushed to - which will be updated in a docker config json file. If a docker config json file is provided via parameter `dockerConfigJSON`, then the existing file will be enhanced") + cmd.Flags().StringVar(&stepConfig.ContainerRegistryPassword, "containerRegistryPassword", os.Getenv("PIPER_containerRegistryPassword"), "Password of the container registry where the image should be pushed to - which will be updated in a docker config json file. 
If a docker config json file is provided via parameter `dockerConfigJSON`, then the existing file will be enhanced") cmd.Flags().StringSliceVar(&stepConfig.Buildpacks, "buildpacks", []string{}, "List of custom buildpacks to use in the form of `$HOSTNAME/$REPO[:$TAG]`.") cmd.Flags().StringVar(&stepConfig.Path, "path", os.Getenv("PIPER_path"), "Glob that should either point to a directory with your sources or one artifact in zip format.\nThis property determines the input to the buildpack.\n") @@ -308,6 +312,34 @@ func cnbBuildMetadata() config.StepData { Aliases: []config.Alias{{Name: "dockerRegistryUrl"}}, Default: os.Getenv("PIPER_containerRegistryUrl"), }, + { + Name: "containerRegistryUser", + ResourceRef: []config.ResourceReference{ + { + Name: "commonPipelineEnvironment", + Param: "container/repositoryUsername", + }, + }, + Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{{Name: "dockerRegistryUser"}}, + Default: os.Getenv("PIPER_containerRegistryUser"), + }, + { + Name: "containerRegistryPassword", + ResourceRef: []config.ResourceReference{ + { + Name: "commonPipelineEnvironment", + Param: "container/repositoryPassword", + }, + }, + Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{{Name: "dockerRegistryPassword"}}, + Default: os.Getenv("PIPER_containerRegistryPassword"), + }, { Name: "buildpacks", ResourceRef: []config.ResourceReference{ @@ -351,11 +383,6 @@ func cnbBuildMetadata() config.StepData { { Name: "dockerConfigJSON", ResourceRef: []config.ResourceReference{ - { - Name: "commonPipelineEnvironment", - Param: "custom/dockerConfigJSON", - }, - { Name: "dockerConfigJsonCredentialsId", Type: "secret", diff --git a/integration/docker_test_executor.go b/integration/docker_test_executor.go index 87f944434f..890610410c 100644 --- a/integration/docker_test_executor.go +++ b/integration/docker_test_executor.go @@ -293,9 +293,7 @@ func (d *IntegrationTestDockerExecRunner) assertFileContentEquals(t *testing.T, t.Fatalf("unable to get tar file content: %s", err) } - if !strings.Contains(str.String(), contentWant) { - assert.Equal(t, str.String(), contentWant, fmt.Sprintf("Unexpected content of file '%s'", fileWant)) - } + assert.Equal(t, str.String(), contentWant, fmt.Sprintf("Unexpected content of file '%s'", fileWant)) } func (d *IntegrationTestDockerExecRunner) terminate(t *testing.T) { diff --git a/integration/integration_cnb_test.go b/integration/integration_cnb_test.go index 39be5ee048..edfe98debf 100644 --- a/integration/integration_cnb_test.go +++ b/integration/integration_cnb_test.go @@ -50,7 +50,6 @@ func TestCNBIntegrationNPMProject(t *testing.T) { "PIPER_VAULTCREDENTIAL_DYNATRACE_API_KEY": "api-key-content", }, }) - defer container.terminate(t) container2 := givenThisContainer(t, IntegrationTestDockerExecRunnerBundle{ Image: baseBuilder, @@ -61,9 +60,8 @@ func TestCNBIntegrationNPMProject(t *testing.T) { "PIPER_VAULTCREDENTIAL_DYNATRACE_API_KEY": "api-key-content", }, }) - defer container2.terminate(t) - err := container.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--path", "TestCnbIntegration/project", "--customConfig", "TestCnbIntegration/config.yml", "--containerImageName", "node", "--containerImageTag", "0.0.1", "--containerRegistryUrl", registryURL, "--defaultProcess", "greeter") + err := container.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--path", 
"TestCnbIntegration/project", "--customConfig", "TestCnbIntegration/config.yml", "--containerImageName", "node", "--containerImageTag", "0.0.1", "--dockerConfigJSON", "TestCnbIntegration/config.json", "--containerRegistryUrl", registryURL, "--containerRegistryUser", "foo", "--containerRegistryPassword", "bar", "--defaultProcess", "greeter") assert.NoError(t, err) container.assertHasOutput(t, "running command: /cnb/lifecycle/creator") container.assertHasOutput(t, "Selected Node Engine version (using BP_NODE_VERSION): 16") @@ -72,9 +70,10 @@ func TestCNBIntegrationNPMProject(t *testing.T) { container.assertHasOutput(t, "Setting default process type 'greeter'") container.assertHasOutput(t, "*** Images (sha256:") container.assertHasOutput(t, "SUCCESS") + container.assertFileContentEquals(t, "/project/TestCnbIntegration/config.json", "{\"auths\":{\"localhost:5000\":{\"auth\":\"Zm9vOmJhcg==\"},\"test.registry.io\":{}}}") container.terminate(t) - err = container2.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--path", "TestCnbIntegration/project", "--customConfig", "TestCnbIntegration/config.yml", "--containerImageName", "node", "--containerImageTag", "0.0.1", "--containerRegistryUrl", registryURL, "--projectDescriptor", "project-with-id.toml") + err = container2.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--path", "TestCnbIntegration/project", "--customConfig", "TestCnbIntegration/config.yml", "--containerImageName", "node", "--containerImageTag", "0.0.1", "--containerRegistryUrl", registryURL, "--containerRegistryUser", "foo", "--containerRegistryPassword", "bar", "--projectDescriptor", "project-with-id.toml") assert.NoError(t, err) container2.assertHasOutput(t, "running command: /cnb/lifecycle/creator") container2.assertHasOutput(t, "Selected Node Engine version (using BP_NODE_VERSION): 16") @@ -82,6 +81,7 @@ func TestCNBIntegrationNPMProject(t *testing.T) { container2.assertHasOutput(t, fmt.Sprintf("Saving %s/node:0.0.1", registryURL)) container2.assertHasOutput(t, "*** Images (sha256:") container2.assertHasOutput(t, "SUCCESS") + container2.assertFileContentEquals(t, "/tmp/config.json", "{\"auths\":{\"localhost:5000\":{\"auth\":\"Zm9vOmJhcg==\"}}}") container2.terminate(t) } @@ -97,7 +97,6 @@ func TestCNBIntegrationProjectDescriptor(t *testing.T) { TestDir: []string{"testdata", "TestCnbIntegration", "project"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), }) - defer container.terminate(t) err := container.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--containerImageName", "not-found", "--containerImageTag", "0.0.1", "--containerRegistryUrl", registryURL) assert.NoError(t, err) @@ -114,6 +113,7 @@ func TestCNBIntegrationProjectDescriptor(t *testing.T) { "*** Images (sha256:", "SUCCESS", ) + container.terminate(t) } func TestCNBIntegrationZipPath(t *testing.T) { @@ -128,7 +128,6 @@ func TestCNBIntegrationZipPath(t *testing.T) { TestDir: []string{"testdata", "TestCnbIntegration", "zip"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), }) - defer container.terminate(t) err := container.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--containerImageName", "not-found", "--containerImageTag", "0.0.1", "--containerRegistryUrl", registryURL, "--path", "go.zip", "--createBOM") assert.NoError(t, err) @@ -143,6 +142,7 @@ func TestCNBIntegrationZipPath(t *testing.T) { "syft packages registry:localhost:5000/not-found:0.0.1 -o cyclonedx-xml --file bom-docker-0.xml -q", 
) container.assertHasFiles(t, "/project/bom-docker-0.xml") + container.terminate(t) } func TestCNBIntegrationNonZipPath(t *testing.T) { @@ -157,12 +157,12 @@ func TestCNBIntegrationNonZipPath(t *testing.T) { TestDir: []string{"testdata", "TestMtaIntegration", "npm"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), }) - defer container.terminate(t) err := container.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--containerImageName", "not-found", "--containerImageTag", "0.0.1", "--containerRegistryUrl", registryURL, "--path", "mta.yaml") assert.Error(t, err) container.assertHasOutput(t, "Copying '/project/mta.yaml' into '/workspace' failed: application path must be a directory or zip") + container.terminate(t) } func TestCNBIntegrationNPMCustomBuildpacksFullProject(t *testing.T) { @@ -177,7 +177,6 @@ func TestCNBIntegrationNPMCustomBuildpacksFullProject(t *testing.T) { TestDir: []string{"testdata", "TestMtaIntegration", "npm"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), }) - defer container.terminate(t) err := container.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--buildpacks", "gcr.io/paketo-buildpacks/nodejs:0.19.0", "--containerImageName", "not-found", "--containerImageTag", "0.0.1", "--containerRegistryUrl", registryURL) assert.NoError(t, err) @@ -191,6 +190,7 @@ func TestCNBIntegrationNPMCustomBuildpacksFullProject(t *testing.T) { "*** Images (sha256:", "SUCCESS", ) + container.terminate(t) } func TestCNBIntegrationNPMCustomBuildpacksBuildpacklessProject(t *testing.T) { @@ -205,7 +205,6 @@ func TestCNBIntegrationNPMCustomBuildpacksBuildpacklessProject(t *testing.T) { TestDir: []string{"testdata", "TestMtaIntegration", "npm"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), }) - defer container.terminate(t) err := container.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--buildpacks", "gcr.io/paketo-buildpacks/nodejs:0.19.0", "--containerImageName", "not-found", "--containerImageTag", "0.0.1", "--containerRegistryUrl", registryURL) assert.NoError(t, err) @@ -218,6 +217,7 @@ func TestCNBIntegrationNPMCustomBuildpacksBuildpacklessProject(t *testing.T) { "*** Images (sha256:", "SUCCESS", ) + container.terminate(t) } func TestCNBIntegrationWrongBuilderProject(t *testing.T) { @@ -226,12 +226,12 @@ func TestCNBIntegrationWrongBuilderProject(t *testing.T) { Image: "nginx:latest", TestDir: []string{"testdata", "TestMtaIntegration", "npm"}, }) - defer container.terminate(t) err := container.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--containerImageName", "not-found", "--containerImageTag", "0.0.1", "--containerRegistryUrl", "test") assert.Error(t, err) container.assertHasOutput(t, "the provided dockerImage is not a valid builder") + container.terminate(t) } func TestCNBIntegrationBindings(t *testing.T) { @@ -249,7 +249,6 @@ func TestCNBIntegrationBindings(t *testing.T) { "PIPER_VAULTCREDENTIAL_DYNATRACE_API_KEY": "api-key-content", }, }) - defer container.terminate(t) err := container.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--customConfig", "TestCnbIntegration/config.yml", "--containerImageName", "not-found", "--containerImageTag", "0.0.1", "--containerRegistryUrl", registryURL, "--path", "TestMtaIntegration/maven") assert.Error(t, err) @@ -261,6 +260,7 @@ func TestCNBIntegrationBindings(t *testing.T) { ) container.assertFileContentEquals(t, "/tmp/platform/bindings/maven-settings/settings.xml", 
"invalid xml") container.assertFileContentEquals(t, "/tmp/platform/bindings/dynatrace/api-key", "api-key-content") + container.terminate(t) } func TestCNBIntegrationMultiImage(t *testing.T) { @@ -275,7 +275,6 @@ func TestCNBIntegrationMultiImage(t *testing.T) { TestDir: []string{"testdata", "TestCnbIntegration"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), }) - defer container.terminate(t) err := container.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--customConfig", "config_multi_image.yml", "--createBOM") assert.NoError(t, err) @@ -295,6 +294,7 @@ func TestCNBIntegrationMultiImage(t *testing.T) { container.assertHasFiles(t, "/project/bom-docker-0.xml") container.assertHasFiles(t, "/project/bom-docker-1.xml") container.assertHasFiles(t, "/project/bom-docker-2.xml") + container.terminate(t) } func TestCNBIntegrationPreserveFiles(t *testing.T) { @@ -309,12 +309,12 @@ func TestCNBIntegrationPreserveFiles(t *testing.T) { TestDir: []string{"testdata", "TestCnbIntegration"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), }) - defer container.terminate(t) err := container.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--customConfig", "config_preserve_files.yml") assert.NoError(t, err) container.assertHasFiles(t, "/project/project/node_modules/base/README.md", "/project/project/package-lock.json") + container.terminate(t) } func TestCNBIntegrationPreserveFilesIgnored(t *testing.T) { @@ -329,9 +329,9 @@ func TestCNBIntegrationPreserveFilesIgnored(t *testing.T) { TestDir: []string{"testdata", "TestCnbIntegration"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), }) - defer container.terminate(t) err := container.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--customConfig", "config_preserve_files.yml", "--path", "zip/go.zip", "--containerImageName", "go-zip") assert.NoError(t, err) container.assertHasOutput(t, "skipping preserving files because the source") + container.terminate(t) } diff --git a/integration/integration_gitops_test.go b/integration/integration_gitops_test.go index 09aedb9333..b29a32831a 100644 --- a/integration/integration_gitops_test.go +++ b/integration/integration_gitops_test.go @@ -34,5 +34,6 @@ func TestGitOpsIntegrationUpdateDeployment(t *testing.T) { newName: image newTag: "456" apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization`) +kind: Kustomization +`) } diff --git a/integration/testdata/TestCnbIntegration/config.json b/integration/testdata/TestCnbIntegration/config.json new file mode 100644 index 0000000000..e1cc1b7a1f --- /dev/null +++ b/integration/testdata/TestCnbIntegration/config.json @@ -0,0 +1,5 @@ +{ + "auths": { + "test.registry.io": {} + } +} \ No newline at end of file diff --git a/resources/metadata/cnbBuild.yaml b/resources/metadata/cnbBuild.yaml index 21f86af7cd..809ca5dbdc 100644 --- a/resources/metadata/cnbBuild.yaml +++ b/resources/metadata/cnbBuild.yaml @@ -95,6 +95,32 @@ spec: resourceRef: - name: commonPipelineEnvironment param: container/registryUrl + - name: containerRegistryUser + aliases: + - name: dockerRegistryUser + type: string + description: Username of the container registry where the image should be pushed to - which will updated in a docker config json file. 
If a docker config json file is provided via parameter `dockerConfigJSON`, then the existing file will be enhanced + scope: + - GENERAL + - PARAMETERS + - STAGES + - STEPS + resourceRef: + - name: commonPipelineEnvironment + param: container/repositoryUsername + - name: containerRegistryPassword + aliases: + - name: dockerRegistryPassword + type: string + description: Password of the container registry where the image should be pushed to - which will updated in a docker config json file. If a docker config json file is provided via parameter `dockerConfigJSON`, then the existing file will be enhanced + scope: + - GENERAL + - PARAMETERS + - STAGES + - STEPS + resourceRef: + - name: commonPipelineEnvironment + param: container/repositoryPassword - name: buildpacks type: "[]string" description: List of custom buildpacks to use in the form of `$HOSTNAME/$REPO[:$TAG]`. @@ -150,8 +176,6 @@ spec: - PARAMETERS secret: true resourceRef: - - name: commonPipelineEnvironment - param: custom/dockerConfigJSON - name: dockerConfigJsonCredentialsId type: secret - type: vaultSecretFile From 1befaa80a2b2cf38271996661659940d3eb18af0 Mon Sep 17 00:00:00 2001 From: Pavel Busko Date: Mon, 3 Jul 2023 10:55:06 +0200 Subject: [PATCH 26/36] fix(cnbBuild): correctly construct docker config using credentials (#4441) Co-authored-by: Ralf Pannemans --- cmd/cnbBuild.go | 13 ++++++++++++- integration/integration_cnb_test.go | 2 +- pkg/cnbutils/auth.go | 6 +++++- 3 files changed, 18 insertions(+), 3 deletions(-) diff --git a/cmd/cnbBuild.go b/cmd/cnbBuild.go index 4b49b51a76..6b3dd3fa2c 100644 --- a/cmd/cnbBuild.go +++ b/cmd/cnbBuild.go @@ -7,6 +7,7 @@ import ( "os" "path" "path/filepath" + "regexp" "github.com/SAP/jenkins-library/pkg/buildsettings" "github.com/SAP/jenkins-library/pkg/certutils" @@ -396,7 +397,17 @@ func callCnbBuild(config *cnbBuildOptions, telemetryData *telemetry.CustomData, } log.Entry().Debugf("using docker config file %q", config.DockerConfigJSON) - _, err = docker.CreateDockerConfigJSON(config.ContainerRegistryURL, config.ContainerRegistryUser, config.ContainerRegistryPassword, "", config.DockerConfigJSON, utils) + if matched, _ := regexp.MatchString("^(http|https)://.*", config.ContainerRegistryURL); !matched { + config.ContainerRegistryURL = fmt.Sprintf("https://%s", config.ContainerRegistryURL) + } + + containerRegistry, err := docker.ContainerRegistryFromURL(config.ContainerRegistryURL) + if err != nil { + log.SetErrorCategory(log.ErrorConfiguration) + return errors.Wrapf(err, "failed to read registry url %q", config.ContainerRegistryURL) + } + + _, err = docker.CreateDockerConfigJSON(containerRegistry, config.ContainerRegistryUser, config.ContainerRegistryPassword, "", config.DockerConfigJSON, utils) if err != nil { log.SetErrorCategory(log.ErrorBuild) return errors.Wrapf(err, "failed to update DockerConfigJSON file %q", config.DockerConfigJSON) diff --git a/integration/integration_cnb_test.go b/integration/integration_cnb_test.go index edfe98debf..159cb0bfee 100644 --- a/integration/integration_cnb_test.go +++ b/integration/integration_cnb_test.go @@ -61,7 +61,7 @@ func TestCNBIntegrationNPMProject(t *testing.T) { }, }) - err := container.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--path", "TestCnbIntegration/project", "--customConfig", "TestCnbIntegration/config.yml", "--containerImageName", "node", "--containerImageTag", "0.0.1", "--dockerConfigJSON", "TestCnbIntegration/config.json", "--containerRegistryUrl", registryURL, "--containerRegistryUser", "foo", 
"--containerRegistryPassword", "bar", "--defaultProcess", "greeter") + err := container.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--path", "TestCnbIntegration/project", "--customConfig", "TestCnbIntegration/config.yml", "--containerImageName", "node", "--containerImageTag", "0.0.1", "--dockerConfigJSON", "TestCnbIntegration/config.json", "--containerRegistryUrl", fmt.Sprintf("http://%s", registryURL), "--containerRegistryUser", "foo", "--containerRegistryPassword", "bar", "--defaultProcess", "greeter") assert.NoError(t, err) container.assertHasOutput(t, "running command: /cnb/lifecycle/creator") container.assertHasOutput(t, "Selected Node Engine version (using BP_NODE_VERSION): 16") diff --git a/pkg/cnbutils/auth.go b/pkg/cnbutils/auth.go index 140d9cbb81..98a5b58b3b 100644 --- a/pkg/cnbutils/auth.go +++ b/pkg/cnbutils/auth.go @@ -14,6 +14,7 @@ func GenerateCnbAuth(config string, utils BuildUtils) (string, error) { dockerConfig := &configfile.ConfigFile{} if config != "" { + log.Entry().Debugf("using docker config file %q", config) dockerConfigJSON, err := utils.FileRead(config) if err != nil { return "", err @@ -37,10 +38,13 @@ func GenerateCnbAuth(config string, utils BuildUtils) (string, error) { } log.Entry().Debugf("Adding credentials for: registry %q", registry) - auth[registry] = fmt.Sprintf("Basic %s", value.Auth) } + if len(auth) == 0 { + log.Entry().Warn("docker config file is empty!") + } + cnbRegistryAuth, err := json.Marshal(auth) if err != nil { return "", err From 13f1e94adefacd20c7a6f95135760a22b9701dab Mon Sep 17 00:00:00 2001 From: Pavel Busko Date: Tue, 4 Jul 2023 14:19:02 +0200 Subject: [PATCH 27/36] fix(cnbBuild): read dockerConfigJSON from CPE and merge it with user-provided (#4444) Co-authored-by: Ralf Pannemans --- cmd/cnbBuild.go | 68 +++++++------------ cmd/cnbBuild_generated.go | 49 +++++-------- cmd/cnbBuild_test.go | 11 ++- integration/integration_cnb_test.go | 19 +++--- .../custom/dockerConfigJSON | 1 + .../TestCnbIntegration/.pipeline/config.json | 5 ++ .../testdata/TestCnbIntegration/config.yml | 2 +- pkg/docker/docker.go | 59 ++++++++++++++++ pkg/docker/docker_test.go | 44 ++++++++++++ resources/metadata/cnbBuild.yaml | 33 ++------- 10 files changed, 170 insertions(+), 121 deletions(-) create mode 100644 integration/testdata/TestCnbIntegration/.pipeline/commonPipelineEnvironment/custom/dockerConfigJSON create mode 100644 integration/testdata/TestCnbIntegration/.pipeline/config.json diff --git a/cmd/cnbBuild.go b/cmd/cnbBuild.go index 6b3dd3fa2c..4ef75a4c07 100644 --- a/cmd/cnbBuild.go +++ b/cmd/cnbBuild.go @@ -7,7 +7,6 @@ import ( "os" "path" "path/filepath" - "regexp" "github.com/SAP/jenkins-library/pkg/buildsettings" "github.com/SAP/jenkins-library/pkg/certutils" @@ -212,28 +211,26 @@ func extractZip(source, target string) error { return nil } -func renameDockerConfig(config *cnbBuildOptions, utils cnbutils.BuildUtils) error { - if filepath.Base(config.DockerConfigJSON) != "config.json" { - log.Entry().Debugf("Renaming docker config file from '%s' to 'config.json'", filepath.Base(config.DockerConfigJSON)) +func ensureDockerConfig(config *cnbBuildOptions, utils cnbutils.BuildUtils) error { + newFile := "/tmp/config.json" + if config.DockerConfigJSON == "" { + config.DockerConfigJSON = newFile - newPath := filepath.Join(filepath.Dir(config.DockerConfigJSON), "config.json") - alreadyExists, err := utils.FileExists(newPath) - if err != nil { - return err - } - - if alreadyExists { - return nil - } + return 
utils.FileWrite(config.DockerConfigJSON, []byte("{}"), os.ModePerm) + } - err = utils.FileRename(config.DockerConfigJSON, newPath) - if err != nil { - return err - } + log.Entry().Debugf("Copying docker config file from '%s' to '%s'", config.DockerConfigJSON, newFile) + _, err := utils.Copy(config.DockerConfigJSON, newFile) + if err != nil { + return err + } - config.DockerConfigJSON = newPath + err = utils.Chmod(newFile, 0644) + if err != nil { + return err } + config.DockerConfigJSON = newFile return nil } @@ -382,38 +379,19 @@ func callCnbBuild(config *cnbBuildOptions, telemetryData *telemetry.CustomData, } commonPipelineEnvironment.custom.buildSettingsInfo = buildSettingsInfo - if len(config.DockerConfigJSON) > 0 { - err = renameDockerConfig(config, utils) - if err != nil { - log.SetErrorCategory(log.ErrorConfiguration) - return errors.Wrapf(err, "failed to rename DockerConfigJSON file '%s'", config.DockerConfigJSON) - } + err = ensureDockerConfig(config, utils) + if err != nil { + log.SetErrorCategory(log.ErrorConfiguration) + return errors.Wrapf(err, "failed to create/rename DockerConfigJSON file") } - if config.ContainerRegistryUser != "" && config.ContainerRegistryPassword != "" { - log.Entry().Debug("enhancing docker config with the provided credentials") - if config.DockerConfigJSON == "" { - config.DockerConfigJSON = "/tmp/config.json" - } - log.Entry().Debugf("using docker config file %q", config.DockerConfigJSON) - - if matched, _ := regexp.MatchString("^(http|https)://.*", config.ContainerRegistryURL); !matched { - config.ContainerRegistryURL = fmt.Sprintf("https://%s", config.ContainerRegistryURL) - } - - containerRegistry, err := docker.ContainerRegistryFromURL(config.ContainerRegistryURL) + if config.DockerConfigJSONCPE != "" { + log.Entry().Debugf("merging docker config file '%s' into '%s'", config.DockerConfigJSONCPE, config.DockerConfigJSON) + err = docker.MergeDockerConfigJSON(config.DockerConfigJSONCPE, config.DockerConfigJSON, utils) if err != nil { log.SetErrorCategory(log.ErrorConfiguration) - return errors.Wrapf(err, "failed to read registry url %q", config.ContainerRegistryURL) + return errors.Wrapf(err, "failed to merge DockerConfigJSON files") } - - _, err = docker.CreateDockerConfigJSON(containerRegistry, config.ContainerRegistryUser, config.ContainerRegistryPassword, "", config.DockerConfigJSON, utils) - if err != nil { - log.SetErrorCategory(log.ErrorBuild) - return errors.Wrapf(err, "failed to update DockerConfigJSON file %q", config.DockerConfigJSON) - } - - log.Entry().Debugf("docker config %q has been updated", config.DockerConfigJSON) } mergedConfigs, err := processConfigs(*config, config.MultipleImages) diff --git a/cmd/cnbBuild_generated.go b/cmd/cnbBuild_generated.go index c714123ed9..945daa6600 100644 --- a/cmd/cnbBuild_generated.go +++ b/cmd/cnbBuild_generated.go @@ -26,13 +26,12 @@ type cnbBuildOptions struct { ContainerImageAlias string `json:"containerImageAlias,omitempty"` ContainerImageTag string `json:"containerImageTag,omitempty"` ContainerRegistryURL string `json:"containerRegistryUrl,omitempty"` - ContainerRegistryUser string `json:"containerRegistryUser,omitempty"` - ContainerRegistryPassword string `json:"containerRegistryPassword,omitempty"` Buildpacks []string `json:"buildpacks,omitempty"` BuildEnvVars map[string]interface{} `json:"buildEnvVars,omitempty"` Path string `json:"path,omitempty"` ProjectDescriptor string `json:"projectDescriptor,omitempty"` DockerConfigJSON string `json:"dockerConfigJSON,omitempty"` + DockerConfigJSONCPE 
string `json:"dockerConfigJSONCPE,omitempty"` CustomTLSCertificateLinks []string `json:"customTlsCertificateLinks,omitempty"` AdditionalTags []string `json:"additionalTags,omitempty"` Bindings map[string]interface{} `json:"bindings,omitempty"` @@ -158,6 +157,7 @@ func CnbBuildCommand() *cobra.Command { return err } log.RegisterSecret(stepConfig.DockerConfigJSON) + log.RegisterSecret(stepConfig.DockerConfigJSONCPE) if len(GeneralConfig.HookConfig.SentryConfig.Dsn) > 0 { sentryHook := log.NewSentryHook(GeneralConfig.HookConfig.SentryConfig.Dsn, GeneralConfig.CorrelationID) @@ -226,13 +226,12 @@ func addCnbBuildFlags(cmd *cobra.Command, stepConfig *cnbBuildOptions) { cmd.Flags().StringVar(&stepConfig.ContainerImageAlias, "containerImageAlias", os.Getenv("PIPER_containerImageAlias"), "Logical name used for this image.\n") cmd.Flags().StringVar(&stepConfig.ContainerImageTag, "containerImageTag", os.Getenv("PIPER_containerImageTag"), "Tag of the container which will be built") cmd.Flags().StringVar(&stepConfig.ContainerRegistryURL, "containerRegistryUrl", os.Getenv("PIPER_containerRegistryUrl"), "Container registry where the image should be pushed to.\n\n**Note**: `containerRegistryUrl` should include only the domain. If you want to publish an image under `docker.io/example/my-image`, you must set `containerRegistryUrl: \"docker.io\"` and `containerImageName: \"example/my-image\"`.\n") - cmd.Flags().StringVar(&stepConfig.ContainerRegistryUser, "containerRegistryUser", os.Getenv("PIPER_containerRegistryUser"), "Username of the container registry where the image should be pushed to - which will updated in a docker config json file. If a docker config json file is provided via parameter `dockerConfigJSON`, then the existing file will be enhanced") - cmd.Flags().StringVar(&stepConfig.ContainerRegistryPassword, "containerRegistryPassword", os.Getenv("PIPER_containerRegistryPassword"), "Password of the container registry where the image should be pushed to - which will updated in a docker config json file. If a docker config json file is provided via parameter `dockerConfigJSON`, then the existing file will be enhanced") cmd.Flags().StringSliceVar(&stepConfig.Buildpacks, "buildpacks", []string{}, "List of custom buildpacks to use in the form of `$HOSTNAME/$REPO[:$TAG]`.") cmd.Flags().StringVar(&stepConfig.Path, "path", os.Getenv("PIPER_path"), "Glob that should either point to a directory with your sources or one artifact in zip format.\nThis property determines the input to the buildpack.\n") cmd.Flags().StringVar(&stepConfig.ProjectDescriptor, "projectDescriptor", `project.toml`, "Relative path to the project.toml file.\nSee [buildpacks.io](https://buildpacks.io/docs/reference/config/project-descriptor/) for the reference.\nParameters passed to the cnbBuild step will take precedence over the parameters set in the project.toml file, except the `env` block.\nEnvironment variables declared in a project descriptor file, will be merged with the `buildEnvVars` property, with the `buildEnvVars` having a precedence.\n\n*Note*: The project descriptor path should be relative to what is set in the [path](#path) property. If the `path` property is pointing to a zip archive (e.g. 
jar file), project descriptor path will be relative to the root of the workspace.\n\n*Note*: Inline buildpacks (see [specification](https://buildpacks.io/docs/reference/config/project-descriptor/#build-_table-optional_)) are not supported yet.\n") cmd.Flags().StringVar(&stepConfig.DockerConfigJSON, "dockerConfigJSON", os.Getenv("PIPER_dockerConfigJSON"), "Path to the file `.docker/config.json` - this is typically provided by your CI/CD system. You can find more details about the Docker credentials in the [Docker documentation](https://docs.docker.com/engine/reference/commandline/login/).") + cmd.Flags().StringVar(&stepConfig.DockerConfigJSONCPE, "dockerConfigJSONCPE", os.Getenv("PIPER_dockerConfigJSONCPE"), "This property is intended only for reading the `dockerConfigJSON` from the Common Pipeline Environment. If you want to provide your own credentials, please refer to the [dockerConfigJSON](#dockerConfigJSON) property. If both properties are set, the config files will be merged, with the [dockerConfigJSON](#dockerConfigJSON) having higher priority.") cmd.Flags().StringSliceVar(&stepConfig.CustomTLSCertificateLinks, "customTlsCertificateLinks", []string{}, "List containing download links of custom TLS certificates. This is required to ensure trusted connections to registries with custom certificates.") cmd.Flags().StringSliceVar(&stepConfig.AdditionalTags, "additionalTags", []string{}, "List of tags which will be pushed to the registry (additionally to the provided `containerImageTag`), e.g. \"latest\".") @@ -312,34 +311,6 @@ func cnbBuildMetadata() config.StepData { Aliases: []config.Alias{{Name: "dockerRegistryUrl"}}, Default: os.Getenv("PIPER_containerRegistryUrl"), }, - { - Name: "containerRegistryUser", - ResourceRef: []config.ResourceReference{ - { - Name: "commonPipelineEnvironment", - Param: "container/repositoryUsername", - }, - }, - Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"}, - Type: "string", - Mandatory: false, - Aliases: []config.Alias{{Name: "dockerRegistryUser"}}, - Default: os.Getenv("PIPER_containerRegistryUser"), - }, - { - Name: "containerRegistryPassword", - ResourceRef: []config.ResourceReference{ - { - Name: "commonPipelineEnvironment", - Param: "container/repositoryPassword", - }, - }, - Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"}, - Type: "string", - Mandatory: false, - Aliases: []config.Alias{{Name: "dockerRegistryPassword"}}, - Default: os.Getenv("PIPER_containerRegistryPassword"), - }, { Name: "buildpacks", ResourceRef: []config.ResourceReference{ @@ -400,6 +371,20 @@ func cnbBuildMetadata() config.StepData { Aliases: []config.Alias{}, Default: os.Getenv("PIPER_dockerConfigJSON"), }, + { + Name: "dockerConfigJSONCPE", + ResourceRef: []config.ResourceReference{ + { + Name: "commonPipelineEnvironment", + Param: "custom/dockerConfigJSON", + }, + }, + Scope: []string{}, + Type: "string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: os.Getenv("PIPER_dockerConfigJSONCPE"), + }, { Name: "customTlsCertificateLinks", ResourceRef: []config.ResourceReference{}, diff --git a/cmd/cnbBuild_test.go b/cmd/cnbBuild_test.go index 98f8b0f691..2f1fa8226c 100644 --- a/cmd/cnbBuild_test.go +++ b/cmd/cnbBuild_test.go @@ -230,11 +230,8 @@ func TestRunCnbBuild(t *testing.T) { assert.Contains(t, runner.Calls[0].Params, fmt.Sprintf("%s/%s:%s", config.ContainerRegistryURL, config.ContainerImageName, config.ContainerImageTag)) assert.Contains(t, runner.Calls[0].Params, fmt.Sprintf("%s/%s:latest", config.ContainerRegistryURL, 
config.ContainerImageName)) - initialFileExists, _ := utils.FileExists("/path/to/test.json") - renamedFileExists, _ := utils.FileExists("/path/to/config.json") - - assert.False(t, initialFileExists) - assert.True(t, renamedFileExists) + copiedFileExists, _ := utils.FileExists("/tmp/config.json") + assert.True(t, copiedFileExists) }) t.Run("success case (customTlsCertificates)", func(t *testing.T) { @@ -420,7 +417,7 @@ func TestRunCnbBuild(t *testing.T) { addBuilderFiles(&utils) err := callCnbBuild(&config, &telemetry.CustomData{}, &utils, &cnbBuildCommonPipelineEnvironment{}, &piperhttp.Client{}) - assert.EqualError(t, err, "failed to generate CNB_REGISTRY_AUTH: could not read 'not-there/config.json'") + assert.EqualError(t, err, "failed to create/rename DockerConfigJSON file: cannot copy 'not-there/config.json': file does not exist") }) t.Run("error case: DockerConfigJSON file not there (not config.json)", func(t *testing.T) { @@ -436,7 +433,7 @@ func TestRunCnbBuild(t *testing.T) { addBuilderFiles(&utils) err := callCnbBuild(&config, &telemetry.CustomData{}, &utils, &cnbBuildCommonPipelineEnvironment{}, &piperhttp.Client{}) - assert.EqualError(t, err, "failed to rename DockerConfigJSON file 'not-there': renaming file 'not-there' is not supported, since it does not exist, or is not a leaf-entry") + assert.EqualError(t, err, "failed to create/rename DockerConfigJSON file: cannot copy 'not-there': file does not exist") }) t.Run("error case: dockerImage is not a valid builder", func(t *testing.T) { diff --git a/integration/integration_cnb_test.go b/integration/integration_cnb_test.go index 159cb0bfee..944651cf8e 100644 --- a/integration/integration_cnb_test.go +++ b/integration/integration_cnb_test.go @@ -44,7 +44,7 @@ func TestCNBIntegrationNPMProject(t *testing.T) { container := givenThisContainer(t, IntegrationTestDockerExecRunnerBundle{ Image: baseBuilder, User: "cnb", - TestDir: []string{"testdata"}, + TestDir: []string{"testdata", "TestCnbIntegration"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), Environment: map[string]string{ "PIPER_VAULTCREDENTIAL_DYNATRACE_API_KEY": "api-key-content", @@ -54,14 +54,14 @@ func TestCNBIntegrationNPMProject(t *testing.T) { container2 := givenThisContainer(t, IntegrationTestDockerExecRunnerBundle{ Image: baseBuilder, User: "cnb", - TestDir: []string{"testdata"}, + TestDir: []string{"testdata", "TestCnbIntegration"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), Environment: map[string]string{ "PIPER_VAULTCREDENTIAL_DYNATRACE_API_KEY": "api-key-content", }, }) - err := container.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--path", "TestCnbIntegration/project", "--customConfig", "TestCnbIntegration/config.yml", "--containerImageName", "node", "--containerImageTag", "0.0.1", "--dockerConfigJSON", "TestCnbIntegration/config.json", "--containerRegistryUrl", fmt.Sprintf("http://%s", registryURL), "--containerRegistryUser", "foo", "--containerRegistryPassword", "bar", "--defaultProcess", "greeter") + err := container.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--path", "project", "--customConfig", "config.yml", "--containerImageName", "node", "--containerImageTag", "0.0.1", "--dockerConfigJSON", "config.json", "--containerRegistryUrl", fmt.Sprintf("http://%s", registryURL), "--defaultProcess", "greeter") assert.NoError(t, err) container.assertHasOutput(t, "running command: /cnb/lifecycle/creator") container.assertHasOutput(t, "Selected Node Engine version 
(using BP_NODE_VERSION): 16") @@ -70,10 +70,10 @@ func TestCNBIntegrationNPMProject(t *testing.T) { container.assertHasOutput(t, "Setting default process type 'greeter'") container.assertHasOutput(t, "*** Images (sha256:") container.assertHasOutput(t, "SUCCESS") - container.assertFileContentEquals(t, "/project/TestCnbIntegration/config.json", "{\"auths\":{\"localhost:5000\":{\"auth\":\"Zm9vOmJhcg==\"},\"test.registry.io\":{}}}") + container.assertFileContentEquals(t, "/tmp/config.json", "{\n\t\"auths\": {\n\t\t\"test.registry.io\": {},\n\t\t\"test2.registry.io\": {}\n\t}\n}") container.terminate(t) - err = container2.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--path", "TestCnbIntegration/project", "--customConfig", "TestCnbIntegration/config.yml", "--containerImageName", "node", "--containerImageTag", "0.0.1", "--containerRegistryUrl", registryURL, "--containerRegistryUser", "foo", "--containerRegistryPassword", "bar", "--projectDescriptor", "project-with-id.toml") + err = container2.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--path", "project", "--customConfig", "config.yml", "--containerImageName", "node", "--containerImageTag", "0.0.1", "--containerRegistryUrl", registryURL, "--projectDescriptor", "project-with-id.toml") assert.NoError(t, err) container2.assertHasOutput(t, "running command: /cnb/lifecycle/creator") container2.assertHasOutput(t, "Selected Node Engine version (using BP_NODE_VERSION): 16") @@ -81,7 +81,7 @@ func TestCNBIntegrationNPMProject(t *testing.T) { container2.assertHasOutput(t, fmt.Sprintf("Saving %s/node:0.0.1", registryURL)) container2.assertHasOutput(t, "*** Images (sha256:") container2.assertHasOutput(t, "SUCCESS") - container2.assertFileContentEquals(t, "/tmp/config.json", "{\"auths\":{\"localhost:5000\":{\"auth\":\"Zm9vOmJhcg==\"}}}") + container2.assertFileContentEquals(t, "/tmp/config.json", "{\n\t\"auths\": {\n\t\t\"test2.registry.io\": {}\n\t}\n}") container2.terminate(t) } @@ -243,17 +243,16 @@ func TestCNBIntegrationBindings(t *testing.T) { container := givenThisContainer(t, IntegrationTestDockerExecRunnerBundle{ Image: baseBuilder, User: "cnb", - TestDir: []string{"testdata"}, + TestDir: []string{"testdata", "TestCnbIntegration"}, Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), Environment: map[string]string{ "PIPER_VAULTCREDENTIAL_DYNATRACE_API_KEY": "api-key-content", }, }) - err := container.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--customConfig", "TestCnbIntegration/config.yml", "--containerImageName", "not-found", "--containerImageTag", "0.0.1", "--containerRegistryUrl", registryURL, "--path", "TestMtaIntegration/maven") - assert.Error(t, err) + err := container.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--customConfig", "config.yml", "--containerImageName", "not-found", "--containerImageTag", "0.0.1", "--containerRegistryUrl", registryURL, "--path", "project") + assert.NoError(t, err) - container.assertHasOutput(t, "bindings/maven-settings/settings.xml: only whitespace content allowed before start tag") container.assertHasFiles(t, "/tmp/platform/bindings/dummy-binding/type", "/tmp/platform/bindings/dummy-binding/dummy.yml", diff --git a/integration/testdata/TestCnbIntegration/.pipeline/commonPipelineEnvironment/custom/dockerConfigJSON b/integration/testdata/TestCnbIntegration/.pipeline/commonPipelineEnvironment/custom/dockerConfigJSON new file mode 100644 index 0000000000..7370907038 --- /dev/null +++ 
b/integration/testdata/TestCnbIntegration/.pipeline/commonPipelineEnvironment/custom/dockerConfigJSON @@ -0,0 +1 @@ +.pipeline/config.json \ No newline at end of file diff --git a/integration/testdata/TestCnbIntegration/.pipeline/config.json b/integration/testdata/TestCnbIntegration/.pipeline/config.json new file mode 100644 index 0000000000..3316381b5e --- /dev/null +++ b/integration/testdata/TestCnbIntegration/.pipeline/config.json @@ -0,0 +1,5 @@ +{ + "auths": { + "test2.registry.io": {} + } +} \ No newline at end of file diff --git a/integration/testdata/TestCnbIntegration/config.yml b/integration/testdata/TestCnbIntegration/config.yml index e2428c1c83..da73f0510e 100644 --- a/integration/testdata/TestCnbIntegration/config.yml +++ b/integration/testdata/TestCnbIntegration/config.yml @@ -12,7 +12,7 @@ steps: type: dummy data: - key: dummy.yml - file: TestCnbIntegration/config.yml + file: config.yml dynatrace: type: Dynatrace data: diff --git a/pkg/docker/docker.go b/pkg/docker/docker.go index d2bb9d7241..dc614119fe 100644 --- a/pkg/docker/docker.go +++ b/pkg/docker/docker.go @@ -1,6 +1,7 @@ package docker import ( + "bytes" "encoding/base64" "encoding/json" "fmt" @@ -14,6 +15,9 @@ import ( "github.com/SAP/jenkins-library/pkg/piperutils" "github.com/pkg/errors" + "github.com/docker/cli/cli/config" + "github.com/docker/cli/cli/config/configfile" + cranecmd "github.com/google/go-containerregistry/cmd/crane/cmd" "github.com/google/go-containerregistry/pkg/authn" "github.com/google/go-containerregistry/pkg/crane" @@ -27,6 +31,61 @@ type AuthEntry struct { Auth string `json:"auth,omitempty"` } +// MergeDockerConfigJSON merges two docker config.json files. +func MergeDockerConfigJSON(sourcePath, targetPath string, utils piperutils.FileUtils) error { + if exists, _ := utils.FileExists(sourcePath); !exists { + return fmt.Errorf("source dockerConfigJSON file %q does not exist", sourcePath) + } + + sourceReader, err := utils.Open(sourcePath) + if err != nil { + return errors.Wrapf(err, "failed to open file %q", sourcePath) + } + defer sourceReader.Close() + + sourceConfig, err := config.LoadFromReader(sourceReader) + if err != nil { + return errors.Wrapf(err, "failed to read file %q", sourcePath) + } + + var targetConfig *configfile.ConfigFile + if exists, _ := utils.FileExists(targetPath); !exists { + log.Entry().Warnf("target dockerConfigJSON file %q does not exist, creating a new one", sourcePath) + targetConfig = configfile.New(targetPath) + } else { + targetReader, err := utils.Open(targetPath) + if err != nil { + return errors.Wrapf(err, "failed to open file %q", targetReader) + } + defer targetReader.Close() + targetConfig, err = config.LoadFromReader(targetReader) + if err != nil { + return errors.Wrapf(err, "failed to read file %q", targetPath) + } + } + + for registry, auth := range sourceConfig.GetAuthConfigs() { + targetConfig.AuthConfigs[registry] = auth + } + + buf := bytes.NewBuffer(nil) + err = targetConfig.SaveToWriter(buf) + if err != nil { + return errors.Wrapf(err, "failed to save file %q", targetPath) + } + + err = utils.MkdirAll(filepath.Dir(targetPath), 0777) + if err != nil { + return fmt.Errorf("failed to create directory path for the file %q: %w", targetPath, err) + } + err = utils.FileWrite(targetPath, buf.Bytes(), 0666) + if err != nil { + return fmt.Errorf("failed to write %q: %w", targetPath, err) + } + + return nil +} + // CreateDockerConfigJSON creates / updates a Docker config.json with registry credentials func CreateDockerConfigJSON(registryURL, username, 
password, targetPath, configPath string, utils piperutils.FileUtils) (string, error) { diff --git a/pkg/docker/docker_test.go b/pkg/docker/docker_test.go index 399651fd35..d3d8d89922 100644 --- a/pkg/docker/docker_test.go +++ b/pkg/docker/docker_test.go @@ -148,3 +148,47 @@ func TestImageListWithFilePath(t *testing.T) { }) } } + +func TestMergeDockerConfigJSON(t *testing.T) { + t.Parallel() + + t.Run("success - both files present", func(t *testing.T) { + sourceFile := "/tmp/source.json" + targetFile := "/tmp/target.json" + expectedContent := "{\n\t\"auths\": {\n\t\t\"bar\": {},\n\t\t\"foo\": {\n\t\t\t\"auth\": \"Zm9vOmJhcg==\"\n\t\t}\n\t}\n}" + + utilsMock := mock.FilesMock{} + utilsMock.AddFile(targetFile, []byte("{\"auths\": {\"foo\": {\"auth\": \"dGVzdDp0ZXN0\"}}}")) + utilsMock.AddFile(sourceFile, []byte("{\"auths\": {\"bar\": {}, \"foo\": {\"auth\": \"Zm9vOmJhcg==\"}}}")) + + err := MergeDockerConfigJSON(sourceFile, targetFile, &utilsMock) + assert.NoError(t, err) + + content, err := utilsMock.FileRead(targetFile) + assert.NoError(t, err) + assert.Equal(t, expectedContent, string(content)) + }) + + t.Run("success - target file is missing", func(t *testing.T) { + sourceFile := "/tmp/source.json" + targetFile := "/tmp/target.json" + expectedContent := "{\n\t\"auths\": {\n\t\t\"bar\": {},\n\t\t\"foo\": {\n\t\t\t\"auth\": \"Zm9vOmJhcg==\"\n\t\t}\n\t}\n}" + + utilsMock := mock.FilesMock{} + utilsMock.AddFile(sourceFile, []byte("{\"auths\": {\"bar\": {}, \"foo\": {\"auth\": \"Zm9vOmJhcg==\"}}}")) + + err := MergeDockerConfigJSON(sourceFile, targetFile, &utilsMock) + assert.NoError(t, err) + + content, err := utilsMock.FileRead(targetFile) + assert.NoError(t, err) + assert.Equal(t, expectedContent, string(content)) + }) + + t.Run("error - source file is missing", func(t *testing.T) { + utilsMock := mock.FilesMock{} + err := MergeDockerConfigJSON("missing-file", "also-missing-file", &utilsMock) + assert.Error(t, err) + assert.Equal(t, "source dockerConfigJSON file \"missing-file\" does not exist", err.Error()) + }) +} diff --git a/resources/metadata/cnbBuild.yaml b/resources/metadata/cnbBuild.yaml index 809ca5dbdc..9fe08524f7 100644 --- a/resources/metadata/cnbBuild.yaml +++ b/resources/metadata/cnbBuild.yaml @@ -95,32 +95,6 @@ spec: resourceRef: - name: commonPipelineEnvironment param: container/registryUrl - - name: containerRegistryUser - aliases: - - name: dockerRegistryUser - type: string - description: Username of the container registry where the image should be pushed to - which will updated in a docker config json file. If a docker config json file is provided via parameter `dockerConfigJSON`, then the existing file will be enhanced - scope: - - GENERAL - - PARAMETERS - - STAGES - - STEPS - resourceRef: - - name: commonPipelineEnvironment - param: container/repositoryUsername - - name: containerRegistryPassword - aliases: - - name: dockerRegistryPassword - type: string - description: Password of the container registry where the image should be pushed to - which will updated in a docker config json file. If a docker config json file is provided via parameter `dockerConfigJSON`, then the existing file will be enhanced - scope: - - GENERAL - - PARAMETERS - - STAGES - - STEPS - resourceRef: - - name: commonPipelineEnvironment - param: container/repositoryPassword - name: buildpacks type: "[]string" description: List of custom buildpacks to use in the form of `$HOSTNAME/$REPO[:$TAG]`. 
@@ -181,6 +155,13 @@ spec: - type: vaultSecretFile name: dockerConfigFileVaultSecretName default: docker-config + - name: dockerConfigJSONCPE + type: string + description: This property is intended only for reading the `dockerConfigJSON` from the Common Pipeline Environment. If you want to provide your own credentials, please refer to the [dockerConfigJSON](#dockerConfigJSON) property. If both properties are set, the config files will be merged, with the [dockerConfigJSON](#dockerConfigJSON) having higher priority. + secret: true + resourceRef: + - name: commonPipelineEnvironment + param: custom/dockerConfigJSON - name: customTlsCertificateLinks type: "[]string" description: List containing download links of custom TLS certificates. This is required to ensure trusted connections to registries with custom certificates. From 8fe1d5553e6c805a56adfc46cacdd8bb481deccb Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 4 Jul 2023 14:35:44 +0200 Subject: [PATCH 28/36] chore(deps): update actions/setup-python action to v4 (#4437) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/verify-yaml.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/verify-yaml.yml b/.github/workflows/verify-yaml.yml index ee13aeb3d8..d2873b0425 100644 --- a/.github/workflows/verify-yaml.yml +++ b/.github/workflows/verify-yaml.yml @@ -15,7 +15,7 @@ jobs: - uses: styfle/cancel-workflow-action@0.10.0 - uses: actions/checkout@master - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 with: python-version: 3.8 From c7ab4240e94be05da0cf052554d287f5098b6c5a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 4 Jul 2023 13:00:11 +0000 Subject: [PATCH 29/36] build(deps): bump github.com/docker/distribution (#4359) Bumps [github.com/docker/distribution](https://github.com/docker/distribution) from 2.8.1+incompatible to 2.8.2+incompatible. - [Release notes](https://github.com/docker/distribution/releases) - [Commits](https://github.com/docker/distribution/compare/v2.8.1...v2.8.2) --- updated-dependencies: - dependency-name: github.com/docker/distribution dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Jordi van Liempt <35920075+jliempt@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 50e48dc45f..382c52b565 100644 --- a/go.mod +++ b/go.mod @@ -140,7 +140,7 @@ require ( github.com/digitalocean/godo v1.7.5 // indirect github.com/dimchansky/utfbom v1.1.1 // indirect github.com/dnaeon/go-vcr v1.2.0 // indirect - github.com/docker/distribution v2.8.1+incompatible // indirect + github.com/docker/distribution v2.8.2+incompatible // indirect github.com/docker/docker v20.10.17+incompatible // indirect github.com/docker/docker-credential-helpers v0.6.4 // indirect github.com/docker/go-connections v0.4.0 // indirect diff --git a/go.sum b/go.sum index c7e6de3a09..95d5f98776 100644 --- a/go.sum +++ b/go.sum @@ -588,8 +588,8 @@ github.com/docker/cli v20.10.17+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hH github.com/docker/distribution v0.0.0-20190905152932-14b96e55d84c/go.mod h1:0+TTO4EOBfRPhZXAeF1Vu+W3hHZ8eLp8PgKVZlcvtFY= github.com/docker/distribution v2.7.1-0.20190205005809-0d3efadf0154+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/distribution v2.8.1+incompatible h1:Q50tZOPR6T/hjNsyc9g8/syEs6bk8XXApsHjKukMl68= -github.com/docker/distribution v2.8.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/distribution v2.8.2+incompatible h1:T3de5rq0dB1j30rp0sA2rER+m322EBzniBPB6ZIzuh8= +github.com/docker/distribution v2.8.2+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= github.com/docker/docker v1.4.2-0.20190924003213-a8608b5b67c7/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/docker v1.4.2-0.20200319182547-c7ad2b866182/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/docker v20.10.5+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= From 920c2480e704dfe95fed5822fc745d05e5b95bce Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 4 Jul 2023 15:53:40 +0200 Subject: [PATCH 30/36] chore(deps): update actions/stale action to v8 (#4438) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/stale.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index e092a54ef5..7f1c4c9324 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: styfle/cancel-workflow-action@0.10.0 - - uses: actions/stale@v4 + - uses: actions/stale@v8 with: repo-token: ${{ secrets.GITHUB_TOKEN }} stale-issue-message: 'Thank you for your contribution! This issue is stale because it has been open 60 days with no activity. In order to keep it open, please remove stale label or add a comment within the next 10 days. If you need a Piper team member to remove the stale label make sure to add `@SAP/jenkins-library-team` to your comment.' 
From fa11eb47aa842329df60b427b177ad3e23515767 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 4 Jul 2023 16:10:54 +0200 Subject: [PATCH 31/36] build(deps): bump github.com/opencontainers/runc from 1.1.2 to 1.1.5 (#4307) Bumps [github.com/opencontainers/runc](https://github.com/opencontainers/runc) from 1.1.2 to 1.1.5. - [Release notes](https://github.com/opencontainers/runc/releases) - [Changelog](https://github.com/opencontainers/runc/blob/v1.1.5/CHANGELOG.md) - [Commits](https://github.com/opencontainers/runc/compare/v1.1.2...v1.1.5) --- updated-dependencies: - dependency-name: github.com/opencontainers/runc dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Jordi van Liempt <35920075+jliempt@users.noreply.github.com> --- go.mod | 2 +- go.sum | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/go.mod b/go.mod index 382c52b565..d0057e6bfd 100644 --- a/go.mod +++ b/go.mod @@ -263,7 +263,7 @@ require ( github.com/onsi/ginkgo v1.16.5 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect github.com/opencontainers/image-spec v1.0.3-0.20220114050600-8b9d41f48198 // indirect - github.com/opencontainers/runc v1.1.2 // indirect + github.com/opencontainers/runc v1.1.5 // indirect github.com/opentracing/opentracing-go v1.2.0 // indirect github.com/oracle/oci-go-sdk v13.1.0+incompatible // indirect github.com/packethost/packngo v0.1.1-0.20180711074735-b9cb5096f54c // indirect diff --git a/go.sum b/go.sum index 95d5f98776..a0f987ce80 100644 --- a/go.sum +++ b/go.sum @@ -1664,8 +1664,8 @@ github.com/opencontainers/runc v1.0.0-rc8.0.20190926000215-3e425f80a8c9/go.mod h github.com/opencontainers/runc v1.0.0-rc9/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= github.com/opencontainers/runc v1.0.0-rc93/go.mod h1:3NOsor4w32B2tC0Zbl8Knk4Wg84SM2ImC1fxBuqJ/H0= github.com/opencontainers/runc v1.0.2/go.mod h1:aTaHFFwQXuA71CiyxOdFFIorAoemI04suvGRQFzWTD0= -github.com/opencontainers/runc v1.1.2 h1:2VSZwLx5k/BfsBxMMipG/LYUnmqOD/BPkIVgQUcTlLw= -github.com/opencontainers/runc v1.1.2/go.mod h1:Tj1hFw6eFWp/o33uxGf5yF2BX5yz2Z6iptFpuvbbKqc= +github.com/opencontainers/runc v1.1.5 h1:L44KXEpKmfWDcS02aeGm8QNTFXTo2D+8MYGDIJ/GDEs= +github.com/opencontainers/runc v1.1.5/go.mod h1:1J5XiS+vdZ3wCyZybsuxXZWGrgSr8fFJHLXuG2PsnNg= github.com/opencontainers/runtime-spec v0.1.2-0.20190507144316-5b71a03e2700/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= github.com/opencontainers/runtime-spec v1.0.1/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= github.com/opencontainers/runtime-spec v1.0.2-0.20190207185410-29686dbc5559/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= @@ -1846,7 +1846,7 @@ github.com/sean-/pager v0.0.0-20180208200047-666be9bf53b5/go.mod h1:BeybITEsBEg6 github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/seccomp/libseccomp-golang v0.9.1/go.mod h1:GbW5+tmTXfcxTToHLXlScSlAvWlF4P2Ca7zGrPiEpWo= -github.com/seccomp/libseccomp-golang v0.9.2-0.20210429002308-3879420cc921/go.mod h1:JA8cRccbGaA1s33RQf7Y1+q9gHmZX1yB/z9WDN1C6fg= +github.com/seccomp/libseccomp-golang v0.9.2-0.20220502022130-f33da4d89646/go.mod h1:JA8cRccbGaA1s33RQf7Y1+q9gHmZX1yB/z9WDN1C6fg= github.com/securego/gosec/v2 v2.9.1/go.mod 
h1:oDcDLcatOJxkCGaCaq8lua1jTnYf6Sou4wdiJ1n4iHc= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= From 3e0da62a0b21ac0949fe473926e1ecd86cc3b9fa Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 4 Jul 2023 17:34:16 +0200 Subject: [PATCH 32/36] chore(deps): update actions/checkout action to v3 (#4440) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/documentation.yml | 2 +- .github/workflows/markdown.yml | 2 +- .github/workflows/release-go.yml | 2 +- .github/workflows/update-go-dependencies.yml | 2 +- .github/workflows/upload-go-master.yml | 2 +- .github/workflows/verify-go.yml | 10 +++++----- .github/workflows/verify-groovy.yml | 2 +- 7 files changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index 1f511785b9..9ca4be3d20 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -14,7 +14,7 @@ jobs: steps: - uses: styfle/cancel-workflow-action@0.10.0 - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - uses: actions/setup-java@v1 with: diff --git a/.github/workflows/markdown.yml b/.github/workflows/markdown.yml index 61c1e3d9fa..bb049f6ea5 100644 --- a/.github/workflows/markdown.yml +++ b/.github/workflows/markdown.yml @@ -16,7 +16,7 @@ jobs: name: 'Format' steps: - uses: styfle/cancel-workflow-action@0.10.0 - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Markdown Linting uses: nosborn/github-action-markdown-cli@v1.1.1 with: diff --git a/.github/workflows/release-go.yml b/.github/workflows/release-go.yml index 4bd06d3bd5..0ba39cd3ce 100644 --- a/.github/workflows/release-go.yml +++ b/.github/workflows/release-go.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: styfle/cancel-workflow-action@0.10.0 - - uses: actions/checkout@v1 + - uses: actions/checkout@v3 # Workaround for https://github.com/SAP/jenkins-library/issues/1723, build only works with jdk8 currently - uses: actions/setup-java@v1 with: diff --git a/.github/workflows/update-go-dependencies.yml b/.github/workflows/update-go-dependencies.yml index 8618c1fb77..6ba075ac96 100644 --- a/.github/workflows/update-go-dependencies.yml +++ b/.github/workflows/update-go-dependencies.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: styfle/cancel-workflow-action@0.10.0 - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - uses: actions/setup-go@v1 with: go-version: '1.18.x' diff --git a/.github/workflows/upload-go-master.yml b/.github/workflows/upload-go-master.yml index 7eaff4e109..cbacd19448 100644 --- a/.github/workflows/upload-go-master.yml +++ b/.github/workflows/upload-go-master.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: styfle/cancel-workflow-action@0.10.0 - - uses: actions/checkout@v1 + - uses: actions/checkout@v3 - uses: actions/setup-go@v1 with: go-version: '1.18.x' diff --git a/.github/workflows/verify-go.yml b/.github/workflows/verify-go.yml index 469a2489d7..2dbe8e8588 100644 --- a/.github/workflows/verify-go.yml +++ b/.github/workflows/verify-go.yml @@ -24,7 +24,7 @@ jobs: restore-keys: | ${{ runner.os }}-golang- - name: checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: unit-test uses: paambaati/codeclimate-action@v4 env: @@ -49,7 +49,7 @@ jobs: ${{ runner.os }}-golang-format 
${{ runner.os }}-golang- - name: checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: format run: go fmt ./... - name: verify @@ -62,7 +62,7 @@ jobs: go-version: '1.19.x' # action requires go@1.19 - name: checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 1 - name: staticcheck @@ -85,7 +85,7 @@ jobs: ${{ runner.os }}-golang-generate ${{ runner.os }}-golang- - name: checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: generate run: go run pkg/generator/step-metadata.go - name: verify @@ -105,7 +105,7 @@ jobs: ${{ runner.os }}-golang-dependencies ${{ runner.os }}-golang- - name: checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: cleanup dependencies run: go mod tidy - name: verify diff --git a/.github/workflows/verify-groovy.yml b/.github/workflows/verify-groovy.yml index 45d28c9fc9..9f4dbab913 100644 --- a/.github/workflows/verify-groovy.yml +++ b/.github/workflows/verify-groovy.yml @@ -14,7 +14,7 @@ jobs: steps: - uses: styfle/cancel-workflow-action@0.10.0 - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - uses: actions/setup-java@v1 with: From 380144b457a10be0d368c16e7627bb9ade4c0b13 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 5 Jul 2023 10:14:28 +0200 Subject: [PATCH 33/36] chore(deps): update actions/setup-go action to v4 (#4435) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/documentation.yml | 2 +- .github/workflows/integration-tests-pr.yml | 6 +++--- .github/workflows/integration-tests.yml | 6 +++--- .github/workflows/update-go-dependencies.yml | 2 +- .github/workflows/upload-go-master.yml | 2 +- .github/workflows/verify-go.yml | 10 +++++----- 6 files changed, 14 insertions(+), 14 deletions(-) diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index 9ca4be3d20..b24a3a754e 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -20,7 +20,7 @@ jobs: with: java-version: '1.8' - - uses: actions/setup-go@v1 + - uses: actions/setup-go@v4 with: go-version: '1.18.x' diff --git a/.github/workflows/integration-tests-pr.yml b/.github/workflows/integration-tests-pr.yml index a50b696bb3..1aaad2d2a5 100644 --- a/.github/workflows/integration-tests-pr.yml +++ b/.github/workflows/integration-tests-pr.yml @@ -74,7 +74,7 @@ jobs: - uses: actions/checkout@v3 with: ref: ${{ needs.start.outputs.sha }} - - uses: actions/setup-go@v3 + - uses: actions/setup-go@v4 with: go-version: ${{ needs.start.outputs.go_version }} - name: Build @@ -98,7 +98,7 @@ jobs: - uses: actions/checkout@v3 with: ref: ${{ needs.start.outputs.sha }} - - uses: actions/setup-go@v3 + - uses: actions/setup-go@v4 with: go-version: ${{ needs.start.outputs.go_version }} - name: Build @@ -130,7 +130,7 @@ jobs: - uses: actions/checkout@v3 with: ref: ${{ needs.start.outputs.sha }} - - uses: actions/setup-go@v3 + - uses: actions/setup-go@v4 with: go-version: ${{ needs.start.outputs.go_version }} - name: Download Piper binary diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index fc362bf370..12b7e3cafe 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -45,7 +45,7 @@ jobs: - uses: actions/checkout@v3 with: ref: ${{ needs.start.outputs.sha }} - - uses: actions/setup-go@v3 + - uses: actions/setup-go@v4 with: go-version: ${{ needs.start.outputs.go_version }} 
- name: Build @@ -70,7 +70,7 @@ jobs: - uses: actions/checkout@v3 with: ref: ${{ needs.start.outputs.sha }} - - uses: actions/setup-go@v3 + - uses: actions/setup-go@v4 with: go-version: ${{ needs.start.outputs.go_version }} - name: Build @@ -102,7 +102,7 @@ jobs: - uses: actions/checkout@v3 with: ref: ${{ needs.start.outputs.sha }} - - uses: actions/setup-go@v3 + - uses: actions/setup-go@v4 with: go-version: ${{ needs.start.outputs.go_version }} - name: Download Piper binary diff --git a/.github/workflows/update-go-dependencies.yml b/.github/workflows/update-go-dependencies.yml index 6ba075ac96..b79ab238d4 100644 --- a/.github/workflows/update-go-dependencies.yml +++ b/.github/workflows/update-go-dependencies.yml @@ -11,7 +11,7 @@ jobs: steps: - uses: styfle/cancel-workflow-action@0.10.0 - uses: actions/checkout@v3 - - uses: actions/setup-go@v1 + - uses: actions/setup-go@v4 with: go-version: '1.18.x' - name: Perform update diff --git a/.github/workflows/upload-go-master.yml b/.github/workflows/upload-go-master.yml index cbacd19448..f910b16440 100644 --- a/.github/workflows/upload-go-master.yml +++ b/.github/workflows/upload-go-master.yml @@ -11,7 +11,7 @@ jobs: steps: - uses: styfle/cancel-workflow-action@0.10.0 - uses: actions/checkout@v3 - - uses: actions/setup-go@v1 + - uses: actions/setup-go@v4 with: go-version: '1.18.x' - env: diff --git a/.github/workflows/verify-go.yml b/.github/workflows/verify-go.yml index 2dbe8e8588..c5e3d19042 100644 --- a/.github/workflows/verify-go.yml +++ b/.github/workflows/verify-go.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: styfle/cancel-workflow-action@0.10.0 - - uses: actions/setup-go@v1 + - uses: actions/setup-go@v4 with: go-version: '1.18.x' - name: Cache Golang Packages @@ -37,7 +37,7 @@ jobs: format: runs-on: ubuntu-latest steps: - - uses: actions/setup-go@v1 + - uses: actions/setup-go@v4 with: go-version: '1.18.x' - name: Cache Golang Packages @@ -57,7 +57,7 @@ jobs: lint: runs-on: ubuntu-latest steps: - - uses: actions/setup-go@v1 + - uses: actions/setup-go@v4 with: go-version: '1.19.x' # action requires go@1.19 @@ -73,7 +73,7 @@ jobs: generate: runs-on: ubuntu-latest steps: - - uses: actions/setup-go@v1 + - uses: actions/setup-go@v4 with: go-version: '1.18.x' - name: Cache Golang Packages @@ -93,7 +93,7 @@ jobs: dependencies: runs-on: ubuntu-latest steps: - - uses: actions/setup-go@v1 + - uses: actions/setup-go@v4 with: go-version: '1.18.x' - name: Cache Golang Packages From 9b1aebfd13314f5ed843c5d7cfd762d954aefb6a Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 5 Jul 2023 11:35:34 +0200 Subject: [PATCH 34/36] chore(deps): update actions/setup-java action to v3 (#4436) * chore(deps): update actions/setup-java action to v3 * Apply suggestions from code review --------- Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Co-authored-by: Christopher Fenner <26137398+CCFenner@users.noreply.github.com> --- .github/workflows/consumer-tests-pr.yml | 3 ++- .github/workflows/consumer-tests.yml | 3 ++- .github/workflows/documentation.yml | 5 +++-- .github/workflows/release-go.yml | 5 +++-- .github/workflows/verify-groovy.yml | 5 +++-- 5 files changed, 13 insertions(+), 8 deletions(-) diff --git a/.github/workflows/consumer-tests-pr.yml b/.github/workflows/consumer-tests-pr.yml index 7b14193a72..8700cbbd81 100644 --- a/.github/workflows/consumer-tests-pr.yml +++ b/.github/workflows/consumer-tests-pr.yml @@ -44,9 +44,10 @@ jobs: with: repository: ${{ 
steps.repository.outputs.repository }} ref: ${{ steps.branch_name.outputs.branch_name }} - - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: java-version: 11 + distribution: zulu - name: Install Groovy run: | sudo apt-get update diff --git a/.github/workflows/consumer-tests.yml b/.github/workflows/consumer-tests.yml index 35e1b0121d..5d7be028f8 100644 --- a/.github/workflows/consumer-tests.yml +++ b/.github/workflows/consumer-tests.yml @@ -14,9 +14,10 @@ jobs: steps: - uses: styfle/cancel-workflow-action@0.10.0 - uses: actions/checkout@v3 - - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: java-version: 11 + distribution: zulu - name: Install Groovy run: | sudo apt-get update diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index b24a3a754e..8a46e53ad5 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -16,9 +16,10 @@ jobs: - uses: actions/checkout@v3 - - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: - java-version: '1.8' + java-version: 8 + distribution: zulu - uses: actions/setup-go@v4 with: diff --git a/.github/workflows/release-go.yml b/.github/workflows/release-go.yml index 0ba39cd3ce..ba4f5a6802 100644 --- a/.github/workflows/release-go.yml +++ b/.github/workflows/release-go.yml @@ -14,9 +14,10 @@ jobs: - uses: styfle/cancel-workflow-action@0.10.0 - uses: actions/checkout@v3 # Workaround for https://github.com/SAP/jenkins-library/issues/1723, build only works with jdk8 currently - - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: - java-version: '8' + java-version: 8 + distribution: zulu - name: Prepare Release run: | curl --insecure --silent --location --write-out '%{http_code}' --output ./piper_master https://github.com/SAP/jenkins-library/releases/latest/download/piper_master diff --git a/.github/workflows/verify-groovy.yml b/.github/workflows/verify-groovy.yml index 9f4dbab913..ea017bd342 100644 --- a/.github/workflows/verify-groovy.yml +++ b/.github/workflows/verify-groovy.yml @@ -16,9 +16,10 @@ jobs: - uses: actions/checkout@v3 - - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: - java-version: 1.8 + java-version: 8 + distribution: zulu - name: Cache Maven Packages uses: actions/cache@v1 From b9bb5265ca1b240ec080596e97c538795239c108 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 5 Jul 2023 12:08:14 +0200 Subject: [PATCH 35/36] chore(deps): update actions/cache action to v3 (#4439) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/documentation.yml | 4 ++-- .github/workflows/verify-go.yml | 8 ++++---- .github/workflows/verify-groovy.yml | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index 8a46e53ad5..ffd80b2667 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -29,7 +29,7 @@ jobs: run: sudo apt-get update && sudo apt-get install groovy -y - name: Cache Maven Packages - uses: actions/cache@v1 + uses: actions/cache@v3 with: path: ~/.m2/repository key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} @@ -37,7 +37,7 @@ jobs: ${{ runner.os }}-maven- - name: Cache Go Packages - uses: actions/cache@v1 + uses: actions/cache@v3 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} diff --git a/.github/workflows/verify-go.yml 
b/.github/workflows/verify-go.yml index c5e3d19042..9e0d1d7e85 100644 --- a/.github/workflows/verify-go.yml +++ b/.github/workflows/verify-go.yml @@ -17,7 +17,7 @@ jobs: with: go-version: '1.18.x' - name: Cache Golang Packages - uses: actions/cache@v1 + uses: actions/cache@v3 with: path: ~/go/pkg/mod key: ${{ runner.os }}-golang-${{ hashFiles('go.sum') }} @@ -41,7 +41,7 @@ jobs: with: go-version: '1.18.x' - name: Cache Golang Packages - uses: actions/cache@v1 + uses: actions/cache@v3 with: path: ~/go/pkg/mod key: ${{ runner.os }}-golang-format${{ hashFiles('go.sum') }} @@ -77,7 +77,7 @@ jobs: with: go-version: '1.18.x' - name: Cache Golang Packages - uses: actions/cache@v1 + uses: actions/cache@v3 with: path: ~/go/pkg/mod key: ${{ runner.os }}-golang-generate${{ hashFiles('go.sum') }} @@ -97,7 +97,7 @@ jobs: with: go-version: '1.18.x' - name: Cache Golang Packages - uses: actions/cache@v1 + uses: actions/cache@v3 with: path: ~/go/pkg/mod key: ${{ runner.os }}-golang-dependencies${{ hashFiles('go.sum') }} diff --git a/.github/workflows/verify-groovy.yml b/.github/workflows/verify-groovy.yml index ea017bd342..ca64a9c968 100644 --- a/.github/workflows/verify-groovy.yml +++ b/.github/workflows/verify-groovy.yml @@ -22,7 +22,7 @@ jobs: distribution: zulu - name: Cache Maven Packages - uses: actions/cache@v1 + uses: actions/cache@v3 with: path: ~/.m2/repository key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} From 610e21230677cb91f1a8056de392b13cd913c1bc Mon Sep 17 00:00:00 2001 From: Pavel Busko Date: Thu, 6 Jul 2023 11:34:05 +0200 Subject: [PATCH 36/36] feat(cnbBuild) Add support for pre and post-buildpacks (#4448) * Add pre and post buildpacks Co-authored-by: Johannes Dillmann Co-authored-by: Ralf Pannemans Co-authored-by: Pavel Busko * fix integration tests Co-authored-by: Pavel Busko Co-authored-by: Ralf Pannemans * simplify if clauses Co-authored-by: Pavel Busko --------- Co-authored-by: Johannes Dillmann Co-authored-by: Ralf Pannemans --- cmd/cnbBuild.go | 41 ++++- cmd/cnbBuild_generated.go | 34 +++- cmd/cnbBuild_test.go | 124 ++++++++++++-- go.mod | 3 +- go.sum | 2 - .../github_actions_integration_test_list.yml | 1 + integration/integration_cnb_test.go | 32 +++- .../testdata/TestCnbIntegration/config.yml | 3 + .../TestCnbIntegration/project/package.json | 1 + pkg/cnbutils/buildpack.go | 89 +++++++--- pkg/cnbutils/buildpack_test.go | 50 +++++- pkg/cnbutils/order.go | 76 ++++++++- pkg/cnbutils/order_test.go | 156 +++++++++++++++++- pkg/cnbutils/project/descriptor.go | 102 ++++++------ pkg/cnbutils/project/descriptor_test.go | 100 ++++++++++- pkg/cnbutils/project/metadata/metadata.go | 2 +- .../project/metadata/metadata_test.go | 9 +- pkg/cnbutils/project/types/types.go | 58 +++++++ pkg/cnbutils/project/v01/project.go | 30 ++++ pkg/cnbutils/project/v02/project.go | 78 +++++++++ pkg/cnbutils/report.go | 2 +- pkg/cnbutils/report_test.go | 2 +- pkg/mock/dockerClient.go | 12 +- resources/metadata/cnbBuild.yaml | 22 ++- 24 files changed, 913 insertions(+), 116 deletions(-) create mode 100644 pkg/cnbutils/project/types/types.go create mode 100644 pkg/cnbutils/project/v01/project.go create mode 100644 pkg/cnbutils/project/v02/project.go diff --git a/cmd/cnbBuild.go b/cmd/cnbBuild.go index 4ef75a4c07..9c80aa8ab9 100644 --- a/cmd/cnbBuild.go +++ b/cmd/cnbBuild.go @@ -112,10 +112,29 @@ func processConfigs(main cnbBuildOptions, multipleImages []map[string]interface{ return result, nil } -func setCustomBuildpacks(bpacks []string, dockerCreds string, utils cnbutils.BuildUtils) (string, string, 
error) { +func setCustomBuildpacks(bpacks, preBuildpacks, postBuildpacks []string, dockerCreds string, utils cnbutils.BuildUtils) (string, string, error) { buildpacksPath := "/tmp/buildpacks" orderPath := "/tmp/buildpacks/order.toml" - newOrder, err := cnbutils.DownloadBuildpacks(buildpacksPath, bpacks, dockerCreds, utils) + err := cnbutils.DownloadBuildpacks(buildpacksPath, append(bpacks, append(preBuildpacks, postBuildpacks...)...), dockerCreds, utils) + if err != nil { + return "", "", err + } + + if len(bpacks) == 0 && (len(postBuildpacks) > 0 || len(preBuildpacks) > 0) { + matches, err := utils.Glob("/cnb/buildpacks/*") + if err != nil { + return "", "", err + } + + for _, match := range matches { + err = cnbutils.CreateVersionSymlinks(buildpacksPath, match, utils) + if err != nil { + return "", "", err + } + } + } + + newOrder, err := cnbutils.CreateOrder(bpacks, preBuildpacks, postBuildpacks, dockerCreds, utils) if err != nil { return "", "", err } @@ -475,10 +494,18 @@ func runCnbBuild(config *cnbBuildOptions, cnbTelemetry *cnbBuildTelemetry, utils config.mergeEnvVars(descriptor.EnvVars) - if (config.Buildpacks == nil || len(config.Buildpacks) == 0) && len(descriptor.Buildpacks) > 0 { + if len(config.Buildpacks) == 0 { config.Buildpacks = descriptor.Buildpacks } + if len(config.PreBuildpacks) == 0 { + config.PreBuildpacks = descriptor.PreBuildpacks + } + + if len(config.PostBuildpacks) == 0 { + config.PostBuildpacks = descriptor.PostBuildpacks + } + if descriptor.Exclude != nil { exclude = descriptor.Exclude } @@ -563,11 +590,13 @@ func runCnbBuild(config *cnbBuildOptions, cnbTelemetry *cnbBuildTelemetry, utils metadata.WriteProjectMetadata(GeneralConfig.EnvRootPath, utils) var buildpacksPath = "/cnb/buildpacks" - var orderPath = "/cnb/order.toml" + var orderPath = cnbutils.DefaultOrderPath - if config.Buildpacks != nil && len(config.Buildpacks) > 0 { + if len(config.Buildpacks) > 0 || len(config.PreBuildpacks) > 0 || len(config.PostBuildpacks) > 0 { log.Entry().Infof("Setting custom buildpacks: '%v'", config.Buildpacks) - buildpacksPath, orderPath, err = setCustomBuildpacks(config.Buildpacks, config.DockerConfigJSON, utils) + log.Entry().Infof("Pre-buildpacks: '%v'", config.PreBuildpacks) + log.Entry().Infof("Post-buildpacks: '%v'", config.PostBuildpacks) + buildpacksPath, orderPath, err = setCustomBuildpacks(config.Buildpacks, config.PreBuildpacks, config.PostBuildpacks, config.DockerConfigJSON, utils) defer func() { _ = utils.RemoveAll(buildpacksPath) }() defer func() { _ = utils.RemoveAll(orderPath) }() if err != nil { diff --git a/cmd/cnbBuild_generated.go b/cmd/cnbBuild_generated.go index 945daa6600..0a150d1607 100644 --- a/cmd/cnbBuild_generated.go +++ b/cmd/cnbBuild_generated.go @@ -27,6 +27,8 @@ type cnbBuildOptions struct { ContainerImageTag string `json:"containerImageTag,omitempty"` ContainerRegistryURL string `json:"containerRegistryUrl,omitempty"` Buildpacks []string `json:"buildpacks,omitempty"` + PreBuildpacks []string `json:"preBuildpacks,omitempty"` + PostBuildpacks []string `json:"postBuildpacks,omitempty"` BuildEnvVars map[string]interface{} `json:"buildEnvVars,omitempty"` Path string `json:"path,omitempty"` ProjectDescriptor string `json:"projectDescriptor,omitempty"` @@ -226,7 +228,9 @@ func addCnbBuildFlags(cmd *cobra.Command, stepConfig *cnbBuildOptions) { cmd.Flags().StringVar(&stepConfig.ContainerImageAlias, "containerImageAlias", os.Getenv("PIPER_containerImageAlias"), "Logical name used for this image.\n") 
cmd.Flags().StringVar(&stepConfig.ContainerImageTag, "containerImageTag", os.Getenv("PIPER_containerImageTag"), "Tag of the container which will be built") cmd.Flags().StringVar(&stepConfig.ContainerRegistryURL, "containerRegistryUrl", os.Getenv("PIPER_containerRegistryUrl"), "Container registry where the image should be pushed to.\n\n**Note**: `containerRegistryUrl` should include only the domain. If you want to publish an image under `docker.io/example/my-image`, you must set `containerRegistryUrl: \"docker.io\"` and `containerImageName: \"example/my-image\"`.\n") - cmd.Flags().StringSliceVar(&stepConfig.Buildpacks, "buildpacks", []string{}, "List of custom buildpacks to use in the form of `$HOSTNAME/$REPO[:$TAG]`.") + cmd.Flags().StringSliceVar(&stepConfig.Buildpacks, "buildpacks", []string{}, "List of custom buildpacks to use in the form of `$HOSTNAME/$REPO[:$TAG]`. When this property is specified, buildpacks which are part of the builder will be ignored.") + cmd.Flags().StringSliceVar(&stepConfig.PreBuildpacks, "preBuildpacks", []string{}, "Buildpacks to prepend to the groups in the builder's order.") + cmd.Flags().StringSliceVar(&stepConfig.PostBuildpacks, "postBuildpacks", []string{}, "Buildpacks to append to the groups in the builder's order.") cmd.Flags().StringVar(&stepConfig.Path, "path", os.Getenv("PIPER_path"), "Glob that should either point to a directory with your sources or one artifact in zip format.\nThis property determines the input to the buildpack.\n") cmd.Flags().StringVar(&stepConfig.ProjectDescriptor, "projectDescriptor", `project.toml`, "Relative path to the project.toml file.\nSee [buildpacks.io](https://buildpacks.io/docs/reference/config/project-descriptor/) for the reference.\nParameters passed to the cnbBuild step will take precedence over the parameters set in the project.toml file, except the `env` block.\nEnvironment variables declared in a project descriptor file, will be merged with the `buildEnvVars` property, with the `buildEnvVars` having a precedence.\n\n*Note*: The project descriptor path should be relative to what is set in the [path](#path) property. If the `path` property is pointing to a zip archive (e.g. 
jar file), project descriptor path will be relative to the root of the workspace.\n\n*Note*: Inline buildpacks (see [specification](https://buildpacks.io/docs/reference/config/project-descriptor/#build-_table-optional_)) are not supported yet.\n") @@ -325,6 +329,34 @@ func cnbBuildMetadata() config.StepData { Aliases: []config.Alias{}, Default: []string{}, }, + { + Name: "preBuildpacks", + ResourceRef: []config.ResourceReference{ + { + Name: "commonPipelineEnvironment", + Param: "container/preBuildpacks", + }, + }, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "[]string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: []string{}, + }, + { + Name: "postBuildpacks", + ResourceRef: []config.ResourceReference{ + { + Name: "commonPipelineEnvironment", + Param: "container/postBuildpacks", + }, + }, + Scope: []string{"PARAMETERS", "STAGES", "STEPS"}, + Type: "[]string", + Mandatory: false, + Aliases: []config.Alias{}, + Default: []string{}, + }, { Name: "buildEnvVars", ResourceRef: []config.ResourceReference{}, diff --git a/cmd/cnbBuild_test.go b/cmd/cnbBuild_test.go index 2f1fa8226c..f5c7592a70 100644 --- a/cmd/cnbBuild_test.go +++ b/cmd/cnbBuild_test.go @@ -25,23 +25,56 @@ import ( const imageRegistry = "some-registry" func newCnbBuildTestsUtils() cnbutils.MockUtils { + imageStub := func(imageRef, target string) (v1.Image, error) { + fakeImage := &fake.FakeImage{} + var imageConfig v1.Config + switch imageRef { + case "pre-test": + imageConfig = v1.Config{ + Labels: map[string]string{ + "io.buildpacks.buildpackage.metadata": "{\"id\": \"pre-testbuildpack\", \"version\": \"0.0.1\"}", + }, + } + case "post-test": + imageConfig = v1.Config{ + Labels: map[string]string{ + "io.buildpacks.buildpackage.metadata": "{\"id\": \"post-testbuildpack\", \"version\": \"0.0.1\"}", + }, + } + default: + imageConfig = v1.Config{ + Labels: map[string]string{ + "io.buildpacks.buildpackage.metadata": "{\"id\": \"testbuildpack\", \"version\": \"0.0.1\"}", + }, + } + } + + fakeImage.ConfigFileReturns(&v1.ConfigFile{ + Config: imageConfig, + }, nil) + + return fakeImage, nil + } + utils := cnbutils.MockUtils{ ExecMockRunner: &mock.ExecMockRunner{}, FilesMock: &mock.FilesMock{}, - DownloadMock: &mock.DownloadMock{}, - } - - fakeImage := &fake.FakeImage{} - fakeImage.ConfigFileReturns(&v1.ConfigFile{ - Config: v1.Config{ - Labels: map[string]string{ - "io.buildpacks.buildpackage.metadata": "{\"id\": \"testbuildpack\", \"version\": \"0.0.1\"}", + DownloadMock: &mock.DownloadMock{ + ImageContentStub: imageStub, + ImageInfoStub: func(imageRef string) (v1.Image, error) { + return imageStub(imageRef, "") }, }, - }, nil) + } - utils.RemoteImageInfo = fakeImage - utils.ReturnImage = fakeImage + utils.AddFile("/cnb/order.toml", []byte(`[[order]] + [[order.group]] + id = "buildpacks/java" + version = "1.8.0" +[[order]] + [[order.group]] + id = "buildpacks/nodejs" + version = "1.6.0"`)) utils.AddFile("/layers/report.toml", []byte(`[build] [image] tags = ["localhost:5000/not-found:0.0.1"] @@ -234,6 +267,75 @@ func TestRunCnbBuild(t *testing.T) { assert.True(t, copiedFileExists) }) + t.Run("success case (custom buildpacks, pre and post buildpacks and custom env variables, renaming docker conf file, additional tag)", func(t *testing.T) { + t.Parallel() + config := cnbBuildOptions{ + ContainerImageName: "my-image", + ContainerImageTag: "0.0.1", + ContainerRegistryURL: imageRegistry, + DockerConfigJSON: "/path/to/test.json", + PreBuildpacks: []string{"pre-test"}, + PostBuildpacks: []string{"post-test"}, + 
Buildpacks: []string{"test"}, + BuildEnvVars: map[string]interface{}{ + "FOO": "BAR", + }, + AdditionalTags: []string{"latest"}, + } + + utils := newCnbBuildTestsUtils() + utils.FilesMock.AddFile(config.DockerConfigJSON, []byte(`{"auths":{"my-registry":{"auth":"dXNlcjpwYXNz"}}}`)) + addBuilderFiles(&utils) + + err := callCnbBuild(&config, &telemetry.CustomData{}, &utils, &cnbBuildCommonPipelineEnvironment{}, &piperhttp.Client{}) + + require.NoError(t, err) + runner := utils.ExecMockRunner + assert.Contains(t, runner.Env, "CNB_REGISTRY_AUTH={\"my-registry\":\"Basic dXNlcjpwYXNz\"}") + assert.Equal(t, creatorPath, runner.Calls[0].Exec) + assert.Contains(t, runner.Calls[0].Params, "/tmp/buildpacks") + assert.Contains(t, runner.Calls[0].Params, "/tmp/buildpacks/order.toml") + assert.Contains(t, runner.Calls[0].Params, fmt.Sprintf("%s/%s:%s", config.ContainerRegistryURL, config.ContainerImageName, config.ContainerImageTag)) + assert.Contains(t, runner.Calls[0].Params, fmt.Sprintf("%s/%s:latest", config.ContainerRegistryURL, config.ContainerImageName)) + + copiedFileExists, _ := utils.FileExists("/tmp/config.json") + assert.True(t, copiedFileExists) + }) + + t.Run("success case (custom pre and post buildpacks and custom env variables, renaming docker conf file, additional tag)", func(t *testing.T) { + t.Parallel() + config := cnbBuildOptions{ + ContainerImageName: "my-image", + ContainerImageTag: "0.0.1", + ContainerRegistryURL: imageRegistry, + DockerConfigJSON: "/path/to/test.json", + PostBuildpacks: []string{"post-test"}, + PreBuildpacks: []string{"pre-test"}, + BuildEnvVars: map[string]interface{}{ + "FOO": "BAR", + }, + AdditionalTags: []string{"latest"}, + } + + utils := newCnbBuildTestsUtils() + utils.FilesMock.AddFile(config.DockerConfigJSON, []byte(`{"auths":{"my-registry":{"auth":"dXNlcjpwYXNz"}}}`)) + addBuilderFiles(&utils) + + err := callCnbBuild(&config, &telemetry.CustomData{}, &utils, &cnbBuildCommonPipelineEnvironment{}, &piperhttp.Client{}) + + require.NoError(t, err) + runner := utils.ExecMockRunner + assert.Contains(t, runner.Env, "CNB_REGISTRY_AUTH={\"my-registry\":\"Basic dXNlcjpwYXNz\"}") + assert.Equal(t, creatorPath, runner.Calls[0].Exec) + assert.Contains(t, runner.Calls[0].Params, "/tmp/buildpacks") + assert.Contains(t, runner.Calls[0].Params, "/tmp/buildpacks/order.toml") + assert.Contains(t, runner.Calls[0].Params, fmt.Sprintf("%s/%s:%s", config.ContainerRegistryURL, config.ContainerImageName, config.ContainerImageTag)) + assert.Contains(t, runner.Calls[0].Params, fmt.Sprintf("%s/%s:latest", config.ContainerRegistryURL, config.ContainerImageName)) + + copiedFileExists, _ := utils.FileExists("/tmp/config.json") + assert.True(t, copiedFileExists) + }) + t.Run("success case (customTlsCertificates)", func(t *testing.T) { t.Parallel() httpmock.Activate() diff --git a/go.mod b/go.mod index d0057e6bfd..704331cfab 100644 --- a/go.mod +++ b/go.mod @@ -9,6 +9,7 @@ replace golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d => golang.org/x/c require ( cloud.google.com/go/storage v1.22.1 github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v0.4.0 + github.com/BurntSushi/toml v1.1.0 github.com/Jeffail/gabs/v2 v2.6.1 github.com/Masterminds/sprig v2.22.0+incompatible github.com/antchfx/htmlquery v1.2.4 @@ -46,7 +47,6 @@ require ( github.com/mitchellh/mapstructure v1.5.0 github.com/motemen/go-nuts v0.0.0-20210915132349-615a782f2c69 github.com/package-url/packageurl-go v0.1.0 - github.com/pelletier/go-toml v1.9.5 github.com/piper-validation/fortify-client-go 
v0.0.0-20220126145513-7b3e9a72af01 github.com/pkg/errors v0.9.1 github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06 @@ -87,7 +87,6 @@ require ( github.com/Azure/go-autorest/autorest/validation v0.3.1 // indirect github.com/Azure/go-autorest/logger v0.2.1 // indirect github.com/Azure/go-autorest/tracing v0.6.0 // indirect - github.com/BurntSushi/toml v1.1.0 // indirect github.com/CycloneDX/cyclonedx-go v0.6.0 github.com/DataDog/datadog-go v3.2.0+incompatible // indirect github.com/Jeffail/gabs v1.1.1 // indirect diff --git a/go.sum b/go.sum index a0f987ce80..2194e632ac 100644 --- a/go.sum +++ b/go.sum @@ -1707,8 +1707,6 @@ github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAv github.com/pelletier/go-toml v1.8.1/go.mod h1:T2/BmBdy8dvIRq1a/8aqjN41wvWlN4lrapLU/GW4pbc= github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= -github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= -github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/peterbourgon/diskv v2.0.1+incompatible h1:UBdAOUP5p4RWqPBg048CAvpKN+vxiaj6gdUUzhl4XmI= github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5 h1:q2e307iGHPdTGp0hoxKjt1H5pDo6utceo3dQVK3I5XQ= diff --git a/integration/github_actions_integration_test_list.yml b/integration/github_actions_integration_test_list.yml index fc9acd2dff..006f177b2d 100644 --- a/integration/github_actions_integration_test_list.yml +++ b/integration/github_actions_integration_test_list.yml @@ -9,6 +9,7 @@ run: - '"TestCNBIntegrationNPMCustomBuildpacksBuildpacklessProject"' - '"TestCNBIntegrationNPMCustomBuildpacksFullProject"' - '"TestCNBIntegrationProjectDescriptor"' + - '"TestCNBIntegrationPrePostBuildpacks"' - '"TestGolangIntegration"' - '"TestGradleIntegration"' diff --git a/integration/integration_cnb_test.go b/integration/integration_cnb_test.go index 944651cf8e..1319e1bcc1 100644 --- a/integration/integration_cnb_test.go +++ b/integration/integration_cnb_test.go @@ -17,7 +17,7 @@ import ( const ( registryURL = "localhost:5000" - baseBuilder = "paketobuildpacks/builder:0.3.26-base" + baseBuilder = "paketobuildpacks/builder:0.3.280-base" ) func setupDockerRegistry(t *testing.T, ctx context.Context) testcontainers.Container { @@ -135,7 +135,7 @@ func TestCNBIntegrationZipPath(t *testing.T) { container.assertHasOutput(t, "running command: /cnb/lifecycle/creator", "Installing Go", - "Paketo Go Build Buildpack", + "Paketo Buildpack for Go Build", fmt.Sprintf("Saving %s/not-found:0.0.1", registryURL), "*** Images (sha256:", "SUCCESS", @@ -279,9 +279,9 @@ func TestCNBIntegrationMultiImage(t *testing.T) { assert.NoError(t, err) container.assertHasOutput(t, - "Previous image with name \"localhost:5000/io-buildpacks-my-app:latest\" not found", + "Image with name \"localhost:5000/io-buildpacks-my-app:latest\" not found", "Saving localhost:5000/io-buildpacks-my-app:latest...", - "Previous image with name \"localhost:5000/go-app:v1.0.0\" not found", + "Image with name \"localhost:5000/go-app:v1.0.0\" not found", "Saving localhost:5000/go-app:v1.0.0...", "Using cached buildpack", "Saving localhost:5000/my-app2:latest...", @@ -334,3 +334,27 @@ func TestCNBIntegrationPreserveFilesIgnored(t *testing.T) { container.assertHasOutput(t, "skipping preserving 
files because the source") container.terminate(t) } + +func TestCNBIntegrationPrePostBuildpacks(t *testing.T) { + t.Parallel() + ctx := context.Background() + registryContainer := setupDockerRegistry(t, ctx) + defer registryContainer.Terminate(ctx) + + container := givenThisContainer(t, IntegrationTestDockerExecRunnerBundle{ + Image: baseBuilder, + User: "cnb", + TestDir: []string{"testdata", "TestCnbIntegration"}, + Network: fmt.Sprintf("container:%s", registryContainer.GetContainerID()), + Environment: map[string]string{ + "PIPER_VAULTCREDENTIAL_DYNATRACE_API_KEY": "api-key-content", + }, + }) + + err := container.whenRunningPiperCommand("cnbBuild", "--noTelemetry", "--verbose", "--projectDescriptor", "", "--path", "project", "--customConfig", "config.yml", "--containerImageTag", "0.0.1", "--containerImageName", "not-found", "--containerRegistryUrl", registryURL, "--postBuildpacks", "paketobuildpacks/datadog") + assert.NoError(t, err) + container.assertHasOutput(t, "Setting custom buildpacks: '[]'") + container.assertHasOutput(t, "Pre-buildpacks: '[]'") + container.assertHasOutput(t, "Post-buildpacks: '[paketobuildpacks/datadog]'") + container.terminate(t) +} diff --git a/integration/testdata/TestCnbIntegration/config.yml b/integration/testdata/TestCnbIntegration/config.yml index da73f0510e..78aed906a0 100644 --- a/integration/testdata/TestCnbIntegration/config.yml +++ b/integration/testdata/TestCnbIntegration/config.yml @@ -3,6 +3,9 @@ general: collectTelemetryData: false steps: cnbBuild: + buildEnvVars: + BP_DATADOG_ENABLED: true + BP_EAR_KEY: 74657374 bindings: maven-settings: type: maven diff --git a/integration/testdata/TestCnbIntegration/project/package.json b/integration/testdata/TestCnbIntegration/project/package.json index 2c34dae7ee..be34f94b01 100644 --- a/integration/testdata/TestCnbIntegration/project/package.json +++ b/integration/testdata/TestCnbIntegration/project/package.json @@ -1,6 +1,7 @@ { "name": "test-mta-js", "version": "1.0.0", + "main": "srv/hello.js", "dependencies": { "jest": "^26.0.1", "jest-jenkins-reporter": "^1.0.2" diff --git a/pkg/cnbutils/buildpack.go b/pkg/cnbutils/buildpack.go index 5dc929f09d..8b744206dd 100644 --- a/pkg/cnbutils/buildpack.go +++ b/pkg/cnbutils/buildpack.go @@ -27,36 +27,30 @@ type License struct { URI string `toml:"uri" json:"uri"` } -func DownloadBuildpacks(path string, bpacks []string, dockerCreds string, utils BuildUtils) (Order, error) { +func DownloadBuildpacks(path string, bpacks []string, dockerCreds string, utils BuildUtils) error { if dockerCreds != "" { os.Setenv("DOCKER_CONFIG", filepath.Dir(dockerCreds)) } - var orderEntry OrderEntry - order := Order{ - Utils: utils, - } - err := utils.MkdirAll(bpCacheDir, os.ModePerm) if err != nil { - return Order{}, errors.Wrap(err, "failed to create temp directory for buildpack cache") + return errors.Wrap(err, "failed to create temp directory for buildpack cache") } for _, bpack := range bpacks { - var bpackMeta BuildPackMetadata imageInfo, err := utils.GetRemoteImageInfo(bpack) if err != nil { - return Order{}, errors.Wrap(err, "failed to get remote image info of buildpack") + return errors.Wrap(err, "failed to get remote image info of buildpack") } hash, err := imageInfo.Digest() if err != nil { - return Order{}, errors.Wrap(err, "failed to get image digest") + return errors.Wrap(err, "failed to get image digest") } cacheDir := filepath.Join(bpCacheDir, hash.String()) cacheExists, err := utils.DirExists(cacheDir) if err != nil { - return Order{}, errors.Wrapf(err, "failed to check 
if cache dir '%s' exists", cacheDir) + return errors.Wrapf(err, "failed to check if cache dir '%s' exists", cacheDir) } if cacheExists { @@ -64,36 +58,83 @@ func DownloadBuildpacks(path string, bpacks []string, dockerCreds string, utils } else { err := utils.MkdirAll(cacheDir, os.ModePerm) if err != nil { - return Order{}, errors.Wrap(err, "failed to create temp directory for buildpack cache") + return errors.Wrap(err, "failed to create temp directory for buildpack cache") } log.Entry().Infof("Downloading buildpack '%s' to %s", bpack, cacheDir) - img, err := utils.DownloadImageContent(bpack, cacheDir) + _, err = utils.DownloadImageContent(bpack, cacheDir) if err != nil { - return Order{}, errors.Wrapf(err, "failed download buildpack image '%s'", bpack) + return errors.Wrapf(err, "failed download buildpack image '%s'", bpack) } - imageInfo = img + } + + matches, err := utils.Glob(filepath.Join(cacheDir, "cnb/buildpacks/*")) + if err != nil { + return err + } + + for _, match := range matches { + err = CreateVersionSymlinks(path, match, utils) + if err != nil { + return err + } + } + } + + return nil +} + +func GetMetadata(bpacks []string, utils BuildUtils) ([]BuildPackMetadata, error) { + var metadata []BuildPackMetadata + + for _, bpack := range bpacks { + var bpackMeta BuildPackMetadata + imageInfo, err := utils.GetRemoteImageInfo(bpack) + if err != nil { + return nil, err } imgConf, err := imageInfo.ConfigFile() if err != nil { - return Order{}, errors.Wrapf(err, "failed to read '%s' image config", bpack) + return nil, errors.Wrapf(err, "failed to read '%s' image config", bpack) } err = json.Unmarshal([]byte(imgConf.Config.Labels["io.buildpacks.buildpackage.metadata"]), &bpackMeta) if err != nil { - return Order{}, errors.Wrapf(err, "failed unmarshal '%s' image label", bpack) + return nil, err } - log.Entry().Debugf("Buildpack metadata: '%v'", bpackMeta) - orderEntry.Group = append(orderEntry.Group, bpackMeta) + metadata = append(metadata, bpackMeta) + } + + return metadata, nil +} + +func CreateVersionSymlinks(basePath, buildpackDir string, utils BuildUtils) error { + newBuildpackPath := filepath.Join(basePath, filepath.Base(buildpackDir)) + err := utils.MkdirAll(newBuildpackPath, os.ModePerm) + if err != nil { + return err + } - err = CopyProject(filepath.Join(cacheDir, "cnb/buildpacks"), path, nil, nil, utils) + versions, err := utils.Glob(filepath.Join(buildpackDir, "*")) + if err != nil { + return err + } + + for _, version := range versions { + newVersionPath := filepath.Join(newBuildpackPath, filepath.Base(version)) + exists, err := utils.DirExists(newVersionPath) if err != nil { - return Order{}, err + return err } - } - order.Order = []OrderEntry{orderEntry} + if !exists { + err = utils.Symlink(version, newVersionPath) + if err != nil { + return err + } + } + } - return order, nil + return nil } diff --git a/pkg/cnbutils/buildpack_test.go b/pkg/cnbutils/buildpack_test.go index a480272716..af9a694b39 100644 --- a/pkg/cnbutils/buildpack_test.go +++ b/pkg/cnbutils/buildpack_test.go @@ -4,11 +4,13 @@ package cnbutils_test import ( + "fmt" "testing" "github.com/SAP/jenkins-library/pkg/cnbutils" "github.com/SAP/jenkins-library/pkg/mock" v1 "github.com/google/go-containerregistry/pkg/v1" + "github.com/google/go-containerregistry/pkg/v1/fake" fakeImage "github.com/google/go-containerregistry/pkg/v1/fake" "github.com/stretchr/testify/assert" ) @@ -20,8 +22,9 @@ func TestBuildpackDownload(t *testing.T) { DownloadMock: &mock.DownloadMock{}, } - t.Run("it creates an order object", func(t 
*testing.T) { + t.Run("successfully downloads a buildpack", func(t *testing.T) { fakeImg := &fakeImage.FakeImage{} + fakeImg.DigestReturns(v1.NewHash("sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824")) fakeImg.ConfigFileReturns(&v1.ConfigFile{ Config: v1.Config{ Labels: map[string]string{ @@ -32,9 +35,50 @@ func TestBuildpackDownload(t *testing.T) { mockUtils.ReturnImage = fakeImg mockUtils.RemoteImageInfo = fakeImg - order, err := cnbutils.DownloadBuildpacks("/destination", []string{"buildpack"}, "/tmp/config.json", mockUtils) + err := cnbutils.DownloadBuildpacks("/destination", []string{"buildpack"}, "/tmp/config.json", mockUtils) + assert.NoError(t, err) + }) +} + +func TestGetMetadata(t *testing.T) { + var mockUtils = &cnbutils.MockUtils{ + ExecMockRunner: &mock.ExecMockRunner{}, + FilesMock: &mock.FilesMock{}, + DownloadMock: &mock.DownloadMock{ + ImageInfoStub: func(imageRef string) (v1.Image, error) { + return &fake.FakeImage{ + ConfigFileStub: func() (*v1.ConfigFile, error) { + return &v1.ConfigFile{ + Config: v1.Config{ + Labels: map[string]string{ + "io.buildpacks.buildpackage.metadata": fmt.Sprintf("{\"id\": \"%s\", \"version\": \"0.0.1\"}", imageRef), + }, + }, + }, nil + }, + }, nil + }, + }, + } + t.Run("returns empty metadata", func(t *testing.T) { + meta, err := cnbutils.GetMetadata(nil, mockUtils) assert.NoError(t, err) - assert.Equal(t, 1, len(order.Order)) + assert.Empty(t, meta) + }) + + t.Run("returns metadata of the provided buildpacks", func(t *testing.T) { + meta, err := cnbutils.GetMetadata([]string{"buildpack1", "buildpack2"}, mockUtils) + assert.NoError(t, err) + assert.Equal(t, []cnbutils.BuildPackMetadata{ + { + ID: "buildpack1", + Version: "0.0.1", + }, + { + ID: "buildpack2", + Version: "0.0.1", + }, + }, meta) }) } diff --git a/pkg/cnbutils/order.go b/pkg/cnbutils/order.go index 8c9213e23c..a7f9d0dd1d 100644 --- a/pkg/cnbutils/order.go +++ b/pkg/cnbutils/order.go @@ -2,10 +2,14 @@ package cnbutils import ( "bytes" + "os" + "path/filepath" - "github.com/pelletier/go-toml" + "github.com/BurntSushi/toml" ) +const DefaultOrderPath = "/cnb/order.toml" + type Order struct { Order []OrderEntry `toml:"order"` Utils BuildUtils `toml:"-"` @@ -30,3 +34,73 @@ func (o Order) Save(path string) error { return nil } + +func loadExistingOrder(utils BuildUtils) (Order, error) { + order := Order{ + Utils: utils, + } + + orderReader, err := utils.Open(DefaultOrderPath) + if err != nil { + return Order{}, err + } + defer orderReader.Close() + + _, err = toml.NewDecoder(orderReader).Decode(&order) + if err != nil { + return Order{}, err + } + + return order, nil +} + +func newOrder(bpacks []string, utils BuildUtils) (Order, error) { + buildpacksMeta, err := GetMetadata(bpacks, utils) + if err != nil { + return Order{}, err + } + + return Order{ + Utils: utils, + Order: []OrderEntry{{ + Group: buildpacksMeta, + }}, + }, nil +} + +func CreateOrder(bpacks, preBpacks, postBpacks []string, dockerCreds string, utils BuildUtils) (Order, error) { + if dockerCreds != "" { + os.Setenv("DOCKER_CONFIG", filepath.Dir(dockerCreds)) + } + + var order Order + var err error + if len(bpacks) == 0 { + order, err = loadExistingOrder(utils) + if err != nil { + return Order{}, err + } + } else { + order, err = newOrder(bpacks, utils) + if err != nil { + return Order{}, err + } + } + + for idx := range order.Order { + preMetadata, err := GetMetadata(preBpacks, utils) + if err != nil { + return Order{}, err + } + + postMetadata, err := GetMetadata(postBpacks, utils) + if err != 
nil { + return Order{}, err + } + + order.Order[idx].Group = append(preMetadata, order.Order[idx].Group...) + order.Order[idx].Group = append(order.Order[idx].Group, postMetadata...) + } + + return order, nil +} diff --git a/pkg/cnbutils/order_test.go b/pkg/cnbutils/order_test.go index fa75e1170b..3b6968428e 100644 --- a/pkg/cnbutils/order_test.go +++ b/pkg/cnbutils/order_test.go @@ -9,6 +9,8 @@ import ( "github.com/SAP/jenkins-library/pkg/cnbutils" "github.com/SAP/jenkins-library/pkg/mock" + v1 "github.com/google/go-containerregistry/pkg/v1" + "github.com/google/go-containerregistry/pkg/v1/fake" "github.com/stretchr/testify/assert" ) @@ -43,7 +45,7 @@ func TestOrderSave(t *testing.T) { assert.True(t, mockUtils.HasWrittenFile("/tmp/order.toml")) result, err := mockUtils.FileRead("/tmp/order.toml") assert.NoError(t, err) - assert.Equal(t, "\n[[order]]\n\n [[order.group]]\n id = \"paketo-buildpacks/sap-machine\"\n version = \"1.1.1\"\n\n [[order.group]]\n id = \"paketo-buildpacks/java\"\n version = \"2.2.2\"\n", string(result)) + assert.Equal(t, "[[order]]\n\n [[order.group]]\n id = \"paketo-buildpacks/sap-machine\"\n version = \"1.1.1\"\n\n [[order.group]]\n id = \"paketo-buildpacks/java\"\n version = \"2.2.2\"\n", string(result)) }) t.Run("raises an error if unable to write the file", func(t *testing.T) { @@ -64,3 +66,155 @@ func TestOrderSave(t *testing.T) { assert.False(t, mockUtils.HasWrittenFile("/tmp/order.toml")) }) } + +func TestCreateOrder(t *testing.T) { + imageStub := func(imageRef, target string) (v1.Image, error) { + fakeImage := &fake.FakeImage{} + var imageConfig v1.Config + switch imageRef { + case "pre-buildpack": + imageConfig = v1.Config{ + Labels: map[string]string{ + "io.buildpacks.buildpackage.metadata": "{\"id\": \"pre-testbuildpack\", \"version\": \"0.0.1\"}", + }, + } + case "post-buildpack": + imageConfig = v1.Config{ + Labels: map[string]string{ + "io.buildpacks.buildpackage.metadata": "{\"id\": \"post-testbuildpack\", \"version\": \"0.0.1\"}", + }, + } + default: + imageConfig = v1.Config{ + Labels: map[string]string{ + "io.buildpacks.buildpackage.metadata": "{\"id\": \"testbuildpack\", \"version\": \"0.0.1\"}", + }, + } + } + + fakeImage.ConfigFileReturns(&v1.ConfigFile{ + Config: imageConfig, + }, nil) + + return fakeImage, nil + } + + mockUtils := &cnbutils.MockUtils{ + FilesMock: &mock.FilesMock{}, + DownloadMock: &mock.DownloadMock{ + ImageContentStub: imageStub, + ImageInfoStub: func(imageRef string) (v1.Image, error) { + return imageStub(imageRef, "") + }, + }, + } + + mockUtils.AddFile(cnbutils.DefaultOrderPath, []byte(`[[order]] + [[order.group]] + id = "buildpacks/java" + version = "1.8.0" +[[order]] + [[order.group]] + id = "buildpacks/nodejs" + version = "1.6.0"`)) + + t.Run("successfully loads baked in order.toml", func(t *testing.T) { + order, err := cnbutils.CreateOrder(nil, nil, nil, "", mockUtils) + assert.NoError(t, err) + assert.Equal(t, []cnbutils.OrderEntry{ + { + Group: []cnbutils.BuildPackMetadata{ + { + ID: "buildpacks/java", + Version: "1.8.0", + }, + }, + }, + { + Group: []cnbutils.BuildPackMetadata{ + { + ID: "buildpacks/nodejs", + Version: "1.6.0", + }, + }, + }, + }, order.Order) + }) + + t.Run("successfully loads baked in order.toml and adds pre/post buildpacks", func(t *testing.T) { + order, err := cnbutils.CreateOrder(nil, []string{"pre-buildpack"}, []string{"post-buildpack"}, "", mockUtils) + assert.NoError(t, err) + assert.Equal(t, []cnbutils.OrderEntry{ + { + Group: []cnbutils.BuildPackMetadata{ + { + ID: "pre-testbuildpack", + 
Version: "0.0.1", + }, + { + ID: "buildpacks/java", + Version: "1.8.0", + }, + { + ID: "post-testbuildpack", + Version: "0.0.1", + }, + }, + }, + { + Group: []cnbutils.BuildPackMetadata{ + { + ID: "pre-testbuildpack", + Version: "0.0.1", + }, + { + ID: "buildpacks/nodejs", + Version: "1.6.0", + }, + { + ID: "post-testbuildpack", + Version: "0.0.1", + }, + }, + }, + }, order.Order) + }) + + t.Run("successfully creates new order with custom buildpacks", func(t *testing.T) { + order, err := cnbutils.CreateOrder([]string{"testbuildpack"}, nil, nil, "", mockUtils) + assert.NoError(t, err) + assert.Equal(t, []cnbutils.OrderEntry{ + { + Group: []cnbutils.BuildPackMetadata{ + { + ID: "testbuildpack", + Version: "0.0.1", + }, + }, + }, + }, order.Order) + }) + + t.Run("successfully creates new order with custom buildpacks and adds pre/post buildpacks", func(t *testing.T) { + order, err := cnbutils.CreateOrder([]string{"testbuildpack"}, []string{"pre-buildpack"}, []string{"post-buildpack"}, "", mockUtils) + assert.NoError(t, err) + assert.Equal(t, []cnbutils.OrderEntry{ + { + Group: []cnbutils.BuildPackMetadata{ + { + ID: "pre-testbuildpack", + Version: "0.0.1", + }, + { + ID: "testbuildpack", + Version: "0.0.1", + }, + { + ID: "post-testbuildpack", + Version: "0.0.1", + }, + }, + }, + }, order.Order) + }) +} diff --git a/pkg/cnbutils/project/descriptor.go b/pkg/cnbutils/project/descriptor.go index 71a544d6a6..620b27c79c 100644 --- a/pkg/cnbutils/project/descriptor.go +++ b/pkg/cnbutils/project/descriptor.go @@ -2,56 +2,40 @@ package project import ( - "errors" + "github.com/pkg/errors" + "github.com/BurntSushi/toml" "github.com/SAP/jenkins-library/pkg/cnbutils" + "github.com/SAP/jenkins-library/pkg/cnbutils/project/types" + v01 "github.com/SAP/jenkins-library/pkg/cnbutils/project/v01" + v02 "github.com/SAP/jenkins-library/pkg/cnbutils/project/v02" "github.com/SAP/jenkins-library/pkg/cnbutils/registry" piperhttp "github.com/SAP/jenkins-library/pkg/http" "github.com/SAP/jenkins-library/pkg/log" - "github.com/pelletier/go-toml" ignore "github.com/sabhiram/go-gitignore" ) -type script struct { - API string `toml:"api"` - Inline string `toml:"inline"` - Shell string `toml:"shell"` -} -type buildpack struct { - ID string `toml:"id"` - Version string `toml:"version"` - URI string `toml:"uri"` - Script script `toml:"script"` -} - -type envVar struct { - Name string `toml:"name"` - Value string `toml:"value"` +type project struct { + Version string `toml:"schema-version"` } -type build struct { - Include []string `toml:"include"` - Exclude []string `toml:"exclude"` - Buildpacks []buildpack `toml:"buildpacks"` - Env []envVar `toml:"env"` +type versionDescriptor struct { + Project project `toml:"_"` } -type project struct { - ID string `toml:"id"` -} - -type projectDescriptor struct { - Build build `toml:"build"` - Project project `toml:"project"` - Metadata map[string]interface{} `toml:"metadata"` +var parsers = map[string]func(string) (types.Descriptor, error){ + "0.1": v01.NewDescriptor, + "0.2": v02.NewDescriptor, } type Descriptor struct { - Exclude *ignore.GitIgnore - Include *ignore.GitIgnore - EnvVars map[string]interface{} - Buildpacks []string - ProjectID string + Exclude *ignore.GitIgnore + Include *ignore.GitIgnore + EnvVars map[string]interface{} + Buildpacks []string + PreBuildpacks []string + PostBuildpacks []string + ProjectID string } func ParseDescriptor(descriptorPath string, utils cnbutils.BuildUtils, httpClient piperhttp.Sender) (*Descriptor, error) { @@ -62,23 +46,45 @@ func 
ParseDescriptor(descriptorPath string, utils cnbutils.BuildUtils, httpClien return nil, err } - rawDescriptor := projectDescriptor{} - err = toml.Unmarshal(descriptorContent, &rawDescriptor) + var versionDescriptor versionDescriptor + _, err = toml.Decode(string(descriptorContent), &versionDescriptor) if err != nil { - return nil, err + return &Descriptor{}, errors.Wrapf(err, "parsing schema version") + } + + version := versionDescriptor.Project.Version + if version == "" { + version = "0.1" + } + + rawDescriptor, err := parsers[version](string(descriptorContent)) + if err != nil { + return &Descriptor{}, err } - if rawDescriptor.Build.Buildpacks != nil && len(rawDescriptor.Build.Buildpacks) > 0 { - buildpacksImg, err := rawDescriptor.Build.searchBuildpacks(httpClient) + if len(rawDescriptor.Build.Buildpacks) > 0 { + descriptor.Buildpacks, err = searchBuildpacks(rawDescriptor.Build.Buildpacks, httpClient) if err != nil { return nil, err } + } - descriptor.Buildpacks = buildpacksImg + if len(rawDescriptor.Build.Pre.Buildpacks) > 0 { + descriptor.PreBuildpacks, err = searchBuildpacks(rawDescriptor.Build.Pre.Buildpacks, httpClient) + if err != nil { + return nil, err + } + } + + if len(rawDescriptor.Build.Post.Buildpacks) > 0 { + descriptor.PostBuildpacks, err = searchBuildpacks(rawDescriptor.Build.Post.Buildpacks, httpClient) + if err != nil { + return nil, err + } } - if rawDescriptor.Build.Env != nil && len(rawDescriptor.Build.Env) > 0 { - descriptor.EnvVars = rawDescriptor.Build.envToMap() + if len(rawDescriptor.Build.Env) > 0 { + descriptor.EnvVars = envToMap(rawDescriptor.Build.Env) } if len(rawDescriptor.Build.Exclude) > 0 && len(rawDescriptor.Build.Include) > 0 { @@ -100,10 +106,10 @@ func ParseDescriptor(descriptorPath string, utils cnbutils.BuildUtils, httpClien return descriptor, nil } -func (b *build) envToMap() map[string]interface{} { +func envToMap(env []types.EnvVar) map[string]interface{} { envMap := map[string]interface{}{} - for _, e := range b.Env { + for _, e := range env { if len(e.Name) == 0 { continue } @@ -114,11 +120,11 @@ func (b *build) envToMap() map[string]interface{} { return envMap } -func (b *build) searchBuildpacks(httpClient piperhttp.Sender) ([]string, error) { +func searchBuildpacks(buildpacks []types.Buildpack, httpClient piperhttp.Sender) ([]string, error) { var bpackImg []string - for _, bpack := range b.Buildpacks { - if bpack.Script != (script{}) { + for _, bpack := range buildpacks { + if bpack.Script != (types.Script{}) { return nil, errors.New("inline buildpacks are not supported") } diff --git a/pkg/cnbutils/project/descriptor_test.go b/pkg/cnbutils/project/descriptor_test.go index 88cdbe17bb..33d03cb137 100644 --- a/pkg/cnbutils/project/descriptor_test.go +++ b/pkg/cnbutils/project/descriptor_test.go @@ -16,7 +16,7 @@ import ( ) func TestParseDescriptor(t *testing.T) { - t.Run("parses the project.toml file", func(t *testing.T) { + t.Run("parses the project.toml file v01", func(t *testing.T) { projectToml := `[project] id = "io.buildpacks.my-app" version = "0.1" @@ -41,6 +41,14 @@ value = "VAL2" name = "EMPTY" value = "" +[[build.pre.group]] +id = "paketo-buildpacks/java" +version = "5.9.1" + +[[build.post.group]] +id = "paketo-buildpacks/java" +version = "5.9.1" + [[build.buildpacks]] id = "paketo-buildpacks/java" version = "5.9.1" @@ -78,6 +86,94 @@ id = "paketo-buildpacks/nodejs" assert.Contains(t, descriptor.Buildpacks, "index.docker.io/test-java@5.9.1") assert.Contains(t, descriptor.Buildpacks, "index.docker.io/test-nodejs@1.1.1") + 
assert.Contains(t, descriptor.PreBuildpacks, "index.docker.io/test-java@5.9.1") + assert.Contains(t, descriptor.PostBuildpacks, "index.docker.io/test-java@5.9.1") + + assert.NotNil(t, descriptor.Include) + + t3 := descriptor.Include.MatchesPath("cmd/cobra.go") + assert.True(t, t3) + + t4 := descriptor.Include.MatchesPath("pkg/test/main.go") + assert.True(t, t4) + + t5 := descriptor.Include.MatchesPath("Makefile") + assert.False(t, t5) + }) + + t.Run("parses the project.toml file v02", func(t *testing.T) { + projectToml := `[_] +id = "io.buildpacks.my-app" +version = "0.1" +schema-version = "0.2" + +[io.buildpacks] +include = [ + "cmd/", + "go.mod", + "go.sum", + "*.go" +] + +[[io.buildpacks.build.env]] +name = "VAR1" +value = "VAL1" + +[[io.buildpacks.build.env]] +name = "VAR2" +value = "VAL2" + +[[io.buildpacks.build.env]] +name = "EMPTY" +value = "" + +[[io.buildpacks.pre.group]] +id = "paketo-buildpacks/java" +version = "5.9.1" + +[[io.buildpacks.post.group]] +id = "paketo-buildpacks/java" +version = "5.9.1" + +[[io.buildpacks.group]] +id = "paketo-buildpacks/java" +version = "5.9.1" + +[[io.buildpacks.group]] +id = "paketo-buildpacks/nodejs" +` + utils := &cnbutils.MockUtils{ + FilesMock: &mock.FilesMock{}, + } + + fakeJavaResponse := "{\"latest\":{\"version\":\"1.1.1\",\"namespace\":\"test\",\"name\":\"test\",\"description\":\"\",\"homepage\":\"\",\"licenses\":null,\"stacks\":[\"test\",\"test\"],\"id\":\"test\"},\"versions\":[{\"version\":\"5.9.1\",\"_link\":\"https://test-java/5.9.1\"}]}" + fakeNodeJsResponse := "{\"latest\":{\"version\":\"1.1.1\",\"namespace\":\"test\",\"name\":\"test\",\"description\":\"\",\"homepage\":\"\",\"licenses\":null,\"stacks\":[\"test\",\"test\"],\"id\":\"test\"},\"versions\":[{\"version\":\"1.1.1\",\"_link\":\"https://test-nodejs/1.1.1\"}]}" + + utils.AddFile("project.toml", []byte(projectToml)) + httpmock.Activate() + defer httpmock.DeactivateAndReset() + httpmock.RegisterResponder(http.MethodGet, "https://registry.buildpacks.io/api/v1/buildpacks/paketo-buildpacks/java", httpmock.NewStringResponder(200, fakeJavaResponse)) + httpmock.RegisterResponder(http.MethodGet, "https://registry.buildpacks.io/api/v1/buildpacks/paketo-buildpacks/nodejs", httpmock.NewStringResponder(200, fakeNodeJsResponse)) + + httpmock.RegisterResponder(http.MethodGet, "https://test-java/5.9.1", httpmock.NewStringResponder(200, "{\"addr\": \"index.docker.io/test-java@5.9.1\"}")) + httpmock.RegisterResponder(http.MethodGet, "https://test-nodejs/1.1.1", httpmock.NewStringResponder(200, "{\"addr\": \"index.docker.io/test-nodejs@1.1.1\"}")) + client := &piperhttp.Client{} + client.SetOptions(piperhttp.ClientOptions{MaxRetries: -1, UseDefaultTransport: true}) + + descriptor, err := ParseDescriptor("project.toml", utils, client) + + assert.NoError(t, err) + assert.Equal(t, "VAL1", descriptor.EnvVars["VAR1"]) + assert.Equal(t, "VAL2", descriptor.EnvVars["VAR2"]) + assert.Equal(t, "", descriptor.EnvVars["EMPTY"]) + + assert.Equal(t, "io.buildpacks.my-app", descriptor.ProjectID) + + assert.Contains(t, descriptor.Buildpacks, "index.docker.io/test-java@5.9.1") + assert.Contains(t, descriptor.Buildpacks, "index.docker.io/test-nodejs@1.1.1") + assert.Contains(t, descriptor.PreBuildpacks, "index.docker.io/test-java@5.9.1") + assert.Contains(t, descriptor.PostBuildpacks, "index.docker.io/test-java@5.9.1") + assert.NotNil(t, descriptor.Include) t3 := descriptor.Include.MatchesPath("cmd/cobra.go") @@ -160,6 +256,6 @@ exclude = [ _, err := ParseDescriptor("project.toml", utils, &piperhttp.Client{}) 
assert.Error(t, err) - assert.Equal(t, "(1, 8): was expecting token =, but got EOF instead", err.Error()) + assert.Equal(t, "parsing schema version: toml: line 0: unexpected EOF; expected key separator '='", err.Error()) }) } diff --git a/pkg/cnbutils/project/metadata/metadata.go b/pkg/cnbutils/project/metadata/metadata.go index 167469adac..abbb3498b4 100644 --- a/pkg/cnbutils/project/metadata/metadata.go +++ b/pkg/cnbutils/project/metadata/metadata.go @@ -5,11 +5,11 @@ import ( "bytes" "path/filepath" + "github.com/BurntSushi/toml" "github.com/SAP/jenkins-library/pkg/cnbutils" "github.com/SAP/jenkins-library/pkg/log" "github.com/SAP/jenkins-library/pkg/piperenv" "github.com/buildpacks/lifecycle/platform" - "github.com/pelletier/go-toml" ) var metadataFilePath = "/layers/project-metadata.toml" diff --git a/pkg/cnbutils/project/metadata/metadata_test.go b/pkg/cnbutils/project/metadata/metadata_test.go index 6310d8067b..cf36c9eaaa 100644 --- a/pkg/cnbutils/project/metadata/metadata_test.go +++ b/pkg/cnbutils/project/metadata/metadata_test.go @@ -15,16 +15,13 @@ import ( ) func TestWriteProjectMetadata(t *testing.T) { - expectedResult := ` -[source] + expectedResult := `[source] type = "git" - - [source.metadata] - refs = ["main"] - [source.version] commit = "012548" describe = "test-commit" + [source.metadata] + refs = ["main"] ` mockUtils := &cnbutils.MockUtils{ ExecMockRunner: &mock.ExecMockRunner{}, diff --git a/pkg/cnbutils/project/types/types.go b/pkg/cnbutils/project/types/types.go new file mode 100644 index 0000000000..a4f9521392 --- /dev/null +++ b/pkg/cnbutils/project/types/types.go @@ -0,0 +1,58 @@ +// Source: https://github.com/buildpacks/pack/blob/main/pkg/project/types/types.go +package types + +import ( + "github.com/buildpacks/lifecycle/api" +) + +type Script struct { + API string `toml:"api"` + Inline string `toml:"inline"` + Shell string `toml:"shell"` +} + +type Buildpack struct { + ID string `toml:"id"` + Version string `toml:"version"` + URI string `toml:"uri"` + Script Script `toml:"script"` +} + +type EnvVar struct { + Name string `toml:"name"` + Value string `toml:"value"` +} + +type Build struct { + Include []string `toml:"include"` + Exclude []string `toml:"exclude"` + Buildpacks []Buildpack `toml:"buildpacks"` + Env []EnvVar `toml:"env"` + Builder string `toml:"builder"` + Pre GroupAddition + Post GroupAddition +} + +type Project struct { + ID string `toml:"id"` + Name string `toml:"name"` + Version string `toml:"version"` + SourceURL string `toml:"source-url"` + Licenses []License `toml:"licenses"` +} + +type License struct { + Type string `toml:"type"` + URI string `toml:"uri"` +} + +type Descriptor struct { + Project Project `toml:"project"` + Build Build `toml:"build"` + Metadata map[string]interface{} `toml:"metadata"` + SchemaVersion *api.Version +} + +type GroupAddition struct { + Buildpacks []Buildpack `toml:"group"` +} diff --git a/pkg/cnbutils/project/v01/project.go b/pkg/cnbutils/project/v01/project.go new file mode 100644 index 0000000000..da41b98ad5 --- /dev/null +++ b/pkg/cnbutils/project/v01/project.go @@ -0,0 +1,30 @@ +// Source: https://github.com/buildpacks/pack/blob/main/pkg/project/v01/project.go +package v01 + +import ( + "github.com/BurntSushi/toml" + "github.com/SAP/jenkins-library/pkg/cnbutils/project/types" + "github.com/buildpacks/lifecycle/api" +) + +type Descriptor struct { + Project types.Project `toml:"project"` + Build types.Build `toml:"build"` + Metadata map[string]interface{} `toml:"metadata"` +} + +func 
NewDescriptor(projectTomlContents string) (types.Descriptor, error) { + versionedDescriptor := &Descriptor{} + + _, err := toml.Decode(projectTomlContents, versionedDescriptor) + if err != nil { + return types.Descriptor{}, err + } + + return types.Descriptor{ + Project: versionedDescriptor.Project, + Build: versionedDescriptor.Build, + Metadata: versionedDescriptor.Metadata, + SchemaVersion: api.MustParse("0.1"), + }, nil +} diff --git a/pkg/cnbutils/project/v02/project.go b/pkg/cnbutils/project/v02/project.go new file mode 100644 index 0000000000..96bf3c6861 --- /dev/null +++ b/pkg/cnbutils/project/v02/project.go @@ -0,0 +1,78 @@ +// Source: https://github.com/buildpacks/pack/blob/main/pkg/project/v02/project.go +package v02 + +import ( + "github.com/BurntSushi/toml" + "github.com/SAP/jenkins-library/pkg/cnbutils/project/types" + "github.com/buildpacks/lifecycle/api" +) + +type Buildpacks struct { + Include []string `toml:"include"` + Exclude []string `toml:"exclude"` + Group []types.Buildpack `toml:"group"` + Env Env `toml:"env"` + Build Build `toml:"build"` + Builder string `toml:"builder"` + Pre types.GroupAddition `toml:"pre"` + Post types.GroupAddition `toml:"post"` +} + +type Build struct { + Env []types.EnvVar `toml:"env"` +} + +// Env is deprecated: use `[[io.buildpacks.build.env]]` instead. see https://github.com/buildpacks/pack/pull/1479 +type Env struct { + Build []types.EnvVar `toml:"build"` +} + +type Project struct { + ID string `toml:"id"` + Name string `toml:"name"` + Licenses []types.License `toml:"licenses"` + Metadata map[string]interface{} `toml:"metadata"` + SchemaVersion string `toml:"schema-version"` +} + +type IO struct { + Buildpacks Buildpacks `toml:"buildpacks"` +} + +type Descriptor struct { + Project Project `toml:"_"` + IO IO `toml:"io"` +} + +func NewDescriptor(projectTomlContents string) (types.Descriptor, error) { + versionedDescriptor := &Descriptor{} + _, err := toml.Decode(projectTomlContents, &versionedDescriptor) + if err != nil { + return types.Descriptor{}, err + } + + // backward compatibility for incorrect key + env := versionedDescriptor.IO.Buildpacks.Build.Env + if env == nil { + env = versionedDescriptor.IO.Buildpacks.Env.Build + } + + return types.Descriptor{ + Project: types.Project{ + ID: versionedDescriptor.Project.ID, + Name: versionedDescriptor.Project.Name, + Licenses: versionedDescriptor.Project.Licenses, + }, + Build: types.Build{ + Include: versionedDescriptor.IO.Buildpacks.Include, + Exclude: versionedDescriptor.IO.Buildpacks.Exclude, + Buildpacks: versionedDescriptor.IO.Buildpacks.Group, + Env: env, + Builder: versionedDescriptor.IO.Buildpacks.Builder, + Pre: versionedDescriptor.IO.Buildpacks.Pre, + Post: versionedDescriptor.IO.Buildpacks.Post, + }, + Metadata: versionedDescriptor.Project.Metadata, + SchemaVersion: api.MustParse("0.2"), + }, nil +} diff --git a/pkg/cnbutils/report.go b/pkg/cnbutils/report.go index 0f5e99f491..8b0fe37280 100644 --- a/pkg/cnbutils/report.go +++ b/pkg/cnbutils/report.go @@ -3,8 +3,8 @@ package cnbutils import ( "fmt" + "github.com/BurntSushi/toml" "github.com/buildpacks/lifecycle/platform" - "github.com/pelletier/go-toml" ) const reportFile = "/layers/report.toml" diff --git a/pkg/cnbutils/report_test.go b/pkg/cnbutils/report_test.go index c82eee561e..4ac2711099 100644 --- a/pkg/cnbutils/report_test.go +++ b/pkg/cnbutils/report_test.go @@ -46,6 +46,6 @@ digest = "sha256:52eac630560210e5ae13eb10797c4246d6f02d425f32b9430ca00bde697c79e digest, err := cnbutils.DigestFromReport(mockUtils) assert.Empty(t, 
digest) - assert.EqualError(t, err, "(1, 1): parsing error: keys cannot contain { character") + assert.EqualError(t, err, "toml: line 1: expected '.' or '=', but got '{' instead") }) } diff --git a/pkg/mock/dockerClient.go b/pkg/mock/dockerClient.go index f2fb0689c8..2f1f972715 100644 --- a/pkg/mock/dockerClient.go +++ b/pkg/mock/dockerClient.go @@ -17,7 +17,9 @@ type DownloadMock struct { RemoteImageInfo v1.Image ReturnError string - Stub func(imageRef, targetDir string) (v1.Image, error) + Stub func(imageRef, targetDir string) (v1.Image, error) + ImageContentStub func(imageRef, targetFile string) (v1.Image, error) + ImageInfoStub func(imageRef string) (v1.Image, error) } // DownloadImage . @@ -40,6 +42,10 @@ func (c *DownloadMock) DownloadImageContent(imageRef, targetFile string) (v1.Ima c.ImageRef = imageRef c.FilePath = targetFile + if c.ImageContentStub != nil { + return c.ImageContentStub(imageRef, targetFile) + } + if len(c.ReturnError) > 0 { return nil, fmt.Errorf(c.ReturnError) } @@ -50,6 +56,10 @@ func (c *DownloadMock) DownloadImageContent(imageRef, targetFile string) (v1.Ima func (c *DownloadMock) GetRemoteImageInfo(imageRef string) (v1.Image, error) { c.RemoteImageRef = imageRef + if c.ImageInfoStub != nil { + return c.ImageInfoStub(imageRef) + } + if len(c.ReturnError) > 0 { return nil, fmt.Errorf(c.ReturnError) } diff --git a/resources/metadata/cnbBuild.yaml b/resources/metadata/cnbBuild.yaml index 9fe08524f7..ffd5c44fb8 100644 --- a/resources/metadata/cnbBuild.yaml +++ b/resources/metadata/cnbBuild.yaml @@ -97,7 +97,7 @@ spec: param: container/registryUrl - name: buildpacks type: "[]string" - description: List of custom buildpacks to use in the form of `$HOSTNAME/$REPO[:$TAG]`. + description: List of custom buildpacks to use in the form of `$HOSTNAME/$REPO[:$TAG]`. When this property is specified, buildpacks which are part of the builder will be ignored. scope: - PARAMETERS - STAGES @@ -105,6 +105,26 @@ spec: resourceRef: - name: commonPipelineEnvironment param: container/buildpacks + - name: preBuildpacks + type: "[]string" + description: Buildpacks to prepend to the groups in the builder's order. + scope: + - PARAMETERS + - STAGES + - STEPS + resourceRef: + - name: commonPipelineEnvironment + param: container/preBuildpacks + - name: postBuildpacks + type: "[]string" + description: Buildpacks to append to the groups in the builder's order. + scope: + - PARAMETERS + - STAGES + - STEPS + resourceRef: + - name: commonPipelineEnvironment + param: container/postBuildpacks - name: buildEnvVars type: "map[string]interface{}" description: |