diff --git a/.github/workflows/embedded-jar-test.yml b/.github/workflows/embedded-jar-test.yml new file mode 100644 index 000000000..aaa8e61b0 --- /dev/null +++ b/.github/workflows/embedded-jar-test.yml @@ -0,0 +1,29 @@ +# This test verifies that gradle-dep-tree.jar and maven-dep-tree.jar are kept up-to-date with the version specified in buildscripts/download-jars.js. +# It accomplishes this by downloading the JARs and executing a "git diff" command. +# In case there are any differences detected, the test will result in failure. +name: Embedded Jars Tests +on: + push: + branches: + - '**' + tags-ignore: + - '**' + pull_request: +jobs: + test: + runs-on: ubuntu-latest + env: + GOPROXY: direct + steps: + - uses: actions/checkout@v3 + + - name: Download JARs + run: buildscripts/download-jars.sh + + - name: Check Diff + run: git diff --exit-code + + - name: Log if Failure + run: echo "::warning::Please run ./buildscripts/download-jars to use compatible Maven and Gradle dependency tree JARs." + if: ${{ failure() }} + diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 62bec9d0b..e06472f52 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -8,11 +8,11 @@ on: pull_request: jobs: test: - runs-on: ${{ matrix.os }} + runs-on: ${{ matrix.os }}-latest strategy: fail-fast: false matrix: - os: [ ubuntu-latest, windows-latest, macOS-latest ] + os: [ ubuntu, windows, macOS ] env: GOPROXY: direct GRADLE_OPTS: -Dorg.gradle.daemon=false diff --git a/artifactory/commands/buildinfo/adddependencies.go b/artifactory/commands/buildinfo/adddependencies.go index 01593df63..1754a546c 100644 --- a/artifactory/commands/buildinfo/adddependencies.go +++ b/artifactory/commands/buildinfo/adddependencies.go @@ -256,7 +256,7 @@ func getLocalDependencies(addDepsParams *specutils.CommonParams) ([]string, erro func collectPatternMatchingFiles(addDepsParams *specutils.CommonParams, rootPath string) ([]string, error) { addDepsParams.SetPattern(clientutils.ConvertLocalPatternToRegexp(addDepsParams.Pattern, addDepsParams.GetPatternType())) - excludePathPattern := fspatterns.PrepareExcludePathPattern(addDepsParams) + excludePathPattern := fspatterns.PrepareExcludePathPattern(addDepsParams.Exclusions, addDepsParams.GetPatternType(), addDepsParams.IsRecursive()) patternRegex, err := regxp.Compile(addDepsParams.Pattern) if errorutils.CheckError(err) != nil { return nil, err @@ -286,6 +286,7 @@ func (badc *BuildAddDependenciesCommand) savePartialBuildInfo(dependencies []bui populateFunc := func(partial *buildinfo.Partial) { partial.ModuleType = buildinfo.Generic partial.Dependencies = dependencies + partial.ModuleId = badc.buildConfiguration.GetModule() } buildName, err := badc.buildConfiguration.GetBuildName() if err != nil { diff --git a/artifactory/commands/buildinfo/publish_test.go b/artifactory/commands/buildinfo/publish_test.go index 825cf6ed1..5a4b4923f 100644 --- a/artifactory/commands/buildinfo/publish_test.go +++ b/artifactory/commands/buildinfo/publish_test.go @@ -41,16 +41,16 @@ func TestPrintBuildInfoLink(t *testing.T) { config.ServerDetails{ArtifactoryUrl: "http://localhost:8082/artifactory/"}, "http://localhost:8082/ui/builds/test/1/" + buildTime + "/published?buildRepo=cli-build-info&projectKey=cli"}, } - for _, linkType := range linkTypes { + for i := range linkTypes { buildPubConf := &BuildPublishCommand{ - linkType.buildInfoConf, - &linkType.serverDetails, + linkTypes[i].buildInfoConf, + &linkTypes[i].serverDetails, nil, true, nil, } - buildPubComService, err := 
buildPubConf.getBuildInfoUiUrl(linkType.majorVersion, linkType.buildTime) + buildPubComService, err := buildPubConf.getBuildInfoUiUrl(linkTypes[i].majorVersion, linkTypes[i].buildTime) assert.NoError(t, err) - assert.Equal(t, buildPubComService, linkType.expected) + assert.Equal(t, buildPubComService, linkTypes[i].expected) } } diff --git a/artifactory/commands/golang/archive.go b/artifactory/commands/golang/archive.go index 674fa3c7e..b81ef3313 100644 --- a/artifactory/commands/golang/archive.go +++ b/artifactory/commands/golang/archive.go @@ -30,19 +30,15 @@ package golang // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import ( - "archive/zip" - "bytes" - "fmt" + "github.com/jfrog/jfrog-client-go/artifactory/services/fspatterns" + "github.com/jfrog/jfrog-client-go/utils" + "golang.org/x/mod/module" + gozip "golang.org/x/mod/zip" "io" "os" - "path" "path/filepath" + "regexp" "strings" - "unicode" - "unicode/utf8" - - "github.com/jfrog/jfrog-client-go/utils/errorutils" - "golang.org/x/mod/module" ) // Package zip provides functions for creating and extracting module zip files. @@ -77,27 +73,16 @@ import ( // Note that this package does not provide hashing functionality. See // golang.org/x/mod/sumdb/dirhash. -const ( - // MaxZipFile is the maximum size in bytes of a module zip file. The - // go command will report an error if either the zip file or its extracted - // content is larger than this. - MaxZipFile = 500 << 20 - - // MaxGoMod is the maximum size in bytes of a go.mod file within a - // module zip file. - MaxGoMod = 16 << 20 - - // MaxLICENSE is the maximum size in bytes of a LICENSE file within a - // module zip file. - MaxLICENSE = 16 << 20 -) - // Archive project files according to the go project standard -func archiveProject(writer io.Writer, dir, mod, version string) error { +func archiveProject(writer io.Writer, dir, mod, version string, excludedPatterns []string) error { m := module.Version{Version: version, Path: mod} - var files []File - - err := filepath.Walk(dir, func(filePath string, info os.FileInfo, err error) error { + excludedPatterns, err := getAbsolutePaths(excludedPatterns) + if err != nil { + return err + } + excludePatternsStr := fspatterns.PrepareExcludePathPattern(excludedPatterns, utils.GetPatternType(utils.PatternTypes{RegExp: false, Ant: false}), true) + var files []gozip.File + err = filepath.Walk(dir, func(filePath string, info os.FileInfo, err error) error { if err != nil { return err } @@ -136,11 +121,17 @@ func archiveProject(writer io.Writer, dir, mod, version string) error { } if info.Mode().IsRegular() { if !isVendoredPackage(slashPath) { - files = append(files, dirFile{ - filePath: filePath, - slashPath: slashPath, - info: info, - }) + excluded, err := isPathExcluded(filePath, excludePatternsStr) + if err != nil { + return err + } + if !excluded { + files = append(files, dirFile{ + filePath: filePath, + slashPath: slashPath, + info: info, + }) + } } return nil } @@ -152,7 +143,34 @@ func archiveProject(writer io.Writer, dir, mod, version string) error { return err } - return Create(writer, m, files) + return gozip.Create(writer, m, files) +} + +func getAbsolutePaths(exclusionPatterns []string) ([]string, error) { + var absolutedPaths []string + for _, singleExclusion := range exclusionPatterns { + singleExclusion, err := filepath.Abs(singleExclusion) + if err != nil { + return nil, err + } + absolutedPaths = append(absolutedPaths, singleExclusion) + } 
+    return absolutedPaths, nil
+}
+
+// This function receives a path and a regexp.
+// It returns true if the received path matches the regexp.
+// Before the match, the path is converted to an absolute path.
+func isPathExcluded(path string, excludePatternsRegexp string) (excluded bool, err error) {
+    var fullPath string
+    if len(excludePatternsRegexp) > 0 {
+        fullPath, err = filepath.Abs(path)
+        if err != nil {
+            return
+        }
+        excluded, err = regexp.MatchString(excludePatternsRegexp, fullPath)
+    }
+    return
 }
 
 func isVendoredPackage(name string) bool {
@@ -178,146 +196,6 @@ func isVendoredPackage(name string) bool {
     return strings.Contains(name[i:], "/")
 }
 
-// Create builds a zip archive for module m from an abstract list of files
-// and writes it to w.
-//
-// Create verifies the restrictions described in the package documentation
-// and should not produce an archive that Unzip cannot extract. Create does not
-// include files in the output archive if they don't belong in the module zip.
-// In particular, Create will not include files in modules found in
-// subdirectories, most files in vendor directories, or irregular files (such
-// as symbolic links) in the output archive.
-func Create(w io.Writer, m module.Version, files []File) (err error) {
-
-    // Check that the version is canonical, the module path is well-formed, and
-    // the major version suffix matches the major version.
-    if vers := module.CanonicalVersion(m.Version); vers != m.Version {
-        if vers == "" {
-            vers = "the version structure to be vX.Y.Z"
-        }
-        return fmt.Errorf("version %q is not canonical (expected %s)", m.Version, vers)
-    }
-    if err := module.Check(m.Path, m.Version); err != nil {
-        return err
-    }
-
-    // Find directories containing go.mod files (other than the root).
-    // These directories will not be included in the output zip.
-    haveGoMod := make(map[string]bool)
-    for _, f := range files {
-        dir, base := path.Split(f.Path())
-        if strings.EqualFold(base, "go.mod") {
-            info, err := f.Lstat()
-            if err != nil {
-                return err
-            }
-            if info.Mode().IsRegular() {
-                haveGoMod[dir] = true
-            }
-        }
-    }
-
-    inSubmodule := func(p string) bool {
-        for {
-            dir, _ := path.Split(p)
-            if dir == "" {
-                return false
-            }
-            if haveGoMod[dir] {
-                return true
-            }
-            p = dir[:len(dir)-1]
-        }
-    }
-
-    // Create the module zip file.
-    zw := zip.NewWriter(w)
-    prefix := fmt.Sprintf("%s@%s/", m.Path, m.Version)
-
-    addFile := func(f File, path string, size int64) (addFileErr error) {
-        var rc io.ReadCloser
-        var w io.Writer
-        rc, addFileErr = f.Open()
-        if addFileErr != nil {
-            return errorutils.CheckError(addFileErr)
-        }
-        defer func() {
-            closeErr := errorutils.CheckError(rc.Close())
-            if addFileErr != nil {
-                addFileErr = closeErr
-            }
-        }()
-        w, addFileErr = zw.Create(prefix + path)
-        if addFileErr != nil {
-            return errorutils.CheckError(addFileErr)
-        }
-        lr := &io.LimitedReader{R: rc, N: size + 1}
-        if _, addFileErr = io.Copy(w, lr); addFileErr != nil {
-            return addFileErr
-        }
-        if lr.N <= 0 {
-            return errorutils.CheckErrorf("file %q is larger than declared size", path)
-        }
-        return nil
-    }
-
-    collisions := make(collisionChecker)
-    maxSize := int64(MaxZipFile)
-    for _, f := range files {
-        p := f.Path()
-        if p != path.Clean(p) {
-            return fmt.Errorf("file path %s is not clean", p)
-        }
-        if path.IsAbs(p) {
-            return fmt.Errorf("file path %s is not relative", p)
-        }
-        if isVendoredPackage(p) || inSubmodule(p) {
-            continue
-        }
-        if p == ".hg_archival.txt" {
-            // Inserted by hg archive.
- // The go command drops this regardless of the VCS being used. - continue - } - if err := module.CheckFilePath(p); err != nil { - return err - } - if strings.ToLower(p) == "go.mod" && p != "go.mod" { - return fmt.Errorf("found file named %s, want all lower-case go.mod", p) - } - info, err := f.Lstat() - if err != nil { - return err - } - if err := collisions.check(p, info.IsDir()); err != nil { - return err - } - if !info.Mode().IsRegular() { - // Skip symbolic links (golang.org/issue/27093). - continue - } - size := info.Size() - if size < 0 || maxSize < size { - return fmt.Errorf("module source tree too large (max size is %d bytes)", MaxZipFile) - } - maxSize -= size - if p == "go.mod" && size > MaxGoMod { - return fmt.Errorf("go.mod file too large (max size is %d bytes)", MaxGoMod) - } - if p == "LICENSE" && size > MaxLICENSE { - return fmt.Errorf("LICENSE file too large (max size is %d bytes)", MaxLICENSE) - } - - if err := addFile(f, p, size); err != nil { - return err - } - } - if err := zw.Close(); err != nil { - return err - } - return -} - type dirFile struct { filePath, slashPath string info os.FileInfo @@ -327,18 +205,6 @@ func (f dirFile) Path() string { return f.slashPath } func (f dirFile) Lstat() (os.FileInfo, error) { return f.info, nil } func (f dirFile) Open() (io.ReadCloser, error) { return os.Open(f.filePath) } -// collisionChecker finds case-insensitive name collisions and paths that -// are listed as both files and directories. -// -// The keys of this map are processed with strToFold. pathInfo has the original -// path for each folded path. -type collisionChecker map[string]pathInfo - -type pathInfo struct { - path string - isDir bool -} - // File provides an abstraction for a file in a directory, zip, or anything // else that looks like a file. type File interface { @@ -354,67 +220,3 @@ type File interface { // an error if called on a directory or symbolic link. Open() (io.ReadCloser, error) } - -func (cc collisionChecker) check(p string, isDir bool) error { - fold := strToFold(p) - if other, ok := cc[fold]; ok { - if p != other.path { - return fmt.Errorf("case-insensitive file name collision: %q and %q", other.path, p) - } - if isDir != other.isDir { - return fmt.Errorf("entry %q is both a file and a directory", p) - } - if !isDir { - return fmt.Errorf("multiple entries for file %q", p) - } - // It's not an error if check is called with the same directory multiple - // times. check is called recursively on parent directories, so check - // may be called on the same directory many times. - } else { - cc[fold] = pathInfo{path: p, isDir: isDir} - } - - if parent := path.Dir(p); parent != "." { - return cc.check(parent, true) - } - return nil -} - -// strToFold returns a string with the property that -// strings.EqualFold(s, t) iff strToFold(s) == strToFold(t) -// This lets us test a large set of strings for fold-equivalent -// duplicates without making a quadratic number of calls -// to EqualFold. Note that strings.ToUpper and strings.ToLower -// do not have the desired property in some corner cases. -func strToFold(s string) string { - // Fast path: all ASCII, no upper case. - // Most paths look like this already. - for i := 0; i < len(s); i++ { - c := s[i] - if c >= utf8.RuneSelf || 'A' <= c && c <= 'Z' { - goto Slow - } - } - return s - -Slow: - var buf bytes.Buffer - for _, r := range s { - // SimpleFold(x) cycles to the next equivalent rune > x - // or wraps around to smaller values. Iterate until it wraps, - // and we've found the minimum value. 
- for { - r0 := r - r = unicode.SimpleFold(r0) - if r <= r0 { - break - } - } - // Exception to allow fast path above: A-Z => a-z - if 'A' <= r && r <= 'Z' { - r += 'a' - 'A' - } - buf.WriteRune(r) - } - return buf.String() -} diff --git a/artifactory/commands/golang/archive_test.go b/artifactory/commands/golang/archive_test.go index 51e03afda..784aef05f 100644 --- a/artifactory/commands/golang/archive_test.go +++ b/artifactory/commands/golang/archive_test.go @@ -2,6 +2,7 @@ package golang import ( "bytes" + "github.com/stretchr/testify/assert" "os" "path/filepath" "reflect" @@ -23,25 +24,45 @@ func TestArchiveProject(t *testing.T) { if err != nil { t.Error(err) } - buff := &bytes.Buffer{} - if err != nil { - t.Error(err) - } originalFolder := "test_.git_suffix" baseDir, dotGitPath := tests.PrepareDotGitDir(t, originalFolder, "testdata") - err = archiveProject(buff, filepath.Join(pwd, "testdata"), "myproject.com/module/name", "v1.0.0") - if err != nil { - t.Error(err) - } - expected := map[utils.Algorithm]string{utils.MD5: "28617d6e74fce3dd2bab21b1bd65009b", utils.SHA1: "410814fbf21afdfb9c5b550151a51c2e986447fa", utils.SHA256: "e877c07315d6d3ad69139035defc08c04b400b36cd069b35ea3c2960424f2dc6"} - actual, err := utils.CalcChecksums(buff) - if err != nil { - t.Error(err) + var archiveWithExclusion = []struct { + buff *bytes.Buffer + filePath string + mod string + version string + excludedPatterns []string + expected map[utils.Algorithm]string + }{ + {buff, filepath.Join(pwd, "testdata"), "myproject.com/module/name", "v1.0.0", nil, map[utils.Algorithm]string{utils.MD5: "5b3603a7bf637622516673b845249205", utils.SHA1: "7386685c432c39428c9cb8584a2b970139c5e626", utils.SHA256: "eefd8aa3f9ac89876c8442d5feebbc837666bf40114d201219e3e6d51c208949"}}, + {buff, filepath.Join(pwd, "testdata"), "myproject.com/module/name", "v1.0.0", []string{"./testdata/dir1/*"}, map[utils.Algorithm]string{utils.MD5: "c2eeb4ef958edee91570690bf4111fc7", utils.SHA1: "d77e10eaa9bd863a9ff3775d3e452041e6f5aa40", utils.SHA256: "ecf66c1256263b2b4386efc299fa0c389263608efda9d1d91af8a746e6c5709a"}}, + {buff, filepath.Join(pwd, "testdata"), "myproject.com/module/name", "v1.0.0", []string{"./testdata/dir2/*"}, map[utils.Algorithm]string{utils.MD5: "bbe78a98ba10c1428f3a364570015e11", utils.SHA1: "99fd22ea2fe9c2c48124e741881fc3a555458a7e", utils.SHA256: "e2299f3c4e1f22d36befba191a347783dc2047e8e38cf6b9b96c273090f6e25b"}}, + {buff, filepath.Join(pwd, "testdata"), "myproject.com/module/name", "v1.0.0", []string{"./testdata/dir2/*", "testdata/dir3/*"}, map[utils.Algorithm]string{utils.MD5: "28617d6e74fce3dd2bab21b1bd65009b", utils.SHA1: "410814fbf21afdfb9c5b550151a51c2e986447fa", utils.SHA256: "e877c07315d6d3ad69139035defc08c04b400b36cd069b35ea3c2960424f2dc6"}}, + {buff, filepath.Join(pwd, "testdata"), "myproject.com/module/name", "v1.0.0", []string{"./testdata/dir2/*", "./testdata/dir3/dir4/*"}, map[utils.Algorithm]string{utils.MD5: "46a3ded48ed7998b1b35c80fbe0ffab5", utils.SHA1: "a26e73e7d29e49dd5d9c87da8f7c93cf929750df", utils.SHA256: "cf224b12eca12de4a052ef0f444519d64b6cecaf7b06050a02998be190e88847"}}, + {buff, filepath.Join(pwd, "testdata"), "myproject.com/module/name", "v1.0.0", []string{"./testdata/dir3/*"}, map[utils.Algorithm]string{utils.MD5: "c2a2dd6a7af84c2d88a48caf0c3aec34", utils.SHA1: "193d761317a602d18566561678b7bddc4773385c", utils.SHA256: "3efcd8b0d88081ec64333ff98b43616d283c4d52ed26cd7c8df646d9ea452c31"}}, + {buff, filepath.Join(pwd, "testdata"), "myproject.com/module/name", "v1.0.0", []string{"*.txt"}, 
map[utils.Algorithm]string{utils.MD5: "e93953b4be84d7753e0f33589b7dc4ba", utils.SHA1: "280c7492f57262b6e0af56b06c9db6a128e32ab9", utils.SHA256: "e7357986c59bf670af1e2f4868edb1406a87d328b7681b15cf038491cdc7e88c"}}, + {buff, filepath.Join(pwd, "testdata"), "myproject.com/module/name", "v1.0.0", []string{"./*/dir4/*.txt"}, map[utils.Algorithm]string{utils.MD5: "785f0c0c7b20dfd716178856edb79834", utils.SHA1: "d07204277ece1d7bef6a9f289a56afb91d66125f", utils.SHA256: "6afa0dd70bfa7c6d3aca1a3dfcd6465c542d64136c6391fa611795e6fa5800ce"}}, } + for _, testData := range archiveWithExclusion { + err = archiveProject(testData.buff, testData.filePath, testData.mod, testData.version, testData.excludedPatterns) + assert.NoError(t, err) + actual, err := utils.CalcChecksums(buff) + assert.NoError(t, err) - if !reflect.DeepEqual(expected, actual) { - t.Errorf("Expecting: %v, Got: %v", expected, actual) + if !reflect.DeepEqual(testData.expected, actual) { + t.Errorf("Expecting: %v, Got: %v", testData.expected, actual) + } } tests.RenamePath(dotGitPath, filepath.Join(baseDir, originalFolder), t) } + +func TestGetAbsolutePaths(t *testing.T) { + testData := []string{filepath.Join(".", "dir1", "*"), "*.txt", filepath.Join("*", "dir2", "*")} + result, err := getAbsolutePaths(testData) + assert.NoError(t, err) + wd, err := os.Getwd() + assert.NoError(t, err) + expectedResults := []string{filepath.Join(wd, "dir1", "*"), filepath.Join(wd, "*.txt"), filepath.Join(wd, "*", "dir2", "*")} + assert.ElementsMatch(t, result, expectedResults) +} diff --git a/artifactory/commands/golang/gopublish.go b/artifactory/commands/golang/gopublish.go index 8b114d286..a9278097f 100644 --- a/artifactory/commands/golang/gopublish.go +++ b/artifactory/commands/golang/gopublish.go @@ -17,6 +17,7 @@ type GoPublishCommandArgs struct { buildConfiguration *utils.BuildConfiguration version string detailedSummary bool + excludedPatterns []string result *commandutils.Result utils.RepositoryConfig } @@ -40,6 +41,15 @@ func (gpc *GoPublishCommand) SetConfigFilePath(configFilePath string) *GoPublish return gpc } +func (gpc *GoPublishCommand) GetExcludedPatterns() []string { + return gpc.excludedPatterns +} + +func (gpc *GoPublishCommandArgs) SetExcludedPatterns(excludedPatterns []string) *GoPublishCommandArgs { + gpc.excludedPatterns = excludedPatterns + return gpc +} + func (gpc *GoPublishCommand) Run() error { err := validatePrerequisites() if err != nil { @@ -100,7 +110,7 @@ func (gpc *GoPublishCommand) Run() error { } // Publish the package to Artifactory. - summary, artifacts, err := publishPackage(gpc.version, gpc.TargetRepo(), buildName, buildNumber, project, serviceManager) + summary, artifacts, err := publishPackage(gpc.version, gpc.TargetRepo(), buildName, buildNumber, project, gpc.GetExcludedPatterns(), serviceManager) if err != nil { return err } diff --git a/artifactory/commands/golang/publish.go b/artifactory/commands/golang/publish.go index f16bca3e9..81277d332 100644 --- a/artifactory/commands/golang/publish.go +++ b/artifactory/commands/golang/publish.go @@ -25,7 +25,7 @@ import ( ) // Publish go project to Artifactory. 
-func publishPackage(packageVersion, targetRepo, buildName, buildNumber, projectKey string, servicesManager artifactory.ArtifactoryServicesManager) (summary *servicesutils.OperationSummary, artifacts []buildinfo.Artifact, err error) { +func publishPackage(packageVersion, targetRepo, buildName, buildNumber, projectKey string, excludedPatterns []string, servicesManager artifactory.ArtifactoryServicesManager) (summary *servicesutils.OperationSummary, artifacts []buildinfo.Artifact, err error) { projectPath, err := goutils.GetProjectRoot() if err != nil { return nil, nil, errorutils.CheckError(err) @@ -71,7 +71,7 @@ func publishPackage(packageVersion, targetRepo, buildName, buildNumber, projectK params.ModuleId = moduleName params.ModContent = modContent params.ModPath = filepath.Join(projectPath, "go.mod") - params.ZipPath, zipArtifact, err = archive(moduleName, packageVersion, projectPath, tempDirPath) + params.ZipPath, zipArtifact, err = archive(moduleName, packageVersion, projectPath, tempDirPath, excludedPatterns) if err != nil { return nil, nil, err } @@ -182,7 +182,7 @@ func readModFile(version, projectPath string, createArtifact bool) ([]byte, *bui // Archive the go project. // Returns the path of the temp archived project file. -func archive(moduleName, version, projectPath, tempDir string) (name string, zipArtifact *buildinfo.Artifact, err error) { +func archive(moduleName, version, projectPath, tempDir string, excludedPatterns []string) (name string, zipArtifact *buildinfo.Artifact, err error) { openedFile := false tempFile, err := os.CreateTemp(tempDir, "project.zip") if err != nil { @@ -197,8 +197,7 @@ func archive(moduleName, version, projectPath, tempDir string) (name string, zip } } }() - err = archiveProject(tempFile, projectPath, moduleName, version) - if err != nil { + if err = archiveProject(tempFile, projectPath, moduleName, version, excludedPatterns); err != nil { return "", nil, errorutils.CheckError(err) } // Double-check that the paths within the zip file are well-formed. 
diff --git a/artifactory/commands/golang/testdata/dir2/dir2.text b/artifactory/commands/golang/testdata/dir2/dir2.text new file mode 100644 index 000000000..bf245aeae --- /dev/null +++ b/artifactory/commands/golang/testdata/dir2/dir2.text @@ -0,0 +1 @@ +dir2.text \ No newline at end of file diff --git a/artifactory/commands/golang/testdata/dir3/c.txt b/artifactory/commands/golang/testdata/dir3/c.txt new file mode 100644 index 000000000..f632129c1 --- /dev/null +++ b/artifactory/commands/golang/testdata/dir3/c.txt @@ -0,0 +1 @@ +c.txt \ No newline at end of file diff --git a/artifactory/commands/golang/testdata/dir3/dir4/dir4.txt b/artifactory/commands/golang/testdata/dir3/dir4/dir4.txt new file mode 100644 index 000000000..1c563de15 --- /dev/null +++ b/artifactory/commands/golang/testdata/dir3/dir4/dir4.txt @@ -0,0 +1 @@ +dir4.txt \ No newline at end of file diff --git a/artifactory/commands/npm/npmcommand.go b/artifactory/commands/npm/npmcommand.go index cd1e7a35e..ca69e787b 100644 --- a/artifactory/commands/npm/npmcommand.go +++ b/artifactory/commands/npm/npmcommand.go @@ -296,7 +296,6 @@ func (ca *NpmCommand) Run() (err error) { if err = ca.collectDependencies(); err != nil { return } - log.Info(fmt.Sprintf("npm %s finished successfully.", ca.cmdName)) return } diff --git a/artifactory/commands/replication/create.go b/artifactory/commands/replication/create.go index 506113b61..efe3418f6 100644 --- a/artifactory/commands/replication/create.go +++ b/artifactory/commands/replication/create.go @@ -161,4 +161,5 @@ var writersMap = map[string]utils.AnswerWriter{ PathPrefix: utils.WriteStringAnswer, IncludePathPrefixPattern: utils.WriteStringAnswer, SocketTimeoutMillis: utils.WriteIntAnswer, + DisableProxy: utils.WriteBoolAnswer, } diff --git a/artifactory/commands/replication/template.go b/artifactory/commands/replication/template.go index 950988933..b7dd01f93 100644 --- a/artifactory/commands/replication/template.go +++ b/artifactory/commands/replication/template.go @@ -37,6 +37,7 @@ const ( PathPrefix = "pathPrefix" IncludePathPrefixPattern = "includePathPrefixPattern" SocketTimeoutMillis = "socketTimeoutMillis" + DisableProxy = "disableProxy" ) type ReplicationTemplateCommand struct { @@ -218,4 +219,5 @@ var suggestionMap = map[string]prompt.Suggest{ PathPrefix: {Text: PathPrefix}, IncludePathPrefixPattern: {Text: IncludePathPrefixPattern}, SocketTimeoutMillis: {Text: SocketTimeoutMillis}, + DisableProxy: {Text: DisableProxy}, } diff --git a/artifactory/commands/repository/repository.go b/artifactory/commands/repository/repository.go index 8633bfbe2..c82fae4a4 100644 --- a/artifactory/commands/repository/repository.go +++ b/artifactory/commands/repository/repository.go @@ -15,6 +15,12 @@ import ( "github.com/jfrog/jfrog-client-go/utils/errorutils" ) +const ( + // The actual field in the repository configuration is an array (plural) but in practice only one environment is allowed. + // This is why the question differs from the repository configuration. 
+ environmentsKey = "environments" +) + type RepoCommand struct { serverDetails *config.ServerDetails templatePath string @@ -88,6 +94,7 @@ var writersMap = map[string]utils.AnswerWriter{ ExcludePatterns: utils.WriteStringAnswer, RepoLayoutRef: utils.WriteStringAnswer, ProjectKey: utils.WriteStringAnswer, + environmentsKey: utils.WriteStringArrayAnswer, HandleReleases: utils.WriteBoolAnswer, HandleSnapshots: utils.WriteBoolAnswer, MaxUniqueSnapshots: utils.WriteIntAnswer, @@ -221,7 +228,7 @@ var localRepoHandlers = map[string]repoHandler{ Pypi: localPypiHandler, Docker: localDockerHandler, Vagrant: localVagrantHandler, - Gitlfs: localGitlfsHandler, + Gitlfs: localGitLfsHandler, Go: localGoHandler, Yum: localYumHandler, Conan: localConanHandler, @@ -483,7 +490,7 @@ func localVagrantHandler(servicesManager artifactory.ArtifactoryServicesManager, return err } -func localGitlfsHandler(servicesManager artifactory.ArtifactoryServicesManager, jsonConfig []byte, isUpdate bool) error { +func localGitLfsHandler(servicesManager artifactory.ArtifactoryServicesManager, jsonConfig []byte, isUpdate bool) error { params := services.NewGitlfsLocalRepositoryParams() err := json.Unmarshal(jsonConfig, ¶ms) if errorutils.CheckError(err) != nil { @@ -612,9 +619,9 @@ var remoteRepoHandlers = map[string]repoHandler{ Bower: remoteBowerHandler, Debian: remoteDebianHandler, Composer: remoteComposerHandler, - Pypi: remotelPypiHandler, + Pypi: remotePypiHandler, Docker: remoteDockerHandler, - Gitlfs: remoteGitlfsHandler, + Gitlfs: remoteGitLfsHandler, Go: remoteGoHandler, Yum: remoteYumHandler, Conan: remoteConanHandler, @@ -837,7 +844,7 @@ func remoteComposerHandler(servicesManager artifactory.ArtifactoryServicesManage return err } -func remotelPypiHandler(servicesManager artifactory.ArtifactoryServicesManager, jsonConfig []byte, isUpdate bool) error { +func remotePypiHandler(servicesManager artifactory.ArtifactoryServicesManager, jsonConfig []byte, isUpdate bool) error { params := services.NewPypiRemoteRepositoryParams() err := json.Unmarshal(jsonConfig, ¶ms) if errorutils.CheckError(err) != nil { @@ -865,7 +872,7 @@ func remoteDockerHandler(servicesManager artifactory.ArtifactoryServicesManager, return err } -func remoteGitlfsHandler(servicesManager artifactory.ArtifactoryServicesManager, jsonConfig []byte, isUpdate bool) error { +func remoteGitLfsHandler(servicesManager artifactory.ArtifactoryServicesManager, jsonConfig []byte, isUpdate bool) error { params := services.NewGitlfsRemoteRepositoryParams() err := json.Unmarshal(jsonConfig, ¶ms) if errorutils.CheckError(err) != nil { @@ -1038,7 +1045,7 @@ var federatedRepoHandlers = map[string]repoHandler{ Pypi: federatedPypiHandler, Docker: federatedDockerHandler, Vagrant: federatedVagrantHandler, - Gitlfs: federatedGitlfsHandler, + Gitlfs: federatedGitLfsHandler, Go: federatedGoHandler, Conan: federatedConanHandler, Chef: federatedChefHandler, @@ -1264,7 +1271,7 @@ func federatedVagrantHandler(servicesManager artifactory.ArtifactoryServicesMana return servicesManager.CreateFederatedRepository().Vagrant(params) } -func federatedGitlfsHandler(servicesManager artifactory.ArtifactoryServicesManager, jsonConfig []byte, isUpdate bool) error { +func federatedGitLfsHandler(servicesManager artifactory.ArtifactoryServicesManager, jsonConfig []byte, isUpdate bool) error { params := services.NewGitlfsFederatedRepositoryParams() err := json.Unmarshal(jsonConfig, ¶ms) if errorutils.CheckError(err) != nil { @@ -1364,7 +1371,7 @@ var virtualRepoHandlers = map[string]repoHandler{ 
Debian: virtualDebianHandler, Pypi: virtualPypiHandler, Docker: virtualDockerHandler, - Gitlfs: virtualGitlfsHandler, + Gitlfs: virtualGitLfsHandler, Go: virtualGoHandler, Yum: virtualYumHandler, Conan: virtualConanHandler, @@ -1572,7 +1579,7 @@ func virtualDockerHandler(servicesManager artifactory.ArtifactoryServicesManager return err } -func virtualGitlfsHandler(servicesManager artifactory.ArtifactoryServicesManager, jsonConfig []byte, isUpdate bool) error { +func virtualGitLfsHandler(servicesManager artifactory.ArtifactoryServicesManager, jsonConfig []byte, isUpdate bool) error { params := services.NewGitlfsVirtualRepositoryParams() err := json.Unmarshal(jsonConfig, ¶ms) if errorutils.CheckError(err) != nil { diff --git a/artifactory/commands/repository/template.go b/artifactory/commands/repository/template.go index 690432b71..c227a60bf 100644 --- a/artifactory/commands/repository/template.go +++ b/artifactory/commands/repository/template.go @@ -19,11 +19,8 @@ type RepoTemplateCommand struct { } const ( - PathErrorSuffixMsg = " please enter a path, in which the new template file will be created" - // Strings for prompt questions - SelectConfigKeyMsg = "Select the next configuration key" + utils.PressTabMsg - InsertValuePromptMsg = "Insert the value for " + SelectConfigKeyMsg = "Select the next configuration key" + utils.PressTabMsg TemplateType = "templateType" Create = "create" @@ -41,6 +38,7 @@ const ( ExcludePatterns = "excludesPattern" RepoLayoutRef = "repoLayoutRef" ProjectKey = "projectKey" + Environment = "environment" // Mutual local and remote repository configuration JSON keys HandleReleases = "handleReleases" @@ -225,6 +223,7 @@ var optionalSuggestsMap = map[string]prompt.Suggest{ ExcludePatterns: {Text: ExcludePatterns}, RepoLayoutRef: {Text: RepoLayoutRef}, ProjectKey: {Text: ProjectKey}, + Environment: {Text: Environment}, HandleReleases: {Text: HandleReleases}, HandleSnapshots: {Text: HandleSnapshots}, MaxUniqueSnapshots: {Text: MaxUniqueSnapshots}, @@ -296,7 +295,7 @@ var optionalSuggestsMap = map[string]prompt.Suggest{ } var baseLocalRepoConfKeys = []string{ - Description, Notes, IncludePatterns, ExcludePatterns, RepoLayoutRef, ProjectKey, BlackedOut, XrayIndex, + Description, Notes, IncludePatterns, ExcludePatterns, RepoLayoutRef, ProjectKey, Environment, BlackedOut, XrayIndex, PropertySets, ArchiveBrowsingEnabled, OptionalIndexCompressionFormats, DownloadRedirect, BlockPushingSchema1, } @@ -321,7 +320,7 @@ var dockerLocalRepoConfKeys = []string{ } var baseRemoteRepoConfKeys = []string{ - Username, Password, Proxy, Description, Notes, IncludePatterns, ExcludePatterns, RepoLayoutRef, ProjectKey, HardFail, Offline, + Username, Password, Proxy, Description, Notes, IncludePatterns, ExcludePatterns, RepoLayoutRef, ProjectKey, Environment, HardFail, Offline, BlackedOut, XrayIndex, StoreArtifactsLocally, SocketTimeoutMillis, LocalAddress, RetrievalCachePeriodSecs, FailedRetrievalCachePeriodSecs, MissedRetrievalCachePeriodSecs, UnusedArtifactsCleanupEnabled, UnusedArtifactsCleanupPeriodHours, AssumedOfflinePeriodSecs, ShareConfiguration, SynchronizeProperties, BlockMismatchingMimeTypes, PropertySets, AllowAnyHostAuth, EnableCookieManagement, @@ -386,7 +385,7 @@ var vcsRemoteRepoConfKeys = []string{ } var baseVirtualRepoConfKeys = []string{ - Repositories, Description, Notes, IncludePatterns, ExcludePatterns, RepoLayoutRef, ProjectKey, ArtifactoryRequestsCanRetrieveRemoteArtifacts, + Repositories, Description, Notes, IncludePatterns, ExcludePatterns, RepoLayoutRef, ProjectKey, 
Environment, ArtifactoryRequestsCanRetrieveRemoteArtifacts, DefaultDeploymentRepo, } @@ -798,6 +797,12 @@ var questionMap = map[string]utils.QuestionInfo{ Writer: utils.WriteStringAnswer, Callback: projectKeyCallback, }, + Environment: { + PromptPrefix: "Insert the name of the environment to assign to >", + AllowVars: true, + MapKey: environmentsKey, + Writer: utils.WriteStringAnswer, + }, HandleReleases: BoolToStringQuestionInfo, HandleSnapshots: BoolToStringQuestionInfo, MaxUniqueSnapshots: IntToStringQuestionInfo, diff --git a/artifactory/commands/transfer/settings.go b/artifactory/commands/transfer/settings.go index 3e5425c93..b034412de 100644 --- a/artifactory/commands/transfer/settings.go +++ b/artifactory/commands/transfer/settings.go @@ -1,7 +1,6 @@ package transfer import ( - "errors" "fmt" "strconv" @@ -36,7 +35,7 @@ func (tst *TransferSettingsCommand) Run() error { ioutils.ScanFromConsole("Set the maximum number of working threads", &threadsNumberInput, currThreadsNumber) threadsNumber, err := strconv.Atoi(threadsNumberInput) if err != nil || threadsNumber < 1 || threadsNumber > MaxThreadsLimit { - return errorutils.CheckError(errors.New("the value must be a number between 1 and " + strconv.Itoa(MaxThreadsLimit))) + return errorutils.CheckErrorf("the value must be a number between 1 and " + strconv.Itoa(MaxThreadsLimit)) } conf := &utils.TransferSettings{ThreadsNumber: threadsNumber} err = utils.SaveTransferSettings(conf) diff --git a/artifactory/commands/transferconfig/transferconfig.go b/artifactory/commands/transferconfig/transferconfig.go index 8760181c7..1b37ed67c 100644 --- a/artifactory/commands/transferconfig/transferconfig.go +++ b/artifactory/commands/transferconfig/transferconfig.go @@ -4,13 +4,13 @@ import ( "bytes" "context" "fmt" - "github.com/jfrog/gofrog/version" "net/http" "os" "strings" "time" - "github.com/jfrog/gofrog/datastructures" + "github.com/jfrog/gofrog/version" + "github.com/jfrog/jfrog-cli-core/v2/artifactory/commands/generic" "github.com/jfrog/jfrog-cli-core/v2/artifactory/commands/transferconfig/configxmlutils" commandsUtils "github.com/jfrog/jfrog-cli-core/v2/artifactory/commands/utils" @@ -152,9 +152,6 @@ func (tcc *TransferConfigCommand) Run() (err error) { } tcc.LogTitle("Phase 5/5 - Import repositories to the target Artifactory") - if err = tcc.deleteConflictingRepositories(selectedRepos); err != nil { - return - } if err = tcc.TransferRepositoriesToTarget(selectedRepos, remoteRepos); err != nil { return } @@ -477,6 +474,10 @@ func (tcc *TransferConfigCommand) createImportPollingAction(rtDetails *httputils if err != nil { return true, nil, err } + tcc.TargetAccessManager, err = utils.CreateAccessServiceManager(newServerDetails, false) + if err != nil { + return true, nil, err + } rtDetails, err = commandsUtils.CreateArtifactoryClientDetails(tcc.TargetArtifactoryManager) if err != nil { return true, nil, err @@ -528,65 +529,51 @@ func (tcc *TransferConfigCommand) getWorkingDirParam() string { return "" } -func (tcc *TransferConfigCommand) deleteConflictingRepositories(selectedRepos map[utils.RepoType][]string) error { - log.Info("Deleting conflicting repositories in the target Artifactory server, if any exist...") - targetRepos, err := tcc.TargetArtifactoryManager.GetAllRepositories() - if err != nil { - return err - } - allSourceRepos := datastructures.MakeSet[string]() - for _, selectedReposWithType := range selectedRepos { - for _, selectedRepo := range selectedReposWithType { - allSourceRepos.Add(selectedRepo) - } - } - - for _, targetRepo 
:= range *targetRepos { - if allSourceRepos.Exists(targetRepo.Key) { - if err = tcc.TargetArtifactoryManager.DeleteRepository(targetRepo.Key); err != nil { - return err - } - } - } - log.Info("Done deleting conflicting repositories") - return nil -} - // Make sure that the source Artifactory version is sufficient. // Returns the source Artifactory version. -func (tcc *TransferConfigCommand) validateMinVersion() error { +func (tcc *TransferConfigCommand) validateMinVersion() (sourceArtifactoryVersion string, err error) { log.Info("Verifying minimum version of the source server...") - sourceArtifactoryVersion, err := tcc.SourceArtifactoryManager.GetVersion() + sourceArtifactoryVersion, err = tcc.SourceArtifactoryManager.GetVersion() if err != nil { - return err + return } - targetArtifactoryVersion, err := tcc.TargetArtifactoryManager.GetVersion() + var targetArtifactoryVersion string + targetArtifactoryVersion, err = tcc.TargetArtifactoryManager.GetVersion() if err != nil { - return err + return } // Validate minimal Artifactory version in the source server err = coreutils.ValidateMinimumVersion(coreutils.Artifactory, sourceArtifactoryVersion, minTransferConfigArtifactoryVersion) if err != nil { - return err + return } // Validate that the target Artifactory server version is >= than the source Artifactory server version if !version.NewVersion(targetArtifactoryVersion).AtLeast(sourceArtifactoryVersion) { - return errorutils.CheckErrorf("The source Artifactory version (%s) can't be higher than the target Artifactory version (%s).", sourceArtifactoryVersion, targetArtifactoryVersion) + err = errorutils.CheckErrorf("The source Artifactory version (%s) can't be higher than the target Artifactory version (%s).", sourceArtifactoryVersion, targetArtifactoryVersion) } - return nil + return } -func (tcc *TransferConfigCommand) validateServerPrerequisites() error { +func (tcc *TransferConfigCommand) validateServerPrerequisites() (err error) { + var sourceArtifactoryVersion string // Make sure that the source Artifactory version is sufficient. 
- if err := tcc.validateMinVersion(); err != nil { - return err + if sourceArtifactoryVersion, err = tcc.validateMinVersion(); err != nil { + return } + + // Check connectivity to JFrog Access if the source Artifactory version is >= 7.0.0 + if versionErr := coreutils.ValidateMinimumVersion(coreutils.Projects, sourceArtifactoryVersion, commandsUtils.MinJFrogProjectsArtifactoryVersion); versionErr == nil { + if err = tcc.ValidateAccessServerConnection(tcc.SourceServerDetails, tcc.SourceAccessManager); err != nil { + return + } + } + // Make sure source and target Artifactory URLs are different - if err := tcc.ValidateDifferentServers(); err != nil { - return err + if err = tcc.ValidateDifferentServers(); err != nil { + return } // Make sure that the target Artifactory is empty and the config-import plugin is installed return tcc.validateTargetServer() diff --git a/artifactory/commands/transferconfig/transferconfig_test.go b/artifactory/commands/transferconfig/transferconfig_test.go index 4f06e548c..443183196 100644 --- a/artifactory/commands/transferconfig/transferconfig_test.go +++ b/artifactory/commands/transferconfig/transferconfig_test.go @@ -255,12 +255,48 @@ func TestValidateMinVersion(t *testing.T) { t.Run(testCase.testName, func(t *testing.T) { sourceRtVersion = testCase.sourceVersion targetRtVersion = testCase.targetVersion - err := createTransferConfigCommand(t, sourceServerDetails, targetServerDetails).validateMinVersion() + actualSourceRtVersion, err := createTransferConfigCommand(t, sourceServerDetails, targetServerDetails).validateMinVersion() if testCase.expectedError == "" { assert.NoError(t, err) + assert.Equal(t, sourceRtVersion, actualSourceRtVersion) } else { assert.ErrorContains(t, err, testCase.expectedError) } }) } } + +func TestValidateAccessServerConnection(t *testing.T) { + // Create transfer config command + testServer, serverDetails, accessManager := commonTests.CreateAccessRestsMockServer(t, func(w http.ResponseWriter, r *http.Request) { + switch r.RequestURI { + case "/access/api/v1/system/ping": + w.WriteHeader(http.StatusOK) + default: + assert.Fail(t, "Unexpected request URI: "+r.RequestURI) + } + }) + defer testServer.Close() + + transferConfigCmd := createTransferConfigCommand(t, nil, nil) + err := transferConfigCmd.ValidateAccessServerConnection(serverDetails, accessManager) + assert.NoError(t, err) +} + +func TestValidateAccessServerConnectionForbidden(t *testing.T) { + // Create transfer config command + testServer, serverDetails, accessManager := commonTests.CreateAccessRestsMockServer(t, func(w http.ResponseWriter, r *http.Request) { + switch r.RequestURI { + case "/access/api/v1/system/ping": + w.WriteHeader(http.StatusForbidden) + default: + assert.Fail(t, "Unexpected request URI: "+r.RequestURI) + } + }) + defer testServer.Close() + + transferConfigCmd := createTransferConfigCommand(t, nil, nil) + // Assert access token invalid error + err := transferConfigCmd.ValidateAccessServerConnection(serverDetails, accessManager) + assert.ErrorContains(t, err, "the 'test-server' instance Access Token is not valid. 
Please provide a valid access token by running the 'jf c edit test-server'") +} diff --git a/artifactory/commands/transferconfigmerge/transferconfigmerge.go b/artifactory/commands/transferconfigmerge/transferconfigmerge.go index 8a4cf8d13..8ef78eb1e 100644 --- a/artifactory/commands/transferconfigmerge/transferconfigmerge.go +++ b/artifactory/commands/transferconfigmerge/transferconfigmerge.go @@ -10,10 +10,8 @@ import ( "github.com/jfrog/jfrog-cli-core/v2/artifactory/utils" "github.com/jfrog/jfrog-cli-core/v2/utils/config" "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" - "github.com/jfrog/jfrog-client-go/access" accessServices "github.com/jfrog/jfrog-client-go/access/services" "github.com/jfrog/jfrog-client-go/artifactory/services" - "github.com/jfrog/jfrog-client-go/utils/errorutils" "github.com/jfrog/jfrog-client-go/utils/log" "golang.org/x/exp/slices" ) @@ -21,10 +19,9 @@ import ( type ConflictType string const ( - Repository ConflictType = "Repository" - Project ConflictType = "Project" - logFilePrefix = "transfer-config-conflicts" - minJFrogProjectsArtifactoryVersion = "7.0.0" + Repository ConflictType = "Repository" + Project ConflictType = "Project" + logFilePrefix = "transfer-config-conflicts" ) // Repository key that should be filtered when comparing repositories (all must be lowercase) @@ -34,8 +31,6 @@ type TransferConfigMergeCommand struct { commandsUtils.TransferConfigBase includeProjectsPatterns []string excludeProjectsPatterns []string - sourceAccessManager access.AccessServicesManager - targetAccessManager access.AccessServicesManager } func NewTransferConfigMergeCommand(sourceServer, targetServer *config.ServerDetails) *TransferConfigMergeCommand { @@ -104,7 +99,7 @@ func (tcmc *TransferConfigMergeCommand) initServiceManagersAndValidateServers() return } // Check if JFrog Projects supported by Source Artifactory version - versionErr := coreutils.ValidateMinimumVersion(coreutils.Projects, sourceArtifactoryVersion, minJFrogProjectsArtifactoryVersion) + versionErr := coreutils.ValidateMinimumVersion(coreutils.Projects, sourceArtifactoryVersion, commandsUtils.MinJFrogProjectsArtifactoryVersion) if versionErr != nil { // Projects not supported by Source Artifactory version return @@ -112,40 +107,16 @@ func (tcmc *TransferConfigMergeCommand) initServiceManagersAndValidateServers() projectsSupported = true - tcmc.sourceAccessManager, err = createAccessManagerAndValidateToken(tcmc.SourceServerDetails) - if err != nil { + if err = tcmc.ValidateAccessServerConnection(tcmc.SourceServerDetails, tcmc.SourceAccessManager); err != nil { return } - - tcmc.targetAccessManager, err = createAccessManagerAndValidateToken(tcmc.TargetServerDetails) - if err != nil { + if err = tcmc.ValidateAccessServerConnection(tcmc.TargetServerDetails, tcmc.TargetAccessManager); err != nil { return } return } -func createAccessManagerAndValidateToken(serverDetails *config.ServerDetails) (accessManager access.AccessServicesManager, err error) { - if serverDetails.Password != "" { - err = fmt.Errorf("it looks like you configured the '%[1]s' instance with username and password.\n"+ - "The transfer-config-merge command can be used with admin Access Token only.\n"+ - "Please use the 'jf c edit %[1]s' command to configure the Access Token, and then re-run the command", serverDetails.ServerId) - return - } - - manager, err := utils.CreateAccessServiceManager(serverDetails, false) - if err != nil { - return - } - - if _, err = manager.Ping(); err != nil { - err = errorutils.CheckErrorf("The '%[1]s' instance 
Access Token is not valid. Please provide a valid access token by running the 'jf c edit %[1]s'", serverDetails.ServerId) - return - } - accessManager = *manager - return -} - func (tcmc *TransferConfigMergeCommand) mergeEntities(projectsSupported bool) (mergeEntities mergeEntities, csvPath string, err error) { conflicts := []Conflict{} if projectsSupported { @@ -202,12 +173,12 @@ func (tcmc *TransferConfigMergeCommand) transferEntities(mergeEntities mergeEnti func (tcmc *TransferConfigMergeCommand) mergeProjects(conflicts *[]Conflict) (projectsToTransfer []accessServices.Project, err error) { log.Info("Getting all Projects from the source ...") - sourceProjects, err := tcmc.sourceAccessManager.GetAllProjects() + sourceProjects, err := tcmc.SourceAccessManager.GetAllProjects() if err != nil { return } log.Info("Getting all Projects from the target ...") - targetProjects, err := tcmc.targetAccessManager.GetAllProjects() + targetProjects, err := tcmc.TargetAccessManager.GetAllProjects() if err != nil { return } @@ -373,7 +344,7 @@ func compareInterfaces(first, second interface{}, filteredKeys ...string) (diff func (tcmc *TransferConfigMergeCommand) transferProjectsToTarget(reposToTransfer []accessServices.Project) (err error) { for _, project := range reposToTransfer { log.Info(fmt.Sprintf("Transferring project '%s' ...", project.DisplayName)) - if err = tcmc.targetAccessManager.CreateProject(accessServices.ProjectParams{ProjectDetails: project}); err != nil { + if err = tcmc.TargetAccessManager.CreateProject(accessServices.ProjectParams{ProjectDetails: project}); err != nil { return } } diff --git a/artifactory/commands/transferfiles/delayedartifactshandler.go b/artifactory/commands/transferfiles/delayedartifactshandler.go index 0d20057a0..4609b63fb 100644 --- a/artifactory/commands/transferfiles/delayedartifactshandler.go +++ b/artifactory/commands/transferfiles/delayedartifactshandler.go @@ -337,7 +337,12 @@ func (w *SplitContentWriter) closeCurrentFile() error { return err } if w.writer.GetFilePath() != "" { - fullPath := filepath.Join(w.dirPath, fmt.Sprintf("%s-%d.json", w.filePrefix, w.fileIndex)) + fullPath, err := getUniqueErrorOrDelayFilePath(w.dirPath, func() string { + return w.filePrefix + }) + if err != nil { + return err + } log.Debug(fmt.Sprintf("Saving split content JSON file to: %s.", fullPath)) if err := fileutils.MoveFile(w.writer.GetFilePath(), fullPath); err != nil { return fmt.Errorf("saving file failed! 
failed moving %s to %s: %w", w.writer.GetFilePath(), fullPath, err) diff --git a/artifactory/commands/transferfiles/errorshandler.go b/artifactory/commands/transferfiles/errorshandler.go index d7e5e8262..f716f2a2e 100644 --- a/artifactory/commands/transferfiles/errorshandler.go +++ b/artifactory/commands/transferfiles/errorshandler.go @@ -11,7 +11,6 @@ import ( "github.com/jfrog/jfrog-client-go/utils/io/fileutils" "github.com/jfrog/jfrog-client-go/utils/log" "os" - "path/filepath" "time" ) @@ -42,9 +41,7 @@ type TransferErrorsMng struct { type errorWriter struct { writer *content.ContentWriter errorCount int - // In case we have multiple errors files - we index them - fileIndex int - filePath string + filePath string } type errorWriterMng struct { @@ -116,7 +113,7 @@ func (mng *TransferErrorsMng) start() (err error) { if err != nil { return err } - writerRetry, retryFilePath, err := mng.newContentWriter(retryablePath, 0) + writerRetry, retryFilePath, err := mng.newUniqueContentWriter(retryablePath) if err != nil { return err } @@ -126,14 +123,14 @@ func (mng *TransferErrorsMng) start() (err error) { err = e } }() - writerMng.retryable = errorWriter{writer: writerRetry, fileIndex: 0, filePath: retryFilePath} + writerMng.retryable = errorWriter{writer: writerRetry, filePath: retryFilePath} // Init the content writer which is responsible for writing 'skipped errors' into files. // In the next run we won't retry and upload those files. skippedPath, err := getJfrogTransferRepoSkippedDir(mng.repoKey) if err != nil { return err } - writerSkip, skipFilePath, err := mng.newContentWriter(skippedPath, 0) + writerSkip, skipFilePath, err := mng.newUniqueContentWriter(skippedPath) if err != nil { return err } @@ -143,7 +140,7 @@ func (mng *TransferErrorsMng) start() (err error) { err = e } }() - writerMng.skipped = errorWriter{writer: writerSkip, fileIndex: 0, filePath: skipFilePath} + writerMng.skipped = errorWriter{writer: writerSkip, filePath: skipFilePath} mng.errorWriterMng = writerMng // Read errors from channel and write them to files. 
@@ -156,17 +153,22 @@ func (mng *TransferErrorsMng) start() (err error) { return } -func (mng *TransferErrorsMng) newContentWriter(dirPath string, index int) (*content.ContentWriter, string, error) { +func (mng *TransferErrorsMng) newUniqueContentWriter(dirPath string) (*content.ContentWriter, string, error) { writer, err := content.NewContentWriter("errors", true, false) if err != nil { return nil, "", err } - errorsFilePath := filepath.Join(dirPath, getErrorsFileName(mng.repoKey, mng.phaseId, mng.phaseStartTime, index)) + errorsFilePath, err := getUniqueErrorOrDelayFilePath(dirPath, func() string { + return getErrorsFileNamePrefix(mng.repoKey, mng.phaseId, mng.phaseStartTime) + }) + if err != nil { + return nil, "", err + } return writer, errorsFilePath, nil } -func getErrorsFileName(repoKey string, phaseId int, phaseStartTime string, index int) string { - return fmt.Sprintf("%s-%d-%s-%d.json", repoKey, phaseId, phaseStartTime, index) +func getErrorsFileNamePrefix(repoKey string, phaseId int, phaseStartTime string) string { + return fmt.Sprintf("%s-%d-%s", repoKey, phaseId, phaseStartTime) } func (mng *TransferErrorsMng) writeErrorContent(e ExtendedFileUploadStatusResponse) error { @@ -197,12 +199,11 @@ func (mng *TransferErrorsMng) writeSkippedErrorContent(e ExtendedFileUploadStatu return err } // Initialize variables for new errors file - mng.errorWriterMng.skipped.fileIndex++ dirPath, err := getJfrogTransferRepoSkippedDir(mng.repoKey) if err != nil { return err } - mng.errorWriterMng.skipped.writer, mng.errorWriterMng.skipped.filePath, err = mng.newContentWriter(dirPath, mng.errorWriterMng.skipped.fileIndex) + mng.errorWriterMng.skipped.writer, mng.errorWriterMng.skipped.filePath, err = mng.newUniqueContentWriter(dirPath) if err != nil { return err } @@ -222,12 +223,11 @@ func (mng *TransferErrorsMng) writeRetryableErrorContent(e ExtendedFileUploadSta return err } // Initialize variables for new errors file - mng.errorWriterMng.retryable.fileIndex++ dirPath, err := getJfrogTransferRepoRetryableDir(mng.repoKey) if err != nil { return err } - mng.errorWriterMng.retryable.writer, mng.errorWriterMng.retryable.filePath, err = mng.newContentWriter(dirPath, mng.errorWriterMng.retryable.fileIndex) + mng.errorWriterMng.retryable.writer, mng.errorWriterMng.retryable.filePath, err = mng.newUniqueContentWriter(dirPath) if err != nil { return err } diff --git a/artifactory/commands/transferfiles/errorshandler_test.go b/artifactory/commands/transferfiles/errorshandler_test.go index 9888534ba..3c3d70868 100644 --- a/artifactory/commands/transferfiles/errorshandler_test.go +++ b/artifactory/commands/transferfiles/errorshandler_test.go @@ -164,6 +164,6 @@ func writeEmptyErrorsFile(t *testing.T, repoKey string, retryable bool, phase, c assert.NoError(t, err) assert.NoError(t, fileutils.CreateDirIfNotExist(errorsDirPath)) - fileName := getErrorsFileName(repoKey, phase, state.ConvertTimeToEpochMilliseconds(time.Now()), counter) + fileName := fmt.Sprintf("%s-%d.json", getErrorsFileNamePrefix(repoKey, phase, state.ConvertTimeToEpochMilliseconds(time.Now())), counter) assert.NoError(t, os.WriteFile(filepath.Join(errorsDirPath, fileName), nil, 0644)) } diff --git a/artifactory/commands/transferfiles/filediff_test.go b/artifactory/commands/transferfiles/filediff_test.go new file mode 100644 index 000000000..3ee7d12ff --- /dev/null +++ b/artifactory/commands/transferfiles/filediff_test.go @@ -0,0 +1,34 @@ +package transferfiles + +import ( + "testing" + + 
"github.com/jfrog/jfrog-cli-core/v2/artifactory/commands/transferfiles/api" + servicesUtils "github.com/jfrog/jfrog-client-go/artifactory/services/utils" + "github.com/stretchr/testify/assert" +) + +var convertResultsToFileRepresentationTestCases = []struct { + input servicesUtils.ResultItem + expectedOutput api.FileRepresentation +}{ + { + servicesUtils.ResultItem{Repo: repo1Key, Path: "path-in-repo", Name: "file-name", Type: "file", Size: 100}, + api.FileRepresentation{Repo: repo1Key, Path: "path-in-repo", Name: "file-name", Size: 100}, + }, + { + servicesUtils.ResultItem{Repo: repo1Key, Path: "path-in-repo", Name: "folder-name", Type: "folder"}, + api.FileRepresentation{Repo: repo1Key, Path: "path-in-repo/folder-name"}, + }, + { + servicesUtils.ResultItem{Repo: repo1Key, Path: ".", Name: "folder-name", Type: "folder"}, + api.FileRepresentation{Repo: repo1Key, Path: "folder-name"}, + }, +} + +func TestConvertResultsToFileRepresentation(t *testing.T) { + for _, testCase := range convertResultsToFileRepresentationTestCases { + files := convertResultsToFileRepresentation([]servicesUtils.ResultItem{testCase.input}) + assert.Equal(t, []api.FileRepresentation{testCase.expectedOutput}, files) + } +} diff --git a/artifactory/commands/transferfiles/filesdiff.go b/artifactory/commands/transferfiles/filesdiff.go index e19f42429..b905fa7c4 100644 --- a/artifactory/commands/transferfiles/filesdiff.go +++ b/artifactory/commands/transferfiles/filesdiff.go @@ -2,6 +2,7 @@ package transferfiles import ( "fmt" + "path" "time" "github.com/jfrog/gofrog/parallel" @@ -163,12 +164,26 @@ func (f *filesDiffPhase) handleTimeFrameFilesDiff(pcWrapper *producerConsumerWra func convertResultsToFileRepresentation(results []servicesUtils.ResultItem) (files []api.FileRepresentation) { for _, result := range results { - files = append(files, api.FileRepresentation{ - Repo: result.Repo, - Path: result.Path, - Name: result.Name, - Size: result.Size, - }) + switch result.Type { + case "folder": + var pathInRepo string + if result.Path == "." 
{ + pathInRepo = result.Name + } else { + pathInRepo = path.Join(result.Path, result.Name) + } + files = append(files, api.FileRepresentation{ + Repo: result.Repo, + Path: pathInRepo, + }) + default: + files = append(files, api.FileRepresentation{ + Repo: result.Repo, + Path: result.Path, + Name: result.Name, + Size: result.Size, + }) + } } return } @@ -248,7 +263,7 @@ func (f *filesDiffPhase) getDockerTimeFrameFilesDiff(fromTimestamp, toTimestamp func generateDiffAqlQuery(repoKey, fromTimestamp, toTimestamp string, paginationOffset int) string { query := fmt.Sprintf(`items.find({"$and":[{"modified":{"$gte":"%s"}},{"modified":{"$lt":"%s"}},{"repo":"%s","type":"any"}]})`, fromTimestamp, toTimestamp, repoKey) - query += `.include("repo","path","name","modified","size")` + query += `.include("repo","path","name","type","modified","size")` query += fmt.Sprintf(`.sort({"$asc":["modified"]}).offset(%d).limit(%d)`, paginationOffset*AqlPaginationLimit, AqlPaginationLimit) return query } @@ -271,7 +286,7 @@ func generateGetDirContentAqlQuery(repoKey string, paths []string) string { func generateDockerManifestAqlQuery(repoKey, fromTimestamp, toTimestamp string, paginationOffset int) string { query := `items.find({"$and":` query += fmt.Sprintf(`[{"repo":"%s"},{"modified":{"$gte":"%s"}},{"modified":{"$lt":"%s"}},{"$or":[{"name":"manifest.json"},{"name":"list.manifest.json"}]}`, repoKey, fromTimestamp, toTimestamp) - query += `]}).include("repo","path","name","modified")` + query += `]}).include("repo","path","name","type","modified")` query += fmt.Sprintf(`.sort({"$asc":["modified"]}).offset(%d).limit(%d)`, paginationOffset*AqlPaginationLimit, AqlPaginationLimit) return query } diff --git a/artifactory/commands/transferfiles/fulltransfer.go b/artifactory/commands/transferfiles/fulltransfer.go index 773ccc24f..3229982d9 100644 --- a/artifactory/commands/transferfiles/fulltransfer.go +++ b/artifactory/commands/transferfiles/fulltransfer.go @@ -132,28 +132,25 @@ func (m *fullTransferPhase) transferFolder(params folderParams, logMsgPrefix str log.Debug(logMsgPrefix+"Handling folder:", path.Join(m.repoKey, params.relativePath)) // Get the directory's node from the snapshot manager, and use information from previous transfer attempts if such exist. - node, done, previousChildren, err := m.getAndHandleDirectoryNode(params, logMsgPrefix) + node, done, err := m.getAndHandleDirectoryNode(params, logMsgPrefix) if err != nil || done { return err } curUploadChunk, err := m.searchAndHandleFolderContents(params, pcWrapper, - uploadChunkChan, delayHelper, errorsChannelMng, - node, previousChildren) + uploadChunkChan, delayHelper, errorsChannelMng, node) if err != nil { return } // Mark that no more results are expected for the current folder. - err = node.MarkDoneExploring() - if err != nil { + if err = node.MarkDoneExploring(); err != nil { return err } // Chunk didn't reach full size. Upload the remaining files. 
if len(curUploadChunk.UploadCandidates) > 0 { - _, err = pcWrapper.chunkUploaderProducerConsumer.AddTaskWithError(uploadChunkWhenPossibleHandler(&m.phaseBase, curUploadChunk, uploadChunkChan, errorsChannelMng), pcWrapper.errorsQueue.AddError) - if err != nil { + if _, err = pcWrapper.chunkUploaderProducerConsumer.AddTaskWithError(uploadChunkWhenPossibleHandler(&m.phaseBase, curUploadChunk, uploadChunkChan, errorsChannelMng), pcWrapper.errorsQueue.AddError); err != nil { return } } @@ -163,7 +160,7 @@ func (m *fullTransferPhase) transferFolder(params folderParams, logMsgPrefix str func (m *fullTransferPhase) searchAndHandleFolderContents(params folderParams, pcWrapper producerConsumerWrapper, uploadChunkChan chan UploadedChunk, delayHelper delayUploadHelper, errorsChannelMng *ErrorsChannelMng, - node *reposnapshot.Node, previousChildren []*reposnapshot.Node) (curUploadChunk api.UploadChunk, err error) { + node *reposnapshot.Node) (curUploadChunk api.UploadChunk, err error) { curUploadChunk = api.UploadChunk{ TargetAuth: createTargetAuth(m.targetRtDetails, m.proxyKey), CheckExistenceInFilestore: m.checkExistenceInFilestore, @@ -201,8 +198,7 @@ func (m *fullTransferPhase) searchAndHandleFolderContents(params folderParams, p switch item.Type { case "folder": err = m.handleFoundChildFolder(params, pcWrapper, - uploadChunkChan, delayHelper, errorsChannelMng, - node, previousChildren, item) + uploadChunkChan, delayHelper, errorsChannelMng, item) case "file": err = m.handleFoundFile(pcWrapper, uploadChunkChan, delayHelper, errorsChannelMng, @@ -220,13 +216,9 @@ func (m *fullTransferPhase) searchAndHandleFolderContents(params folderParams, p func (m *fullTransferPhase) handleFoundChildFolder(params folderParams, pcWrapper producerConsumerWrapper, uploadChunkChan chan UploadedChunk, delayHelper delayUploadHelper, errorsChannelMng *ErrorsChannelMng, - node *reposnapshot.Node, previousChildren []*reposnapshot.Node, item servicesUtils.ResultItem) (err error) { + item servicesUtils.ResultItem) (err error) { newRelativePath := getFolderRelativePath(item.Name, params.relativePath) - // Add a node for the found folder, as a child for the current folder in the snapshot manager. - err = node.AddChildNode(item.Name, previousChildren) - if err != nil { - return - } + folderHandler := m.createFolderFullTransferHandlerFunc(pcWrapper, uploadChunkChan, delayHelper, errorsChannelMng) _, err = pcWrapper.chunkBuilderProducerConsumer.AddTaskWithError(folderHandler(folderParams{relativePath: newRelativePath}), pcWrapper.errorsQueue.AddError) return @@ -289,11 +281,12 @@ func generateFolderContentAqlQuery(repoKey, relativePath string, paginationOffse // node - A node in the repository snapshot tree, which represents the current directory. // completed - Whether handling the node directory was completed. If it wasn't fully transferred, we start exploring and transferring it from scratch. // previousChildren - If the directory requires exploring, previously known children will be added from this map in order to preserve their states and references. 
-func (m *fullTransferPhase) getAndHandleDirectoryNode(params folderParams, logMsgPrefix string) (node *reposnapshot.Node, completed bool, previousChildren []*reposnapshot.Node, err error) { +func (m *fullTransferPhase) getAndHandleDirectoryNode(params folderParams, logMsgPrefix string) (node *reposnapshot.Node, completed bool, err error) { node, err = m.stateManager.LookUpNode(params.relativePath) if err != nil { return } + // If data was not loaded from snapshot, we know that the node is visited for the first time and was not explored. loadedFromSnapshot, err := m.stateManager.WasSnapshotLoaded() if err != nil || !loadedFromSnapshot { @@ -306,21 +299,10 @@ func (m *fullTransferPhase) getAndHandleDirectoryNode(params folderParams, logMs } if completed { log.Debug(logMsgPrefix+"Skipping completed folder:", path.Join(m.repoKey, params.relativePath)) - return nil, true, nil, nil - } - // If the node was not completed, we will start exploring it from the beginning. - previousChildren, err = m.handleNodeRequiresExploring(node) - return -} - -func (m *fullTransferPhase) handleNodeRequiresExploring(node *reposnapshot.Node) (previousChildren []*reposnapshot.Node, err error) { - // Return old children map to add every found child with its previous data and references. - previousChildren, err = node.GetChildren() - if err != nil { return } + // If the node was not completed, we will start exploring it from the beginning. // Remove all files names because we will begin exploring from the beginning. - // Clear children map to avoid handling directories that may have been deleted. err = node.RestartExploring() return } diff --git a/artifactory/commands/transferfiles/manager.go b/artifactory/commands/transferfiles/manager.go index 0a85fd124..2b825f88b 100644 --- a/artifactory/commands/transferfiles/manager.go +++ b/artifactory/commands/transferfiles/manager.go @@ -259,19 +259,25 @@ func pollUploads(phaseBase *phaseBase, srcUpService *srcUserPluginService, uploa if phaseBase != nil { timeEstMng = &phaseBase.stateManager.TimeEstimationManager } - for { + for i := 0; ; i++ { if ShouldStop(phaseBase, nil, errorsChannelMng) { return } time.Sleep(waitTimeBetweenChunkStatusSeconds * time.Second) - // 'Working threads' are determined by how many upload chunks are currently being processed by the source Artifactory instance. - if err := phaseBase.stateManager.SetWorkingThreads(curProcessedUploadChunks); err != nil { - log.Error("Couldn't set the current number of working threads:", err.Error()) + // Run once per 3 minutes + if i%60 == 0 { + // 'Working threads' are determined by how many upload chunks are currently being processed by the source Artifactory instance. + if err := phaseBase.stateManager.SetWorkingThreads(curProcessedUploadChunks); err != nil { + log.Error("Couldn't set the current number of working threads:", err.Error()) + } } - // Each uploading thread receive a token and a node id from the source via the uploadChunkChan, so this go routine can poll on its status. + // Each uploading thread receives a token and a node id from the source via the uploadChunkChan, so this go routine can poll on its status. 
fillChunkDataBatch(&chunksLifeCycleManager, uploadChunkChan) + if err := chunksLifeCycleManager.StoreStaleChunks(phaseBase.stateManager); err != nil { + log.Error("Couldn't store the stale chunks:", err.Error()) + } // When totalChunks size is zero, it means that all the tokens are uploaded, // we received 'DONE' for all of them, and we notified the source that they can be deleted from the memory. // If during the polling some chunks data were lost due to network issues, either on the client or on the source, diff --git a/artifactory/commands/transferfiles/state/runstatus.go b/artifactory/commands/transferfiles/state/runstatus.go index df1db470a..1b9ffa9d6 100644 --- a/artifactory/commands/transferfiles/state/runstatus.go +++ b/artifactory/commands/transferfiles/state/runstatus.go @@ -38,6 +38,19 @@ type TransferRunStatus struct { WorkingThreads int `json:"working_threads,omitempty"` TransferFailures uint `json:"transfer_failures,omitempty"` TimeEstimationManager `json:"time_estimation,omitempty"` + StaleChunks []StaleChunks `json:"stale_chunks,omitempty"` +} + +// This structure contains a collection of chunks that have been undergoing processing for over 30 minutes +type StaleChunks struct { + NodeID string `json:"node_id,omitempty"` + Chunks []StaleChunk `json:"stale_node_chunks,omitempty"` +} + +type StaleChunk struct { + ChunkID string `json:"chunk_id,omitempty"` + Files []string `json:"files,omitempty"` + Sent int64 `json:"sent,omitempty"` } func (ts *TransferRunStatus) action(action ActionOnStatusFunc) error { diff --git a/artifactory/commands/transferfiles/state/statemanager.go b/artifactory/commands/transferfiles/state/statemanager.go index 389631f96..5425832e1 100644 --- a/artifactory/commands/transferfiles/state/statemanager.go +++ b/artifactory/commands/transferfiles/state/statemanager.go @@ -302,6 +302,20 @@ func (ts *TransferStateManager) GetWorkingThreads() (workingThreads int, err err }) } +func (ts *TransferStateManager) SetStaleChunks(staleChunks []StaleChunks) error { + return ts.action(func(transferRunStatus *TransferRunStatus) error { + transferRunStatus.StaleChunks = staleChunks + return nil + }) +} + +func (ts *TransferStateManager) GetStaleChunks() (staleChunks []StaleChunks, err error) { + return staleChunks, ts.action(func(transferRunStatus *TransferRunStatus) error { + staleChunks = transferRunStatus.StaleChunks + return nil + }) +} + func (ts *TransferStateManager) SaveStateAndSnapshots() error { ts.TransferState.lastSaveTimestamp = time.Now() if err := ts.persistTransferState(false); err != nil { @@ -361,7 +375,7 @@ func GetRunningTime() (runningTime string, isRunning bool, err error) { return } runningSecs := int64(time.Since(time.Unix(0, startTimestamp)).Seconds()) - return secondsToLiteralTime(runningSecs, ""), true, nil + return SecondsToLiteralTime(runningSecs, ""), true, nil } func UpdateChunkInState(stateManager *TransferStateManager, chunk *api.ChunkStatus) (err error) { diff --git a/artifactory/commands/transferfiles/state/timeestimation.go b/artifactory/commands/transferfiles/state/timeestimation.go index 5fdf82089..77dc88d7c 100644 --- a/artifactory/commands/transferfiles/state/timeestimation.go +++ b/artifactory/commands/transferfiles/state/timeestimation.go @@ -2,6 +2,7 @@ package state import ( "fmt" + "github.com/jfrog/jfrog-cli-core/v2/artifactory/commands/transferfiles/api" "github.com/jfrog/jfrog-cli-core/v2/artifactory/utils" @@ -77,6 +78,10 @@ func (tem *TimeEstimationManager) addDataChunkStatus(chunkStatus api.ChunkStatus tem.LastSpeedsSum 
-= tem.LastSpeeds[0] tem.LastSpeeds = tem.LastSpeeds[1:] } + if len(tem.LastSpeeds) == 0 { + tem.SpeedsAverage = 0 + return + } // Calculate speed in bytes/ms tem.SpeedsAverage = tem.LastSpeedsSum / float64(len(tem.LastSpeeds)) } @@ -185,7 +190,7 @@ func (tem *TimeEstimationManager) GetEstimatedRemainingTimeString() string { return err.Error() } - return secondsToLiteralTime(remainingTimeSec, "About ") + return SecondsToLiteralTime(remainingTimeSec, "About ") } func (tem *TimeEstimationManager) isTimeEstimationAvailable() bool { diff --git a/artifactory/commands/transferfiles/state/utils.go b/artifactory/commands/transferfiles/state/utils.go index 789c6cf46..6f87a5711 100644 --- a/artifactory/commands/transferfiles/state/utils.go +++ b/artifactory/commands/transferfiles/state/utils.go @@ -2,14 +2,15 @@ package state import ( "fmt" - "github.com/jfrog/build-info-go/utils" - "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" - "github.com/jfrog/jfrog-client-go/utils/errorutils" - "github.com/jfrog/jfrog-client-go/utils/io/fileutils" "path/filepath" "strconv" "strings" "time" + + "github.com/jfrog/build-info-go/utils" + "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" + "github.com/jfrog/jfrog-client-go/utils/errorutils" + "github.com/jfrog/jfrog-client-go/utils/io/fileutils" ) const ( @@ -36,9 +37,9 @@ func ConvertTimeToEpochMilliseconds(timeToConvert time.Time) string { return strconv.FormatInt(timeToConvert.UnixMilli(), 10) } -// secondsToLiteralTime converts a number of seconds to an easy-to-read string. +// SecondsToLiteralTime converts a number of seconds to an easy-to-read string. // Prefix is not taken into account if the time is less than a minute. -func secondsToLiteralTime(secondsToConvert int64, prefix string) string { +func SecondsToLiteralTime(secondsToConvert int64, prefix string) string { daysTime := secondsToConvert / secondsInDay daysTimeInSecs := daysTime * secondsInDay hoursTime := (secondsToConvert - daysTimeInSecs) / secondsInHour diff --git a/artifactory/commands/transferfiles/state/utils_test.go b/artifactory/commands/transferfiles/state/utils_test.go index d8375f196..89fb980de 100644 --- a/artifactory/commands/transferfiles/state/utils_test.go +++ b/artifactory/commands/transferfiles/state/utils_test.go @@ -1,8 +1,9 @@ package state import ( - "github.com/stretchr/testify/assert" "testing" + + "github.com/stretchr/testify/assert" ) func TestSecondsToLiteralTime(t *testing.T) { @@ -32,7 +33,7 @@ func TestSecondsToLiteralTime(t *testing.T) { for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { - assert.Equal(t, testCase.expected, secondsToLiteralTime(testCase.secsToConvert, testCase.prefix)) + assert.Equal(t, testCase.expected, SecondsToLiteralTime(testCase.secsToConvert, testCase.prefix)) }) } } diff --git a/artifactory/commands/transferfiles/status.go b/artifactory/commands/transferfiles/status.go index 4917993c1..db95454cd 100644 --- a/artifactory/commands/transferfiles/status.go +++ b/artifactory/commands/transferfiles/status.go @@ -5,6 +5,7 @@ import ( "path/filepath" "strconv" "strings" + "time" "github.com/jfrog/jfrog-cli-core/v2/artifactory/commands/transferfiles/api" "github.com/jfrog/jfrog-cli-core/v2/artifactory/commands/transferfiles/state" @@ -23,7 +24,7 @@ func ShowStatus() error { return err } if !isRunning { - addString(&output, "🔴", "Status", "Not running", 0, coreutils.IsWindows()) + addString(&output, "🔴", "Status", "Not running", 0) log.Output(output.String()) return nil } @@ -32,7 +33,7 @@ func ShowStatus() error { 
return err } if isStopping { - addString(&output, "🟡", "Status", "Stopping", 0, coreutils.IsWindows()) + addString(&output, "🟡", "Status", "Stopping", 0) log.Output(output.String()) return nil } @@ -54,6 +55,7 @@ func ShowStatus() error { output.WriteString("\n") setRepositoryStatus(stateManager, &output) } + addStaleChunks(stateManager, &output) log.Output(output.String()) return nil } @@ -68,20 +70,19 @@ func isStopping() (bool, error) { } func addOverallStatus(stateManager *state.TransferStateManager, output *strings.Builder, runningTime string) { - windows := coreutils.IsWindows() addTitle(output, "Overall Transfer Status") - addString(output, coreutils.RemoveEmojisIfNonSupportedTerminal("🟢"), "Status", "Running", 3, windows) - addString(output, "🏃", "Running for", runningTime, 3, windows) - addString(output, "🗄 ", "Storage", sizeToString(stateManager.OverallTransfer.TransferredSizeBytes)+" / "+sizeToString(stateManager.OverallTransfer.TotalSizeBytes)+calcPercentageInt64(stateManager.OverallTransfer.TransferredSizeBytes, stateManager.OverallTransfer.TotalSizeBytes), 3, windows) - addString(output, "📦", "Repositories", fmt.Sprintf("%d / %d", stateManager.TotalRepositories.TransferredUnits, stateManager.TotalRepositories.TotalUnits)+calcPercentageInt64(stateManager.TotalRepositories.TransferredUnits, stateManager.TotalRepositories.TotalUnits), 2, windows) - addString(output, "🧵", "Working threads", strconv.Itoa(stateManager.WorkingThreads), 2, windows) - addString(output, "⚡", "Transfer speed", stateManager.GetSpeedString(), 2, windows) - addString(output, "⌛", "Estimated time remaining", stateManager.GetEstimatedRemainingTimeString(), 1, windows) + addString(output, coreutils.RemoveEmojisIfNonSupportedTerminal("🟢"), "Status", "Running", 3) + addString(output, "🏃", "Running for", runningTime, 3) + addString(output, "🗄 ", "Storage", sizeToString(stateManager.OverallTransfer.TransferredSizeBytes)+" / "+sizeToString(stateManager.OverallTransfer.TotalSizeBytes)+calcPercentageInt64(stateManager.OverallTransfer.TransferredSizeBytes, stateManager.OverallTransfer.TotalSizeBytes), 3) + addString(output, "📦", "Repositories", fmt.Sprintf("%d / %d", stateManager.TotalRepositories.TransferredUnits, stateManager.TotalRepositories.TotalUnits)+calcPercentageInt64(stateManager.TotalRepositories.TransferredUnits, stateManager.TotalRepositories.TotalUnits), 2) + addString(output, "🧵", "Working threads", strconv.Itoa(stateManager.WorkingThreads), 2) + addString(output, "⚡", "Transfer speed", stateManager.GetSpeedString(), 2) + addString(output, "⌛", "Estimated time remaining", stateManager.GetEstimatedRemainingTimeString(), 1) failureTxt := strconv.FormatUint(uint64(stateManager.TransferFailures), 10) if stateManager.TransferFailures > 0 { failureTxt += " (" + "In Phase 3 and in subsequent executions, we'll retry transferring the failed files." 
+ ")" } - addString(output, "❌", "Transfer failures", failureTxt, 2, windows) + addString(output, "❌", "Transfer failures", failureTxt, 2) } func calcPercentageInt64(transferred, total int64) string { @@ -92,21 +93,41 @@ func calcPercentageInt64(transferred, total int64) string { } func setRepositoryStatus(stateManager *state.TransferStateManager, output *strings.Builder) { - windows := coreutils.IsWindows() addTitle(output, "Current Repository Status") - addString(output, "🏷 ", "Name", stateManager.CurrentRepoKey, 2, windows) + addString(output, "🏷 ", "Name", stateManager.CurrentRepoKey, 2) currentRepo := stateManager.CurrentRepo switch stateManager.CurrentRepoPhase { case api.Phase1, api.Phase3: if stateManager.CurrentRepoPhase == api.Phase1 { - addString(output, "🔢", "Phase", "Transferring all files in the repository (1/3)", 2, windows) + addString(output, "🔢", "Phase", "Transferring all files in the repository (1/3)", 2) } else { - addString(output, "🔢", "Phase", "Retrying transfer failures (3/3)", 2, windows) + addString(output, "🔢", "Phase", "Retrying transfer failures (3/3)", 2) } - addString(output, "🗄 ", "Storage", sizeToString(currentRepo.Phase1Info.TransferredSizeBytes)+" / "+sizeToString(currentRepo.Phase1Info.TotalSizeBytes)+calcPercentageInt64(currentRepo.Phase1Info.TransferredSizeBytes, currentRepo.Phase1Info.TotalSizeBytes), 2, windows) - addString(output, "📄", "Files", fmt.Sprintf("%d / %d", currentRepo.Phase1Info.TransferredUnits, currentRepo.Phase1Info.TotalUnits)+calcPercentageInt64(currentRepo.Phase1Info.TransferredUnits, currentRepo.Phase1Info.TotalUnits), 2, windows) + addString(output, "🗄 ", "Storage", sizeToString(currentRepo.Phase1Info.TransferredSizeBytes)+" / "+sizeToString(currentRepo.Phase1Info.TotalSizeBytes)+calcPercentageInt64(currentRepo.Phase1Info.TransferredSizeBytes, currentRepo.Phase1Info.TotalSizeBytes), 2) + addString(output, "📄", "Files", fmt.Sprintf("%d / %d", currentRepo.Phase1Info.TransferredUnits, currentRepo.Phase1Info.TotalUnits)+calcPercentageInt64(currentRepo.Phase1Info.TransferredUnits, currentRepo.Phase1Info.TotalUnits), 2) case api.Phase2: - addString(output, "🔢", "Phase", "Transferring newly created and modified files (2/3)", 2, windows) + addString(output, "🔢", "Phase", "Transferring newly created and modified files (2/3)", 2) + } +} + +func addStaleChunks(stateManager *state.TransferStateManager, output *strings.Builder) { + if len(stateManager.StaleChunks) == 0 { + return + } + output.WriteString("\n") + addTitle(output, "File Chunks in Transit for More than 30 Minutes") + + for _, nodeStaleChunks := range stateManager.StaleChunks { + addString(output, "🏷️ ", "Node ID", nodeStaleChunks.NodeID, 1) + for _, staleChunks := range nodeStaleChunks.Chunks { + addString(output, " 🏷️ ", "Chunk ID", staleChunks.ChunkID, 1) + sent := time.Unix(staleChunks.Sent, 0) + runningSecs := int64(time.Since(sent).Seconds()) + addString(output, " ⏱️ ", "Sent", sent.Format(time.DateTime)+" ("+state.SecondsToLiteralTime(runningSecs, "")+")", 1) + for _, file := range staleChunks.Files { + output.WriteString("\t\t📄 " + file + "\n") + } + } } } @@ -114,13 +135,13 @@ func addTitle(output *strings.Builder, title string) { output.WriteString(coreutils.PrintBoldTitle(title + "\n")) } -func addString(output *strings.Builder, emoji, key, value string, tabsCount int, windows bool) { +func addString(output *strings.Builder, emoji, key, value string, tabsCount int) { indentation := strings.Repeat("\t", tabsCount) if indentation == "" { indentation = " " } if len(emoji) 
> 0 { - if windows { + if coreutils.IsWindows() { emoji = "●" } emoji += " " diff --git a/artifactory/commands/transferfiles/status_test.go b/artifactory/commands/transferfiles/status_test.go index 8cb2f0983..f1c7226ab 100644 --- a/artifactory/commands/transferfiles/status_test.go +++ b/artifactory/commands/transferfiles/status_test.go @@ -3,6 +3,7 @@ package transferfiles import ( "bytes" "testing" + "time" "github.com/jfrog/build-info-go/utils" "github.com/jfrog/jfrog-cli-core/v2/artifactory/commands/transferfiles/api" @@ -53,7 +54,7 @@ func TestShowStatus(t *testing.T) { defer cleanUp() // Create state manager and persist to file system - createStateManager(t, api.Phase1, false) + createStateManager(t, api.Phase1, false, false) // Run show status and check output assert.NoError(t, ShowStatus()) @@ -83,7 +84,7 @@ func TestShowStatusDiffPhase(t *testing.T) { defer cleanUp() // Create state manager and persist to file system - createStateManager(t, api.Phase2, false) + createStateManager(t, api.Phase2, false, false) // Run show status and check output assert.NoError(t, ShowStatus()) @@ -113,7 +114,7 @@ func TestShowBuildInfoRepo(t *testing.T) { defer cleanUp() // Create state manager and persist to file system - createStateManager(t, api.Phase3, true) + createStateManager(t, api.Phase3, true, false) // Run show status and check output assert.NoError(t, ShowStatus()) @@ -138,10 +139,30 @@ func TestShowBuildInfoRepo(t *testing.T) { assert.Contains(t, results, "Files: 500 / 10000 (5.0%)") } +func TestShowStaleChunks(t *testing.T) { + buffer, cleanUp := initStatusTest(t) + defer cleanUp() + + // Create state manager and persist to file system + createStateManager(t, api.Phase1, false, true) + + // Run show status and check output + assert.NoError(t, ShowStatus()) + results := buffer.String() + + // Check stale chunks + assert.Contains(t, results, "File Chunks in Transit for More than 30 Minutes") + assert.Contains(t, results, "Node ID:\tnode-id-1") + assert.Contains(t, results, "Sent:\t") + assert.Contains(t, results, "(31 minutes)") + assert.Contains(t, results, "a/b/c") + assert.Contains(t, results, "d/e/f") +} + // Create state manager and persist in the file system. // t - The testing object // phase - Phase ID -func createStateManager(t *testing.T, phase int, buildInfoRepo bool) { +func createStateManager(t *testing.T, phase int, buildInfoRepo bool, staleChunks bool) { stateManager, err := state.NewTransferStateManager(false) assert.NoError(t, err) assert.NoError(t, stateManager.TryLockTransferStateManager()) @@ -159,6 +180,19 @@ func createStateManager(t *testing.T, phase int, buildInfoRepo bool) { stateManager.TimeEstimationManager.LastSpeedsSum = 12 stateManager.TimeEstimationManager.SpeedsAverage = 12 + if staleChunks { + stateManager.StaleChunks = append(stateManager.StaleChunks, state.StaleChunks{ + NodeID: staleChunksNodeIdOne, + Chunks: []state.StaleChunk{ + { + ChunkID: staleChunksChunkId, + Sent: time.Now().Add(-time.Minute * 31).Unix(), + Files: []string{"a/b/c", "d/e/f"}, + }, + }, + }) + } + // Increment transferred size and files. This action also persists the run status. 
assert.NoError(t, stateManager.IncTransferredSizeAndFilesPhase1(500, 5000)) diff --git a/artifactory/commands/transferfiles/transfer.go b/artifactory/commands/transferfiles/transfer.go index e6cddf974..c597a85f4 100644 --- a/artifactory/commands/transferfiles/transfer.go +++ b/artifactory/commands/transferfiles/transfer.go @@ -520,11 +520,13 @@ func (tdc *TransferFilesCommand) getAllLocalRepos(serverDetails *config.ServerDe if err != nil { return []string{}, []string{}, err } - localRepos, err := utils.GetFilteredRepositoriesByNameAndType(serviceManager, tdc.includeReposPatterns, tdc.excludeReposPatterns, utils.Local) + excludeRepoPatternsWithBuildInfo := tdc.excludeReposPatterns + excludeRepoPatternsWithBuildInfo = append(excludeRepoPatternsWithBuildInfo, "*-build-info") + localRepos, err := utils.GetFilteredRepositoriesByNameAndType(serviceManager, tdc.includeReposPatterns, excludeRepoPatternsWithBuildInfo, utils.Local) if err != nil { return []string{}, []string{}, err } - federatedRepos, err := utils.GetFilteredRepositoriesByNameAndType(serviceManager, tdc.includeReposPatterns, tdc.excludeReposPatterns, utils.Federated) + federatedRepos, err := utils.GetFilteredRepositoriesByNameAndType(serviceManager, tdc.includeReposPatterns, excludeRepoPatternsWithBuildInfo, utils.Federated) if err != nil { return []string{}, []string{}, err } diff --git a/artifactory/commands/transferfiles/transfer_test.go b/artifactory/commands/transferfiles/transfer_test.go index b388a86cd..e940d19b9 100644 --- a/artifactory/commands/transferfiles/transfer_test.go +++ b/artifactory/commands/transferfiles/transfer_test.go @@ -339,7 +339,8 @@ func TestGetAllLocalRepositories(t *testing.T) { case "/api/repositories?type=federated&packageType=": // Response for GetWithFilter w.WriteHeader(http.StatusOK) - response := &[]services.RepositoryDetails{{Key: "federated-repo-1"}, {Key: "federated-repo-2"}} + // We add a build-info repository to the response to cover the case in which a federated build-info repository is returned + response := &[]services.RepositoryDetails{{Key: "federated-repo-1"}, {Key: "federated-repo-2"}, {Key: "proj-build-info"}} bytes, err := json.Marshal(response) assert.NoError(t, err) _, err = w.Write(bytes) diff --git a/artifactory/commands/transferfiles/utils.go b/artifactory/commands/transferfiles/utils.go index f33283bf6..668bebc1d 100644 --- a/artifactory/commands/transferfiles/utils.go +++ b/artifactory/commands/transferfiles/utils.go @@ -7,6 +7,7 @@ import ( "fmt" "io" "os" + "path" "path/filepath" "strconv" "strings" @@ -92,6 +93,37 @@ func (clcm *ChunksLifeCycleManager) GetInProgressTokensSliceByNodeId(nodeId node return inProgressTokens } +// Save in the TransferRunStatus the chunks that have been in transit for more than 30 minutes. +// This allows them to be displayed using the '--status' option.
+// stateManager - Transfer state manager +func (clcm *ChunksLifeCycleManager) StoreStaleChunks(stateManager *state.TransferStateManager) error { + var staleChunks []state.StaleChunks + for nodeId, chunkIdToData := range clcm.nodeToChunksMap { + staleNodeChunks := state.StaleChunks{NodeID: string(nodeId)} + for chunkId, uploadedChunkData := range chunkIdToData { + if time.Since(uploadedChunkData.TimeSent).Hours() < 0.5 { + continue + } + staleNodeChunk := state.StaleChunk{ + ChunkID: string(chunkId), + Sent: uploadedChunkData.TimeSent.Unix(), + } + for _, file := range uploadedChunkData.ChunkFiles { + var sizeStr string + if file.Size > 0 { + sizeStr = " (" + utils.ConvertIntToStorageSizeString(file.Size) + ")" + } + staleNodeChunk.Files = append(staleNodeChunk.Files, path.Join(file.Repo, file.Path, file.Name)+sizeStr) + } + staleNodeChunks.Chunks = append(staleNodeChunks.Chunks, staleNodeChunk) + } + if len(staleNodeChunks.Chunks) > 0 { + staleChunks = append(staleChunks, staleNodeChunks) + } + } + return stateManager.SetStaleChunks(staleChunks) +} + type InterruptionErr struct{} func (m *InterruptionErr) Error() string { @@ -682,3 +714,21 @@ func getErrorOrDelayFiles(repoKeys []string, getDirPathFunc func(string) (string } return } + +// Increments index until the file path is unique. +func getUniqueErrorOrDelayFilePath(dirPath string, getFileNamePrefix func() string) (delayFilePath string, err error) { + var exists bool + index := 0 + for { + delayFilePath = filepath.Join(dirPath, fmt.Sprintf("%s-%d.json", getFileNamePrefix(), index)) + exists, err = fileutils.IsFileExists(delayFilePath, false) + if err != nil { + return "", err + } + if !exists { + break + } + index++ + } + return +} diff --git a/artifactory/commands/transferfiles/utils_test.go b/artifactory/commands/transferfiles/utils_test.go index f5b2d8e90..0f38b2100 100644 --- a/artifactory/commands/transferfiles/utils_test.go +++ b/artifactory/commands/transferfiles/utils_test.go @@ -8,9 +8,13 @@ import ( "net/http" "net/http/httptest" "os" + "strconv" "strings" "testing" + "time" + "github.com/jfrog/jfrog-cli-core/v2/artifactory/commands/transferfiles/api" + "github.com/jfrog/jfrog-cli-core/v2/artifactory/commands/transferfiles/state" "github.com/jfrog/jfrog-cli-core/v2/utils/config" "github.com/jfrog/jfrog-cli-core/v2/utils/tests" "github.com/jfrog/jfrog-client-go/artifactory/services" @@ -40,8 +44,16 @@ const runningNodesResponse = ` } ` +const ( + staleChunksNodeIdOne = "node-id-1" + staleChunksNodeIdTwo = "node-id-2" + staleChunksChunkId = "chunk-id" + staleChunksPath = "path-in-repo" + staleChunksName = "file-name" +) + func TestGetRunningNodes(t *testing.T) { - testServer, serverDetails, _ := createMockServer(t, func(w http.ResponseWriter, r *http.Request) { + testServer, serverDetails, _ := createMockServer(t, func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(http.StatusOK) _, err := w.Write([]byte(runningNodesResponse)) assert.NoError(t, err) @@ -56,7 +68,7 @@ func TestGetRunningNodes(t *testing.T) { func TestStopTransferOnArtifactoryNodes(t *testing.T) { stoppedNodeOne, stoppedNodeTwo := false, false requestNumber := 0 - testServer, _, srcUpService := createMockServer(t, func(w http.ResponseWriter, r *http.Request) { + testServer, _, srcUpService := createMockServer(t, func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(http.StatusOK) var nodeId string if requestNumber == 0 { @@ -244,6 +256,109 @@ func TestInterruptIfRequested(t *testing.T) { assert.Equal(t, os.Interrupt, actualSignal) } +func 
TestStoreStaleChunksEmpty(t *testing.T) { + // Init state manager + stateManager, cleanUp := state.InitStateTest(t) + defer cleanUp() + + // Store empty stale chunks + chunksLifeCycleManager := ChunksLifeCycleManager{ + nodeToChunksMap: make(map[nodeId]map[api.ChunkId]UploadedChunkData), + } + assert.NoError(t, chunksLifeCycleManager.StoreStaleChunks(stateManager)) + + // Make sure no chunks + staleChunks, err := stateManager.GetStaleChunks() + assert.NoError(t, err) + assert.Empty(t, staleChunks) +} + +func TestStoreStaleChunksNoStale(t *testing.T) { + // Init state manager + stateManager, cleanUp := state.InitStateTest(t) + defer cleanUp() + + // Store chunk that is not stale + chunksLifeCycleManager := ChunksLifeCycleManager{ + nodeToChunksMap: map[nodeId]map[api.ChunkId]UploadedChunkData{ + staleChunksNodeIdOne: { + staleChunksChunkId: { + TimeSent: time.Now().Add(-time.Minute), + ChunkFiles: []api.FileRepresentation{{Repo: repo1Key, Path: staleChunksPath, Name: staleChunksName}}, + }, + }, + }, + } + assert.NoError(t, chunksLifeCycleManager.StoreStaleChunks(stateManager)) + + // Make sure no chunks + staleChunks, err := stateManager.GetStaleChunks() + assert.NoError(t, err) + assert.Empty(t, staleChunks) +} + +func TestStoreStaleChunksStale(t *testing.T) { + // Init state manager + stateManager, cleanUp := state.InitStateTest(t) + defer cleanUp() + + // Store stale chunk + sent := time.Now().Add(-time.Hour) + chunksLifeCycleManager := ChunksLifeCycleManager{ + nodeToChunksMap: map[nodeId]map[api.ChunkId]UploadedChunkData{ + staleChunksNodeIdOne: { + staleChunksChunkId: { + TimeSent: sent, + ChunkFiles: []api.FileRepresentation{{Repo: repo1Key, Path: staleChunksPath, Name: staleChunksName, Size: 100}}, + }, + }, + }, + } + assert.NoError(t, chunksLifeCycleManager.StoreStaleChunks(stateManager)) + + // Make sure the stale chunk was stored in the state + staleChunks, err := stateManager.GetStaleChunks() + assert.NoError(t, err) + assert.Len(t, staleChunks, 1) + assert.Equal(t, staleChunksNodeIdOne, staleChunks[0].NodeID) + assert.Len(t, staleChunks[0].Chunks, 1) + assert.Equal(t, staleChunksChunkId, staleChunks[0].Chunks[0].ChunkID) + assert.Equal(t, sent.Unix(), staleChunks[0].Chunks[0].Sent) + assert.Len(t, staleChunks[0].Chunks[0].Files, 1) + assert.Equal(t, fmt.Sprintf("%s/%s/%s (0.1KB)", repo1Key, staleChunksPath, staleChunksName), staleChunks[0].Chunks[0].Files[0]) +} + +func TestStoreStaleChunksTwoNodes(t *testing.T) { + // Init state manager + stateManager, cleanUp := state.InitStateTest(t) + defer cleanUp() + + // Store 1 stale chunk and 1 non-stale chunk + chunksLifeCycleManager := ChunksLifeCycleManager{ + nodeToChunksMap: map[nodeId]map[api.ChunkId]UploadedChunkData{ + staleChunksNodeIdOne: { + staleChunksChunkId: { + TimeSent: time.Now().Add(-time.Hour), // Older than 0.5 hours + ChunkFiles: []api.FileRepresentation{{Repo: repo1Key, Path: staleChunksPath, Name: staleChunksName, Size: 1024}}, + }, + }, + staleChunksNodeIdTwo: { + staleChunksChunkId: { + TimeSent: time.Now(), // Less than 0.5 hours + ChunkFiles: []api.FileRepresentation{{Repo: repo2Key, Path: staleChunksPath, Name: staleChunksName, Size: 0}}, + }, + }, + }, + } + assert.NoError(t, chunksLifeCycleManager.StoreStaleChunks(stateManager)) + + // Make sure only the stale chunk was stored in the state + staleChunks, err := stateManager.GetStaleChunks() + assert.NoError(t, err) + assert.Len(t, staleChunks, 1) + assert.Equal(t, staleChunksNodeIdOne, staleChunks[0].NodeID) +} + // Create mock server to test transfer 
config commands // t - The testing object // testHandler - The HTTP handler of the test @@ -255,3 +370,24 @@ func createMockServer(t *testing.T, testHandler transferFilesHandler) (*httptest assert.NoError(t, err) return testServer, serverDetails, serviceManager } + +func TestGetUniqueErrorOrDelayFilePath(t *testing.T) { + tmpDir, err := os.MkdirTemp("", "unique_file_path_test") + assert.NoError(t, err) + + createUniqueFileAndAssertCounter(t, tmpDir, "prefix", 0) + // A file with 0 already exists, so new counter should be 1. + createUniqueFileAndAssertCounter(t, tmpDir, "prefix", 1) + // Unique prefix, so counter should be 0. + createUniqueFileAndAssertCounter(t, tmpDir, "new", 0) + +} + +func createUniqueFileAndAssertCounter(t *testing.T, tmpDir, prefix string, expectedCounter int) { + filePath, err := getUniqueErrorOrDelayFilePath(tmpDir, func() string { + return prefix + }) + assert.NoError(t, err) + assert.NoError(t, os.WriteFile(filePath, nil, 0644)) + assert.True(t, strings.HasSuffix(filePath, strconv.Itoa(expectedCounter)+".json")) +} diff --git a/artifactory/commands/transferinstall/datatransferinstall.go b/artifactory/commands/transferinstall/datatransferinstall.go index 499d29613..0c4ac6371 100644 --- a/artifactory/commands/transferinstall/datatransferinstall.go +++ b/artifactory/commands/transferinstall/datatransferinstall.go @@ -10,7 +10,6 @@ import ( "github.com/jfrog/jfrog-client-go/utils/errorutils" "github.com/jfrog/jfrog-client-go/utils/io/fileutils" "github.com/jfrog/jfrog-client-go/utils/log" - "github.com/pkg/errors" "net/http" "net/url" "os" @@ -37,9 +36,9 @@ var ( originalDirPath = FileItem{"etc", "plugins"} v7DirPath = FileItem{"var", "etc", "artifactory", "plugins"} // Error types - notValidDestinationErr = errors.Errorf("can't find the directory in which to install the data-transfer plugin. Please ensure you're running this command on the machine on which Artifactory is installed. You can also use the --home-dir option to specify the directory.") + notValidDestinationErr = fmt.Errorf("can't find the directory in which to install the data-transfer plugin. Please ensure you're running this command on the machine on which Artifactory is installed. You can also use the --home-dir option to specify the directory.") downloadConnectionErr = func(baseUrl, fileName, err string) error { - return errors.Errorf("Could not download the plugin file - '%s' from '%s' due to the following error: '%s'. If this machine has no network access to the download URL, you can download these files from another machine and place them in a directory on this machine. You can then run this command again with the --dir command option, with the directory containing the files as the value.", fileName, baseUrl, err) + return fmt.Errorf("Could not download the plugin file - '%s' from '%s' due to the following error: '%s'. If this machine has no network access to the download URL, you can download these files from another machine and place them in a directory on this machine. 
You can then run this command again with the --dir command option, with the directory containing the files as the value.", fileName, baseUrl, err) } // Plugin files transferPluginFiles = PluginFiles{ @@ -217,8 +216,9 @@ func (idtp *InstallDataTransferPluginCommand) getPluginDirDestination() (target // Flag override if idtp.localJFrogHomePath != "" { - log.Debug(fmt.Sprintf("Searching for the 'plugins' directory in the JFrog home directory '%s'.", idtp.localJFrogHomePath)) - if exists, target, err = idtp.transferManger.findDestination(idtp.localJFrogHomePath); err != nil || exists { + jfrogHomeDir := strings.TrimSpace(idtp.localJFrogHomePath) + log.Debug(fmt.Sprintf("Searching for the 'plugins' directory in the JFrog home directory '%s'.", jfrogHomeDir)) + if exists, target, err = idtp.transferManger.findDestination(jfrogHomeDir); err != nil || exists { return } if !exists { @@ -228,8 +228,9 @@ func (idtp *InstallDataTransferPluginCommand) getPluginDirDestination() (target } // Environment variable override if envVal, exists = os.LookupEnv(jfrogHomeEnvVar); exists { - log.Debug(fmt.Sprintf("Searching for the 'plugins' directory in the JFrog home directory '%s' retrieved from the '%s' environment variable.", envVal, jfrogHomeEnvVar)) - if exists, target, err = idtp.transferManger.findDestination(envVal); err != nil || exists { + jfrogHomeDir := strings.TrimSpace(envVal) + log.Debug(fmt.Sprintf("Searching for the 'plugins' directory in the JFrog home directory '%s' retrieved from the '%s' environment variable.", jfrogHomeDir, jfrogHomeEnvVar)) + if exists, target, err = idtp.transferManger.findDestination(jfrogHomeDir); err != nil || exists { return } } diff --git a/artifactory/commands/utils/checkrunner_test.go b/artifactory/commands/utils/checkrunner_test.go index 66a2cba3b..22533f1cf 100644 --- a/artifactory/commands/utils/checkrunner_test.go +++ b/artifactory/commands/utils/checkrunner_test.go @@ -2,7 +2,7 @@ package utils import ( "context" - "github.com/pkg/errors" + "fmt" "github.com/stretchr/testify/assert" "testing" ) @@ -18,7 +18,7 @@ func TestChecks(t *testing.T) { func TestRunChecks(t *testing.T) { // Init - expectedErr := errors.Errorf("CHECK_ERROR") + expectedErr := fmt.Errorf("CHECK_ERROR") nSuccess := 3 nFail := 2 runner := NewPreChecksRunner() diff --git a/artifactory/commands/utils/configfile.go b/artifactory/commands/utils/configfile.go index e6fe1751d..146356d84 100644 --- a/artifactory/commands/utils/configfile.go +++ b/artifactory/commands/utils/configfile.go @@ -15,7 +15,7 @@ import ( "github.com/urfave/cli" "golang.org/x/text/cases" "golang.org/x/text/language" - "gopkg.in/yaml.v2" + "gopkg.in/yaml.v3" ) const BuildConfVersion = 1 diff --git a/artifactory/commands/utils/questionnaire.go b/artifactory/commands/utils/questionnaire.go index c28347be2..ea48069af 100644 --- a/artifactory/commands/utils/questionnaire.go +++ b/artifactory/commands/utils/questionnaire.go @@ -11,7 +11,7 @@ import ( ) const ( - InsertValuePromptMsg = "Insert the value for " + insertValuePromptMsg = "Insert the value for " DummyDefaultAnswer = "-" ) @@ -22,7 +22,7 @@ const ( // * We will ask all the questions in MandatoryQuestionsKeys list one after the other. // 2. Optional questions: // * We have to provide a slice of prompt.Suggest, in which each suggest.Text is a key of a question in the map. -// * After a suggest was chosen from the list, the corresponding question from the map will be asked. 
+// * After a suggestion was chosen from the list, the corresponding question from the map will be asked. // * Each answer is written to the configMap using its writer, under the MapKey specified in the questionInfo. // * We will execute the previous step until the SaveAndExit string was inserted. type InteractiveQuestionnaire struct { @@ -316,13 +316,23 @@ func OptionalKeyCallback(iq *InteractiveQuestionnaire, key string) (value string if key != SaveAndExit { valueQuestion := iq.QuestionsMap[key] // Since we are using default question in most of the cases we set the map key here. - valueQuestion.MapKey = key - valueQuestion.PromptPrefix = InsertValuePromptMsg + key - if valueQuestion.Options != nil { - valueQuestion.PromptPrefix += PressTabMsg + if valueQuestion.MapKey == "" { + valueQuestion.MapKey = key } - valueQuestion.PromptPrefix += " >" + editOptionalQuestionPromptPrefix(&valueQuestion, key) value, err = iq.AskQuestion(valueQuestion) } return value, err } + +func editOptionalQuestionPromptPrefix(question *QuestionInfo, key string) { + if question.PromptPrefix == "" { + question.PromptPrefix = insertValuePromptMsg + key + } + if question.Options != nil { + question.PromptPrefix += PressTabMsg + } + if !strings.HasSuffix(question.PromptPrefix, " >") { + question.PromptPrefix += " >" + } +} diff --git a/artifactory/commands/utils/templateutils.go b/artifactory/commands/utils/templateutils.go index 4607c86ec..db54f833f 100644 --- a/artifactory/commands/utils/templateutils.go +++ b/artifactory/commands/utils/templateutils.go @@ -10,7 +10,7 @@ import ( "github.com/jfrog/jfrog-client-go/utils/errorutils" ) -const PathErrorSuffixMsg = " please enter a path, in which the new template file will be created" +const pathErrorSuffixMsg = " please enter a path, in which the new template file will be created" type TemplateUserCommand interface { // Returns the file path. 
@@ -52,14 +52,14 @@ func ValidateTemplatePath(templatePath string) error { return errorutils.CheckError(err) } if exists || strings.HasSuffix(templatePath, string(os.PathSeparator)) { - return errorutils.CheckErrorf("path cannot be a directory," + PathErrorSuffixMsg) + return errorutils.CheckErrorf("path cannot be a directory," + pathErrorSuffixMsg) } exists, err = fileutils.IsFileExists(templatePath, false) if err != nil { return errorutils.CheckError(err) } if exists { - return errorutils.CheckErrorf("file already exists," + PathErrorSuffixMsg) + return errorutils.CheckErrorf("file already exists," + pathErrorSuffixMsg) } return nil } diff --git a/artifactory/commands/utils/transferconfigbase.go b/artifactory/commands/utils/transferconfigbase.go index 133776982..6f88adda6 100644 --- a/artifactory/commands/utils/transferconfigbase.go +++ b/artifactory/commands/utils/transferconfigbase.go @@ -2,11 +2,15 @@ package utils import ( "encoding/json" + "errors" "fmt" + "strings" + "github.com/jfrog/gofrog/datastructures" "github.com/jfrog/jfrog-cli-core/v2/artifactory/utils" "github.com/jfrog/jfrog-cli-core/v2/utils/config" "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" + "github.com/jfrog/jfrog-client-go/access" "github.com/jfrog/jfrog-client-go/artifactory" clientUtils "github.com/jfrog/jfrog-client-go/utils" "github.com/jfrog/jfrog-client-go/utils/errorutils" @@ -16,8 +20,9 @@ import ( ) const ( - defaultAdminUsername = "admin" - defaultAdminPassword = "password" + MinJFrogProjectsArtifactoryVersion = "7.0.0" + defaultAdminUsername = "admin" + defaultAdminPassword = "password" ) type TransferConfigBase struct { @@ -25,6 +30,8 @@ type TransferConfigBase struct { TargetServerDetails *config.ServerDetails SourceArtifactoryManager artifactory.ArtifactoryServicesManager TargetArtifactoryManager artifactory.ArtifactoryServicesManager + SourceAccessManager *access.AccessServicesManager + TargetAccessManager *access.AccessServicesManager IncludeReposPatterns []string ExcludeReposPatterns []string FederatedMembersRemoved bool @@ -55,12 +62,35 @@ func (tcb *TransferConfigBase) GetRepoFilter() *utils.IncludeExcludeFilter { } func (tcb *TransferConfigBase) CreateServiceManagers(dryRun bool) (err error) { - tcb.SourceArtifactoryManager, err = utils.CreateServiceManager(tcb.SourceServerDetails, -1, 0, dryRun) - if err != nil { - return err + if tcb.SourceArtifactoryManager, err = utils.CreateServiceManager(tcb.SourceServerDetails, -1, 0, dryRun); err != nil { + return + } + if tcb.TargetArtifactoryManager, err = utils.CreateServiceManager(tcb.TargetServerDetails, -1, 0, dryRun); err != nil { + return + } + if tcb.SourceAccessManager, err = utils.CreateAccessServiceManager(tcb.SourceServerDetails, false); err != nil { + return + } + if tcb.TargetAccessManager, err = utils.CreateAccessServiceManager(tcb.TargetServerDetails, false); err != nil { + return } - tcb.TargetArtifactoryManager, err = utils.CreateServiceManager(tcb.TargetServerDetails, -1, 0, dryRun) - return err + return +} + +// Make sure that the server is configured with a valid admin Access Token. 
+// serverDetails - The server to check +// accessManager - Access Manager to run ping +func (tcb *TransferConfigBase) ValidateAccessServerConnection(serverDetails *config.ServerDetails, accessManager *access.AccessServicesManager) error { + if serverDetails.Password != "" { + return errorutils.CheckErrorf("it looks like you configured the '%[1]s' instance with username and password.\n"+ + "This command can be used with admin Access Token only.\n"+ + "Please use the 'jf c edit %[1]s' command to configure the Access Token, and then re-run the command", serverDetails.ServerId) + } + + if _, err := accessManager.Ping(); err != nil { + return errors.Join(err, fmt.Errorf("the '%[1]s' instance Access Token is not valid. Please provide a valid access token by running the 'jf c edit %[1]s'", serverDetails.ServerId)) + } + return nil } // Make sure source and target Artifactory URLs are different. @@ -76,6 +106,11 @@ func (tcb *TransferConfigBase) ValidateDifferentServers() error { // Create a map between the repository types to the list of repositories to transfer. func (tcb *TransferConfigBase) GetSelectedRepositories() (map[utils.RepoType][]string, error) { + allTargetRepos, err := tcb.getAllTargetRepositories() + if err != nil { + return nil, err + } + result := make(map[utils.RepoType][]string, len(utils.RepoTypes)+1) sourceRepos, err := tcb.SourceArtifactoryManager.GetAllRepositories() if err != nil { @@ -87,6 +122,10 @@ func (tcb *TransferConfigBase) GetSelectedRepositories() (map[utils.RepoType][]s if shouldIncludeRepo, err := includeExcludeFilter.ShouldIncludeRepository(sourceRepo.Key); err != nil { return nil, err } else if shouldIncludeRepo { + if allTargetRepos.Exists(sourceRepo.Key) { + log.Info("Repository '" + sourceRepo.Key + "' already exists in the target Artifactory server. Skipping.") + continue + } repoType := utils.RepoTypeFromString(sourceRepo.Type) result[repoType] = append(result[repoType], sourceRepo.Key) } @@ -112,7 +151,7 @@ func (tcb *TransferConfigBase) DeactivateKeyEncryption() (reactivateKeyEncryptio func (tcb *TransferConfigBase) TransferRepositoriesToTarget(reposToTransfer map[utils.RepoType][]string, remoteRepositories []interface{}) (err error) { // Transfer remote repositories for i, remoteRepositoryName := range reposToTransfer[utils.Remote] { - if err = tcb.TargetArtifactoryManager.CreateRepositoryWithParams(remoteRepositories[i], remoteRepositoryName); err != nil { + if err = tcb.createRepositoryAndAssignToProject(remoteRepositories[i], remoteRepositoryName); err != nil { return } } @@ -132,6 +171,19 @@ func (tcb *TransferConfigBase) TransferRepositoriesToTarget(reposToTransfer map[ return tcb.transferVirtualRepositoriesToTarget(reposToTransfer[utils.Virtual]) } +// Get a set of all repositories in the target Artifactory server. 
+func (tcb *TransferConfigBase) getAllTargetRepositories() (*datastructures.Set[string], error) { + targetRepos, err := tcb.TargetArtifactoryManager.GetAllRepositories() + if err != nil { + return nil, err + } + allTargetRepos := datastructures.MakeSet[string]() + for _, targetRepo := range *targetRepos { + allTargetRepos.Add(targetRepo.Key) + } + return allTargetRepos, nil +} + // Transfer local, federated, unknown, or virtual repositories // reposToTransfer - Repositories names to transfer // repoType - Repository type @@ -146,7 +198,7 @@ func (tcb *TransferConfigBase) transferSpecificRepositoriesToTarget(reposToTrans return } } - if err = tcb.TargetArtifactoryManager.CreateRepositoryWithParams(params, repoKey); err != nil { + if err = tcb.createRepositoryAndAssignToProject(params, repoKey); err != nil { return } } @@ -174,7 +226,7 @@ func (tcb *TransferConfigBase) transferVirtualRepositoriesToTarget(reposToTransf // Create virtual repository without included repositories repositories := singleRepoParamsMap["repositories"] delete(singleRepoParamsMap, "repositories") - if err = tcb.TargetArtifactoryManager.CreateRepositoryWithParams(singleRepoParamsMap, repoKey); err != nil { + if err = tcb.createRepositoryAndAssignToProject(singleRepoParamsMap, repoKey); err != nil { return } @@ -265,14 +317,64 @@ func (tcb *TransferConfigBase) removeFederatedMembers(federatedRepoParams interf if _, exist := repoMap["members"]; exist { delete(repoMap, "members") tcb.FederatedMembersRemoved = true + } else { + return federatedRepoParams, nil + } + return MapToInterface(repoMap) +} + +// Create a repository in the target server and assign the repository to the required project, if any. +// repoParams - Repository parameters +// repoKey - Repository key +func (tcb *TransferConfigBase) createRepositoryAndAssignToProject(repoParams interface{}, repoKey string) (err error) { + var projectKey string + if repoParams, projectKey, err = removeProjectKeyIfNeeded(repoParams, repoKey); err != nil { + return + } + if projectKey != "" { + // Workaround - It's possible that the repository could be assigned to a project in the access.bootstrap.json. + // This is why we make sure to detach it before actually creating the repository. + // If the project isn't linked to the repository, an error might come up, but we ignore it because we can't + // be certain whether the repository was actually assigned to the project or not. + _ = tcb.TargetAccessManager.UnassignRepoFromProject(repoKey) + } + if err = tcb.TargetArtifactoryManager.CreateRepositoryWithParams(repoParams, repoKey); err != nil { + return + } + if projectKey != "" { + return tcb.TargetAccessManager.AssignRepoToProject(repoKey, projectKey, true) } - repoBytes, err := json.Marshal(repoMap) + return +} + +// Remove a non-default project key from the repository parameters if it exists and the repository key does not start with it. +// This allows creating project-assigned repositories whose names do not start with the project key prefix. +// Returns the updated repository params, the project key if it was removed, and an error if any.
+func removeProjectKeyIfNeeded(repoParams interface{}, repoKey string) (interface{}, string, error) { + var projectKey string + repoMap, err := InterfaceToMap(repoParams) if err != nil { - return nil, errorutils.CheckError(err) + return nil, "", err } - var response interface{} - err = json.Unmarshal(repoBytes, &response) - return response, errorutils.CheckError(err) + if value, exist := repoMap["projectKey"]; exist { + var ok bool + if projectKey, ok = value.(string); !ok { + return nil, "", errorutils.CheckErrorf("couldn't parse the 'projectKey' value '%v' of repository '%s'", value, repoKey) + } + if projectKey == "default" || strings.HasPrefix(repoKey, projectKey+"-") { + // The repository key is starting with the project key prefix: + // <project-key>-<repo-name> + return repoParams, "", nil + } + delete(repoMap, "projectKey") + } else { + return repoParams, "", nil + } + response, err := MapToInterface(repoMap) + if err != nil { + return nil, "", err + } + return response, projectKey, errorutils.CheckError(err) } // During the transfer-config commands we remove federated members, if existed. @@ -296,3 +398,15 @@ func InterfaceToMap(jsonInterface interface{}) (map[string]interface{}, error) { err = errorutils.CheckError(json.Unmarshal(b, &newMap)) return newMap, err } + +// Convert the input map to JSON interface. +// mapToTransfer - Map of string to interface, such as repository name +func MapToInterface(mapToTransfer map[string]interface{}) (interface{}, error) { + repoBytes, err := json.Marshal(mapToTransfer) + if err != nil { + return nil, errorutils.CheckError(err) + } + var response interface{} + err = json.Unmarshal(repoBytes, &response) + return response, errorutils.CheckError(err) +} diff --git a/artifactory/commands/utils/transferconfigbase_test.go b/artifactory/commands/utils/transferconfigbase_test.go index 138f61200..1d0cdc7d0 100644 --- a/artifactory/commands/utils/transferconfigbase_test.go +++ b/artifactory/commands/utils/transferconfigbase_test.go @@ -16,6 +16,11 @@ import ( "github.com/stretchr/testify/assert" ) +const ( + repoKey = "repoKey" + projectKey = "test-proj" +) + var transferConfigTestDir = filepath.Join("testdata", "transferconfig") func TestIsDefaultCredentialsDefault(t *testing.T) { @@ -119,10 +124,10 @@ func TestValidateDifferentServers(t *testing.T) { } func TestGetSelectedRepositories(t *testing.T) { - testServer, serverDetails, _ := commonTests.CreateRtRestsMockServer(t, func(w http.ResponseWriter, r *http.Request) { + sourceTestServer, sourceServerDetails, _ := commonTests.CreateRtRestsMockServer(t, func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(http.StatusOK) repositories := &[]services.RepositoryDetails{ - {Key: "generic-local", Type: "local"}, {Key: "generic-local-filter", Type: "local"}, + {Key: "generic-local", Type: "local"}, {Key: "generic-local-filter", Type: "local"}, {Key: "generic-local-existed", Type: "local"}, {Key: "generic-remote", Type: "remote"}, {Key: "generic-filter-remote", Type: "remote"}, {Key: "generic-virtual", Type: "virtual"}, {Key: "filter-generic-virtual", Type: "virtual"}, {Key: "generic-federated", Type: "federated"}, {Key: "generic-federated-filter", Type: "federated"}, @@ -132,9 +137,18 @@ func TestGetSelectedRepositories(t *testing.T) { _, err = w.Write(reposBytes) assert.NoError(t, err) }) - defer testServer.Close() + defer sourceTestServer.Close() + targetTestServer, targetServerDetails, _ := commonTests.CreateRtRestsMockServer(t, func(w http.ResponseWriter, _ *http.Request) { + w.WriteHeader(http.StatusOK) + repositories 
:= &[]services.RepositoryDetails{{Key: "generic-local-existed", Type: "local"}} + reposBytes, err := json.Marshal(repositories) + assert.NoError(t, err) + _, err = w.Write(reposBytes) + assert.NoError(t, err) + }) + defer targetTestServer.Close() - transferConfigBase := createTransferConfigBase(t, serverDetails, serverDetails) + transferConfigBase := createTransferConfigBase(t, sourceServerDetails, targetServerDetails) transferConfigBase.SetExcludeReposPatterns([]string{"*filter*"}) selectedRepos, err := transferConfigBase.GetSelectedRepositories() assert.NoError(t, err) @@ -262,6 +276,80 @@ func testDeactivateKeyEncryption(t *testing.T, wasEncrypted bool) { assert.Equal(t, reactivated, wasEncrypted) } +var removeProjectKeyCases = []struct { + repoKey string + projectKey string + expectedProjectKey string +}{ + {repoKey: repoKey, projectKey: "", expectedProjectKey: ""}, + {repoKey: repoKey, projectKey: "default", expectedProjectKey: ""}, + {repoKey: repoKey, projectKey: projectKey, expectedProjectKey: projectKey}, + {repoKey: projectKey + "-" + repoKey, projectKey: projectKey, expectedProjectKey: ""}, +} + +func TestRemoveProjectKey(t *testing.T) { + repoParams := services.NewLocalRepositoryBaseParams() + + for _, testCase := range removeProjectKeyCases { + t.Run(testCase.repoKey, func(t *testing.T) { + repoParams.ProjectKey = testCase.projectKey + actualRepoParams, actualProjectKey, err := removeProjectKeyIfNeeded(repoParams, testCase.repoKey) + assert.NoError(t, err) + assert.Equal(t, testCase.expectedProjectKey, actualProjectKey) + if testCase.expectedProjectKey == "" { + assert.Equal(t, repoParams, actualRepoParams) + } else { + assert.NotEqual(t, repoParams, actualRepoParams) + } + }) + } +} + +func TestRemoveProjectKeyIllegal(t *testing.T) { + type illegalRepoParamsStruct struct { + ProjectKey int `json:"projectKey"` + } + illegalRepoParams := &illegalRepoParamsStruct{ProjectKey: 7} + actualRepoParams, projectKey, err := removeProjectKeyIfNeeded(illegalRepoParams, repoKey) + assert.ErrorContains(t, err, "couldn't parse the 'projectKey' value '7' of repository 'repoKey'") + assert.Empty(t, projectKey) + assert.Nil(t, actualRepoParams) +} + +func TestCreateRepositoryAndAssignToProject(t *testing.T) { + projectUnassigned := false + repositoryCreated := false + projectAssigned := false + testServer, serverDetails, _ := commonTests.CreateRtRestsMockServer(t, func(w http.ResponseWriter, r *http.Request) { + switch r.RequestURI { + case "/access/api/v1/projects/_/attach/repositories/local-repo": + projectUnassigned = true + case "/api/repositories/local-repo": + repositoryCreated = true + body, err := io.ReadAll(r.Body) + assert.NoError(t, err) + _, exist := getRepoParamsMap(t, body)["projectKey"] + assert.False(t, exist) + case "/access/api/v1/projects/_/attach/repositories/local-repo/test-proj?force=true": + projectAssigned = true + default: + assert.Fail(t, "Unexpected request URI: "+r.RequestURI) + } + w.WriteHeader(http.StatusOK) + }) + defer testServer.Close() + transferConfigBase := createTransferConfigBase(t, serverDetails, serverDetails) + + repoParams := services.NewLocalRepositoryBaseParams() + repoParams.Key = "local-repo" + repoParams.ProjectKey = projectKey + err := transferConfigBase.createRepositoryAndAssignToProject(repoParams, repoParams.Key) + assert.NoError(t, err) + assert.True(t, projectUnassigned) + assert.True(t, repositoryCreated) + assert.True(t, projectAssigned) +} + func createTransferConfigBase(t *testing.T, sourceServerDetails, targetServerDetails 
*config.ServerDetails) *TransferConfigBase { transferConfigBase := NewTransferConfigBase(sourceServerDetails, targetServerDetails) assert.NoError(t, transferConfigBase.CreateServiceManagers(false)) diff --git a/artifactory/commands/yarn/yarn.go b/artifactory/commands/yarn/yarn.go index df940b8a8..2d46b6dde 100644 --- a/artifactory/commands/yarn/yarn.go +++ b/artifactory/commands/yarn/yarn.go @@ -22,9 +22,10 @@ import ( ) const ( - YarnrcFileName = ".yarnrc.yml" - YarnrcBackupFileName = "jfrog.yarnrc.backup" - NpmScopesConfigName = "npmScopes" + YarnrcFileName = ".yarnrc.yml" + YarnrcBackupFileName = "jfrog.yarnrc.backup" + NpmScopesConfigName = "npmScopes" + //#nosec G101 yarnNpmRegistryServerEnv = "YARN_NPM_REGISTRY_SERVER" yarnNpmAuthIndent = "YARN_NPM_AUTH_IDENT" yarnNpmAlwaysAuth = "YARN_NPM_ALWAYS_AUTH" diff --git a/artifactory/utils/container/buildinfo_test.go b/artifactory/utils/container/buildinfo_test.go index cb62c1d49..2b9bc5939 100644 --- a/artifactory/utils/container/buildinfo_test.go +++ b/artifactory/utils/container/buildinfo_test.go @@ -61,7 +61,7 @@ func TestManifestConfig(t *testing.T) { } func createManifestConfig() (map[string]*utils.ResultItem, string) { - config := make(map[string]*utils.ResultItem, 0) + config := make(map[string]*utils.ResultItem) config["manifest.json"] = dummySearchResults config["sha__123"] = dummySearchResults return config, "sha:123" @@ -73,7 +73,7 @@ func TestManifestConfigNoManifestFound(t *testing.T) { } func createEmptyManifestConfig() (map[string]*utils.ResultItem, string) { - config := make(map[string]*utils.ResultItem, 0) + config := make(map[string]*utils.ResultItem) return config, "sha:123" } @@ -83,7 +83,7 @@ func TestManifestConfigNoLayer(t *testing.T) { } func createManifestConfigWithNoLayer() (map[string]*utils.ResultItem, string) { - config := make(map[string]*utils.ResultItem, 0) + config := make(map[string]*utils.ResultItem) config["manifest.json"] = dummySearchResults return config, "sha:123" } @@ -102,7 +102,7 @@ func createManifestConfigWithLayer() (map[string]*utils.ResultItem, *manifest) { MediaType: "MediaType", }}, } - searchResults := make(map[string]*utils.ResultItem, 0) + searchResults := make(map[string]*utils.ResultItem) searchResults["manifest.json"] = dummySearchResults searchResults["sha__1"] = dummySearchResults searchResults["sha__2"] = dummySearchResults @@ -129,7 +129,7 @@ func createManifestConfigWithMissingLayer() (map[string]*utils.ResultItem, *mani }, }, } - searchResults := make(map[string]*utils.ResultItem, 0) + searchResults := make(map[string]*utils.ResultItem) searchResults["manifest.json"] = dummySearchResults searchResults["sha__1"] = dummySearchResults return searchResults, manifest @@ -156,7 +156,7 @@ func createManifestConfigWithForeignLayer() (map[string]*utils.ResultItem, *mani }, }, } - searchResults := make(map[string]*utils.ResultItem, 0) + searchResults := make(map[string]*utils.ResultItem) searchResults["manifest.json"] = dummySearchResults searchResults["sha__1"] = dummySearchResults return searchResults, manifest diff --git a/artifactory/utils/dependenciesutils.go b/artifactory/utils/dependenciesutils.go index 950508647..7cfe5c4bf 100644 --- a/artifactory/utils/dependenciesutils.go +++ b/artifactory/utils/dependenciesutils.go @@ -79,7 +79,7 @@ func DownloadAnalyzerManagerIfNeeded() error { } } // Download & unzip the analyzer manager files - log.Info("The 'Analyzer Manager' app is not cached locally. Downloading it now...") + log.Debug("The 'Analyzer Manager' app is not cached locally. 
Downloading it now...") if err = DownloadDependency(artDetails, remotePath, filepath.Join(analyzerManagerDir, xrayutils.AnalyzerManagerZipName), true); err != nil { return err } @@ -112,21 +112,19 @@ func createChecksumFile(targetPath, checksum string) (err error) { return } -// The GetExtractorsRemoteDetails function is responsible for retrieving the server details necessary to download the build-info extractors. +// GetExtractorsRemoteDetails retrieves the server details necessary to download the build-info extractors from a remote repository. // downloadPath - specifies the path in the remote repository from which the extractors will be downloaded. func GetExtractorsRemoteDetails(downloadPath string) (server *config.ServerDetails, remoteRepo string, err error) { - server, remoteRepo, err = getRemoteDetailsFromEnv(downloadPath) - if remoteRepo != "" || err != nil { - return + // Download from the remote repository that proxies https://releases.jfrog.io + server, remoteRepo, err = getExtractorsRemoteDetailsFromEnv(downloadPath) + if remoteRepo == "" && err == nil { + // Fallback to the deprecated JFROG_CLI_EXTRACTORS_REMOTE environment variable + server, remoteRepo, err = getExtractorsRemoteDetailsFromLegacyEnv(downloadPath) } - // Fallback to the deprecated JFROG_CLI_EXTRACTORS_REMOTE environment variable - server, remoteRepo, err = getLegacyRemoteDetailsFromEnv(downloadPath) if remoteRepo != "" || err != nil { - log.Warn(fmt.Sprintf("You are using the deprecated %q environment variable. Use %q instead.\nRead more about it at %sjfrog-cli/downloading-the-maven-and-gradle-extractor-jars", - coreutils.DeprecatedExtractorsRemoteEnv, coreutils.ReleasesRemoteEnv, coreutils.JFrogHelpUrl)) return } - + // Download directly from https://releases.jfrog.io log.Info("The build-info-extractor jar is not cached locally. Downloading it now...\n" + "You can set the repository from which this jar is downloaded.\n" + "Read more about it at " + coreutils.JFrogHelpUrl + "jfrog-cli/downloading-the-maven-and-gradle-extractor-jars") @@ -135,12 +133,22 @@ func GetExtractorsRemoteDetails(downloadPath string) (server *config.ServerDetai return &config.ServerDetails{ArtifactoryUrl: coreutils.JfrogReleasesUrl}, path.Join("oss-release-local", downloadPath), nil } -func getRemoteDetailsFromEnv(downloadPath string) (server *config.ServerDetails, remoteRepo string, err error) { - return getRemoteDetails(downloadPath, coreutils.ReleasesRemoteEnv) +func getExtractorsRemoteDetailsFromEnv(downloadPath string) (server *config.ServerDetails, remoteRepo string, err error) { + server, remoteRepo, err = getRemoteDetails(coreutils.ReleasesRemoteEnv) + if remoteRepo != "" && err == nil { + remoteRepo = getFullExtractorsPathInArtifactory(remoteRepo, coreutils.ReleasesRemoteEnv, downloadPath) + } + return } -func getLegacyRemoteDetailsFromEnv(downloadPath string) (server *config.ServerDetails, remoteRepo string, err error) { - return getRemoteDetails(downloadPath, coreutils.DeprecatedExtractorsRemoteEnv) +func getExtractorsRemoteDetailsFromLegacyEnv(downloadPath string) (server *config.ServerDetails, remoteRepo string, err error) { + server, remoteRepo, err = getRemoteDetails(coreutils.DeprecatedExtractorsRemoteEnv) + if remoteRepo != "" && err == nil { + log.Warn(fmt.Sprintf("You are using the deprecated %q environment variable. 
Use %q instead.\nRead more about it at %sjfrog-cli/downloading-the-maven-and-gradle-extractor-jars", + coreutils.DeprecatedExtractorsRemoteEnv, coreutils.ReleasesRemoteEnv, coreutils.JFrogHelpUrl)) + remoteRepo = getFullExtractorsPathInArtifactory(remoteRepo, coreutils.DeprecatedExtractorsRemoteEnv, downloadPath) + } + return } // getRemoteDetails function retrieves the server details and downloads path for the build-info extractor file. @@ -148,20 +156,16 @@ func getLegacyRemoteDetailsFromEnv(downloadPath string) (server *config.ServerDe // downloadPath - specifies the path in the remote repository from which the extractors will be downloaded. // remoteEnv - the relevant environment variable that was used: releasesRemoteEnv/ExtractorsRemoteEnv. // The function returns the server that matches the given server ID, the complete path of the build-info extractor concatenated with the specified remote repository, and an error if occurred. -func getRemoteDetails(downloadPath, remoteEnv string) (server *config.ServerDetails, fullRemoteRepoPath string, err error) { +func getRemoteDetails(remoteEnv string) (server *config.ServerDetails, repoName string, err error) { serverID, repoName, err := coreutils.GetServerIdAndRepo(remoteEnv) if err != nil { return } if serverID == "" && repoName == "" { - // Remote details weren't configured. Assuming that https://releases.jfro.io should be used. + // Remote details weren't configured. Assuming that https://releases.jfrog.io should be used. return } server, err = config.GetSpecificConfig(serverID, false, true) - if err != nil { - return - } - fullRemoteRepoPath = getFullExtractorsPathInArtifactory(repoName, remoteEnv, downloadPath) return } @@ -244,9 +248,14 @@ func createHttpClient(artDetails *config.ServerDetails) (rtHttpClient *jfroghttp return } -func getAnalyzerManagerRemoteDetails(downloadPath string) (server *config.ServerDetails, remoteRepo string, err error) { - server, remoteRepo, err = getRemoteDetailsFromEnv(downloadPath) - if remoteRepo != "" || err != nil { +func getAnalyzerManagerRemoteDetails(downloadPath string) (server *config.ServerDetails, fullRemotePath string, err error) { + var remoteRepo string + server, remoteRepo, err = getRemoteDetails(coreutils.ReleasesRemoteEnv) + if err != nil { + return + } + if remoteRepo != "" { + fullRemotePath = path.Join(remoteRepo, "artifactory", downloadPath) return } log.Debug("'" + coreutils.ReleasesRemoteEnv + "' environment variable is not configured. 
The Analyzer Manager app will be downloaded directly from releases.jfrog.io if needed.") diff --git a/artifactory/utils/utils.go b/artifactory/utils/utils.go index aecd1c7d1..f20ed3c01 100644 --- a/artifactory/utils/utils.go +++ b/artifactory/utils/utils.go @@ -4,19 +4,7 @@ import ( "context" "encoding/json" "errors" - "io" - "net/http" - "net/url" - "os" - "path" - "path/filepath" - "github.com/jfrog/build-info-go/build" - clientutils "github.com/jfrog/jfrog-client-go/utils" - "github.com/jfrog/jfrog-client-go/utils/log" - - clientio "github.com/jfrog/jfrog-client-go/utils/io" - "github.com/jfrog/jfrog-cli-core/v2/utils/config" "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" "github.com/jfrog/jfrog-client-go/access" @@ -25,7 +13,17 @@ import ( clientConfig "github.com/jfrog/jfrog-client-go/config" "github.com/jfrog/jfrog-client-go/distribution" "github.com/jfrog/jfrog-client-go/http/httpclient" + "github.com/jfrog/jfrog-client-go/lifecycle" + clientUtils "github.com/jfrog/jfrog-client-go/utils" "github.com/jfrog/jfrog-client-go/utils/errorutils" + ioUtils "github.com/jfrog/jfrog-client-go/utils/io" + "github.com/jfrog/jfrog-client-go/utils/log" + "io" + "net/http" + "net/url" + "os" + "path" + "path/filepath" ) func GetProjectDir(global bool) (string, error) { @@ -82,7 +80,7 @@ func GetEncryptedPasswordFromArtifactory(artifactoryAuth auth.ServiceDetails, in return "", errorutils.CheckErrorf(message) } - return "", errorutils.CheckErrorf("Artifactory response: " + resp.Status + "\n" + clientutils.IndentJson(body)) + return "", errorutils.CheckErrorf("Artifactory response: " + resp.Status + "\n" + clientUtils.IndentJson(body)) } func CreateServiceManager(serverDetails *config.ServerDetails, httpRetries, httpRetryWaitMilliSecs int, isDryRun bool) (artifactory.ArtifactoryServicesManager, error) { @@ -104,27 +102,27 @@ func CreateServiceManagerWithContext(context context.Context, serverDetails *con if err != nil { return nil, err } - config := clientConfig.NewConfigBuilder(). + configBuilder := clientConfig.NewConfigBuilder(). SetServiceDetails(artAuth). SetCertificatesPath(certsPath). SetInsecureTls(serverDetails.InsecureTls). SetDryRun(isDryRun). 
SetContext(context) if httpRetries >= 0 { - config.SetHttpRetries(httpRetries) - config.SetHttpRetryWaitMilliSecs(httpRetryWaitMilliSecs) + configBuilder.SetHttpRetries(httpRetries) + configBuilder.SetHttpRetryWaitMilliSecs(httpRetryWaitMilliSecs) } if threads > 0 { - config.SetThreads(threads) + configBuilder.SetThreads(threads) } - serviceConfig, err := config.Build() + serviceConfig, err := configBuilder.Build() if err != nil { return nil, err } return artifactory.New(serviceConfig) } -func CreateServiceManagerWithProgressBar(serverDetails *config.ServerDetails, threads, httpRetries, httpRetryWaitMilliSecs int, dryRun bool, progressBar clientio.ProgressMgr) (artifactory.ArtifactoryServicesManager, error) { +func CreateServiceManagerWithProgressBar(serverDetails *config.ServerDetails, threads, httpRetries, httpRetryWaitMilliSecs int, dryRun bool, progressBar ioUtils.ProgressMgr) (artifactory.ArtifactoryServicesManager, error) { certsPath, err := coreutils.GetJfrogCertsDir() if err != nil { return nil, err @@ -191,6 +189,27 @@ func CreateAccessServiceManager(serviceDetails *config.ServerDetails, isDryRun b return access.New(serviceConfig) } +func CreateLifecycleServiceManager(serviceDetails *config.ServerDetails, isDryRun bool) (*lifecycle.LifecycleServicesManager, error) { + certsPath, err := coreutils.GetJfrogCertsDir() + if err != nil { + return nil, err + } + lcAuth, err := serviceDetails.CreateLifecycleAuthConfig() + if err != nil { + return nil, err + } + serviceConfig, err := clientConfig.NewConfigBuilder(). + SetServiceDetails(lcAuth). + SetCertificatesPath(certsPath). + SetInsecureTls(serviceDetails.InsecureTls). + SetDryRun(isDryRun). + Build() + if err != nil { + return nil, err + } + return lifecycle.New(serviceConfig) +} + // This error indicates that the build was scanned by Xray, but Xray found issues with the build. // If Xray failed to scan the build, for example due to a networking issue, a regular error should be returned. var errBuildScan = errors.New("issues found during xray build scan") @@ -246,11 +265,11 @@ func createServiceManager(serviceDetails auth.ServiceDetails) (artifactory.Artif if err != nil { return nil, err } - config := clientConfig.NewConfigBuilder(). + serviceConfig, err := clientConfig.NewConfigBuilder(). SetServiceDetails(serviceDetails). SetCertificatesPath(certsPath). - SetDryRun(false) - serviceConfig, err := config.Build() + SetDryRun(false). 
+ Build()
 if err != nil {
 return nil, err
 }
diff --git a/artifactory/utils/weblogin.go b/artifactory/utils/weblogin.go
new file mode 100644
index 000000000..a9b79b76b
--- /dev/null
+++ b/artifactory/utils/weblogin.go
@@ -0,0 +1,59 @@
+package utils
+
+import (
+ "errors"
+ "github.com/google/uuid"
+ "github.com/jfrog/jfrog-cli-core/v2/utils/config"
+ "github.com/jfrog/jfrog-client-go/auth"
+ clientUtils "github.com/jfrog/jfrog-client-go/utils"
+ "github.com/jfrog/jfrog-client-go/utils/errorutils"
+ "github.com/jfrog/jfrog-client-go/utils/log"
+ "github.com/pkg/browser"
+ "time"
+)
+
+func DoWebLogin(serverDetails *config.ServerDetails) (token auth.CommonTokenParams, err error) {
+ if err = sendUnauthenticatedPing(serverDetails); err != nil {
+ return
+ }
+
+ uuidToken, err := uuid.NewRandom()
+ if errorutils.CheckError(err) != nil {
+ return
+ }
+ uuidStr := uuidToken.String()
+ accessManager, err := CreateAccessServiceManager(serverDetails, false)
+ if err != nil {
+ return
+ }
+ if err = accessManager.SendLoginAuthenticationRequest(uuidStr); err != nil {
+ err = errors.Join(err,
+ errorutils.CheckErrorf("The 'Web Login' functionality is only supported for Artifactory version 7.64.0 and above. "+
+ "Make sure the details you entered are correct and that Artifactory meets the version requirement."))
+ return
+ }
+ log.Info("Please log in to the JFrog platform using the opened browser.")
+ if err = browser.OpenURL(clientUtils.AddTrailingSlashIfNeeded(serverDetails.Url) + "ui/login?jfClientSession=" + uuidStr + "&jfClientName=JFrogCLI"); err != nil {
+ return
+ }
+ time.Sleep(1 * time.Second)
+ log.Debug("Attempting to get the authentication token...")
+ token, err = accessManager.GetLoginAuthenticationToken(uuidStr)
+ if err != nil {
+ return
+ }
+ if token.AccessToken == "" {
+ return token, errorutils.CheckErrorf("failed getting authentication token after web login")
+ }
+ log.Info("You're now logged in!")
+ return
+}
+
+func sendUnauthenticatedPing(serverDetails *config.ServerDetails) error {
+ artifactoryManager, err := CreateServiceManager(serverDetails, 3, 0, false)
+ if err != nil {
+ return err
+ }
+ _, err = artifactoryManager.Ping()
+ return err
+}
diff --git a/buildscripts/download-jars.sh b/buildscripts/download-jars.sh
new file mode 100755
index 000000000..f60fb47d0
--- /dev/null
+++ b/buildscripts/download-jars.sh
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+# Please use this script to download the JAR files for maven-dep-tree and gradle-dep-tree into the directory xray/audit/java/.
+# These JARs allow us to build Maven and Gradle dependency trees efficiently and without compilation.
+# Learn more about them here:
+# https://github.com/jfrog/gradle-dep-tree
+# https://github.com/jfrog/maven-dep-tree
+
+# Once you have updated the versions mentioned below, please execute this script from the root directory of the jfrog-cli-core to ensure the JAR files are updated.
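+# For example, from the repository root:
+#   ./buildscripts/download-jars.sh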
+GRADLE_DEP_TREE_VERSION="2.2.0"
+MAVEN_DEP_TREE_VERSION="1.0.0"
+
+curl -fL https://releases.jfrog.io/artifactory/oss-release-local/com/jfrog/gradle-dep-tree/${GRADLE_DEP_TREE_VERSION}/gradle-dep-tree-${GRADLE_DEP_TREE_VERSION}.jar -o xray/audit/java/gradle-dep-tree.jar
+curl -fL https://releases.jfrog.io/artifactory/oss-release-local/com/jfrog/maven-dep-tree/${MAVEN_DEP_TREE_VERSION}/maven-dep-tree-${MAVEN_DEP_TREE_VERSION}.jar -o xray/audit/java/maven-dep-tree.jar
diff --git a/common/commands/config.go b/common/commands/config.go
index dbf4f0d08..a0802387c 100644
--- a/common/commands/config.go
+++ b/common/commands/config.go
@@ -3,26 +3,23 @@ package commands
 import (
 "errors"
 "fmt"
- "github.com/jfrog/jfrog-client-go/http/httpclient"
- "net/url"
- "os"
- "reflect"
- "strconv"
- "strings"
- "sync"
-
+ "github.com/jfrog/jfrog-cli-core/v2/artifactory/utils"
+ "github.com/jfrog/jfrog-cli-core/v2/utils/config"
 "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils"
 "github.com/jfrog/jfrog-cli-core/v2/utils/ioutils"
 "github.com/jfrog/jfrog-cli-core/v2/utils/lock"
- "github.com/jfrog/jfrog-client-go/auth"
-
- "github.com/jfrog/jfrog-cli-core/v2/artifactory/utils"
- "github.com/jfrog/jfrog-cli-core/v2/utils/config"
- clientutils "github.com/jfrog/jfrog-client-go/utils"
+ "github.com/jfrog/jfrog-client-go/http/httpclient"
+ clientUtils "github.com/jfrog/jfrog-client-go/utils"
 "github.com/jfrog/jfrog-client-go/utils/errorutils"
 "github.com/jfrog/jfrog-client-go/utils/io/fileutils"
 "github.com/jfrog/jfrog-client-go/utils/log"
+ "net/url"
+ "os"
+ "reflect"
+ "strconv"
+ "strings"
+ "sync"
 )
 type ConfigAction string
@@ -40,6 +37,7 @@ const (
 AccessToken AuthenticationMethod = "Access Token"
 BasicAuth AuthenticationMethod = "Username and Password / API Key"
 MTLS AuthenticationMethod = "Mutual TLS"
+ WebLogin AuthenticationMethod = "Web Login"
 )
 // Internal golang locking for the same process.
@@ -52,6 +50,10 @@ type ConfigCommand struct {
 encPassword bool
 useBasicAuthOnly bool
 serverId string
+ // Preselected web login authentication method, supported on an interactive command only.
+ useWebLogin bool
+ // Forcibly make the configured server default.
+ makeDefault bool // For unit tests disablePrompts bool cmdType ConfigAction @@ -76,6 +78,16 @@ func (cc *ConfigCommand) SetUseBasicAuthOnly(useBasicAuthOnly bool) *ConfigComma return cc } +func (cc *ConfigCommand) SetUseWebLogin(useWebLogin bool) *ConfigCommand { + cc.useWebLogin = useWebLogin + return cc +} + +func (cc *ConfigCommand) SetMakeDefault(makeDefault bool) *ConfigCommand { + cc.makeDefault = makeDefault + return cc +} + func (cc *ConfigCommand) SetInteractive(interactive bool) *ConfigCommand { cc.interactive = interactive return cc @@ -153,77 +165,94 @@ func (cc *ConfigCommand) config() error { } if cc.interactive { err = cc.getConfigurationFromUser() - if err != nil { - return err - } } else { - // Non-interactive config - - if cc.details.Url != "" { - if fileutils.IsSshUrl(cc.details.Url) { - coreutils.SetIfEmpty(&cc.details.ArtifactoryUrl, cc.details.Url) - } else { - cc.details.Url = clientutils.AddTrailingSlashIfNeeded(cc.details.Url) - // Derive JFrog services URLs from platform URL - coreutils.SetIfEmpty(&cc.details.ArtifactoryUrl, cc.details.Url+"artifactory/") - coreutils.SetIfEmpty(&cc.details.DistributionUrl, cc.details.Url+"distribution/") - coreutils.SetIfEmpty(&cc.details.XrayUrl, cc.details.Url+"xray/") - coreutils.SetIfEmpty(&cc.details.MissionControlUrl, cc.details.Url+"mc/") - coreutils.SetIfEmpty(&cc.details.PipelinesUrl, cc.details.Url+"pipelines/") - } - } + err = cc.getConfigurationNonInteractively() + } + if err != nil { + return err + } + cc.addTrailingSlashes() + cc.lowerUsername() + cc.setDefaultIfNeeded(configurations) + if err = assertSingleAuthMethod(cc.details); err != nil { + return err + } + if err = cc.assertUrlsSafe(); err != nil { + return err + } + if err = cc.encPasswordIfNeeded(); err != nil { + return err + } + cc.configRefreshableTokenIfPossible() + return config.SaveServersConf(configurations) +} - // Some package managers support basic authentication only. 
- // To support them we try to extract the username from the access token - if cc.details.AccessToken != "" && cc.details.User == "" { - err = cc.validateTokenIsNotApiKey() - if err != nil { - return err - } +func (cc *ConfigCommand) getConfigurationNonInteractively() error { + if cc.details.Url != "" { + if fileutils.IsSshUrl(cc.details.Url) { + coreutils.SetIfEmpty(&cc.details.ArtifactoryUrl, cc.details.Url) + } else { + cc.details.Url = clientUtils.AddTrailingSlashIfNeeded(cc.details.Url) + // Derive JFrog services URLs from platform URL + coreutils.SetIfEmpty(&cc.details.ArtifactoryUrl, cc.details.Url+"artifactory/") + coreutils.SetIfEmpty(&cc.details.DistributionUrl, cc.details.Url+"distribution/") + coreutils.SetIfEmpty(&cc.details.XrayUrl, cc.details.Url+"xray/") + coreutils.SetIfEmpty(&cc.details.MissionControlUrl, cc.details.Url+"mc/") + coreutils.SetIfEmpty(&cc.details.PipelinesUrl, cc.details.Url+"pipelines/") + } + } - // Try extracting username from Access Token (non-possible on reference token) - cc.details.User = auth.ExtractUsernameFromAccessToken(cc.details.AccessToken) + if cc.details.AccessToken != "" && cc.details.User == "" { + if err := cc.validateTokenIsNotApiKey(); err != nil { + return err } + cc.tryExtractingUsernameFromAccessToken() } - cc.details.ArtifactoryUrl = clientutils.AddTrailingSlashIfNeeded(cc.details.ArtifactoryUrl) - cc.details.DistributionUrl = clientutils.AddTrailingSlashIfNeeded(cc.details.DistributionUrl) - cc.details.XrayUrl = clientutils.AddTrailingSlashIfNeeded(cc.details.XrayUrl) - cc.details.MissionControlUrl = clientutils.AddTrailingSlashIfNeeded(cc.details.MissionControlUrl) - cc.details.PipelinesUrl = clientutils.AddTrailingSlashIfNeeded(cc.details.PipelinesUrl) + return nil +} + +func (cc *ConfigCommand) addTrailingSlashes() { + cc.details.ArtifactoryUrl = clientUtils.AddTrailingSlashIfNeeded(cc.details.ArtifactoryUrl) + cc.details.DistributionUrl = clientUtils.AddTrailingSlashIfNeeded(cc.details.DistributionUrl) + cc.details.XrayUrl = clientUtils.AddTrailingSlashIfNeeded(cc.details.XrayUrl) + cc.details.MissionControlUrl = clientUtils.AddTrailingSlashIfNeeded(cc.details.MissionControlUrl) + cc.details.PipelinesUrl = clientUtils.AddTrailingSlashIfNeeded(cc.details.PipelinesUrl) +} - // Artifactory expects the username to be lower-cased. In case it is not, - // Artifactory will silently save it lower-cased, but the token creation - // REST API will fail with a non-lower-cased username. +// Artifactory expects the username to be lower-cased. In case it is not, +// Artifactory will silently save it lower-cased, but the token creation +// REST API will fail with a non-lower-cased username. 
+func (cc *ConfigCommand) lowerUsername() { cc.details.User = strings.ToLower(cc.details.User) +} +func (cc *ConfigCommand) setDefaultIfNeeded(configurations []*config.ServerDetails) { if len(configurations) == 1 { cc.details.IsDefault = true + return } - - err = checkSingleAuthMethod(cc.details) - if err != nil { - return err - } - - if err = cc.assertUrlsSafe(); err != nil { - return err + if cc.makeDefault { + for i := range configurations { + configurations[i].IsDefault = false + } + cc.details.IsDefault = true } +} +func (cc *ConfigCommand) encPasswordIfNeeded() error { if cc.encPassword && cc.details.ArtifactoryUrl != "" { - err = cc.encryptPassword() + err := cc.encryptPassword() if err != nil { return errorutils.CheckErrorf("The following error was received while trying to encrypt your password: %s ", err) } } - - if !cc.useBasicAuthOnly { - cc.configRefreshableToken() - } - - return config.SaveServersConf(configurations) + return nil } -func (cc *ConfigCommand) configRefreshableToken() { +func (cc *ConfigCommand) configRefreshableTokenIfPossible() { + if cc.useBasicAuthOnly { + return + } // If username and password weren't provided, then the artifactoryToken refresh mechanism isn't set. if cc.details.User == "" || cc.details.Password == "" { return @@ -247,27 +276,26 @@ func (cc *ConfigCommand) prepareConfigurationData() ([]*config.ServerDetails, er return configurations, err } - // Get default server details - if cc.defaultDetails == nil { - cc.defaultDetails, err = config.GetDefaultConfiguredConf(configurations) - if err != nil { - return configurations, errorutils.CheckError(err) - } - } - // Get server id if cc.interactive && cc.serverId == "" { - ioutils.ScanFromConsole("Choose a server ID", &cc.serverId, cc.defaultDetails.ServerId) + defaultServerId := "" + if cc.defaultDetails != nil { + defaultServerId = cc.defaultDetails.ServerId + } + ioutils.ScanFromConsole("Enter a unique server identifier", &cc.serverId, defaultServerId) } cc.details.ServerId = cc.resolveServerId() // Remove and get the server details from the configurations list tempConfiguration, configurations := config.GetAndRemoveConfiguration(cc.details.ServerId, configurations) - // Change default server details if the server existed in the configurations list + // Set default server details if the server existed in the configurations list. + // Otherwise, if default details were not set, initialize empty default details. if tempConfiguration != nil { cc.defaultDetails = tempConfiguration cc.details.IsDefault = tempConfiguration.IsDefault + } else if cc.defaultDetails == nil { + cc.defaultDetails = new(config.ServerDetails) } // Append the configuration to the configurations list @@ -287,101 +315,91 @@ func (cc *ConfigCommand) resolveServerId() string { if cc.details.ServerId != "" { return cc.details.ServerId } - if cc.defaultDetails.ServerId != "" { + if cc.defaultDetails != nil && cc.defaultDetails.ServerId != "" { return cc.defaultDetails.ServerId } return config.DefaultServerId } func (cc *ConfigCommand) getConfigurationFromUser() (err error) { - disallowUsingSavedPassword := false + if cc.disablePrompts { + cc.fillSpecificUrlsFromPlatform() + return nil + } + + // If using web login on existing server with platform URL, avoid prompts and skip directly to login. 
+ if cc.useWebLogin && cc.defaultDetails.Url != "" { + cc.fillSpecificUrlsFromPlatform() + return cc.handleWebLogin() + } if cc.details.Url == "" { ioutils.ScanFromConsole("JFrog Platform URL", &cc.details.Url, cc.defaultDetails.Url) } - if fileutils.IsSshUrl(cc.details.Url) { - coreutils.SetIfEmpty(&cc.details.ArtifactoryUrl, cc.details.Url) - } else { - cc.details.Url = clientutils.AddTrailingSlashIfNeeded(cc.details.Url) - disallowUsingSavedPassword = coreutils.SetIfEmpty(&cc.details.DistributionUrl, cc.details.Url+"distribution/") || disallowUsingSavedPassword - disallowUsingSavedPassword = coreutils.SetIfEmpty(&cc.details.ArtifactoryUrl, cc.details.Url+"artifactory/") || disallowUsingSavedPassword - disallowUsingSavedPassword = coreutils.SetIfEmpty(&cc.details.XrayUrl, cc.details.Url+"xray/") || disallowUsingSavedPassword - disallowUsingSavedPassword = coreutils.SetIfEmpty(&cc.details.MissionControlUrl, cc.details.Url+"mc/") || disallowUsingSavedPassword - disallowUsingSavedPassword = coreutils.SetIfEmpty(&cc.details.PipelinesUrl, cc.details.Url+"pipelines/") || disallowUsingSavedPassword + if fileutils.IsSshUrl(cc.details.Url) || fileutils.IsSshUrl(cc.details.ArtifactoryUrl) { + return cc.handleSsh() } - if fileutils.IsSshUrl(cc.details.ArtifactoryUrl) { - if err = getSshKeyPath(cc.details); err != nil { - return - } - } else if !cc.disablePrompts { - if err = cc.promptUrls(&disallowUsingSavedPassword); err != nil { - return - } - // Password/Access-Token/MTLS Certificate - if cc.details.Password == "" && cc.details.AccessToken == "" { - var authMethod AuthenticationMethod - authMethod, err = promptAuthMethods() - if err != nil { - return - } - switch authMethod { - case BasicAuth: - err = ioutils.ReadCredentialsFromConsole(cc.details, cc.defaultDetails, disallowUsingSavedPassword) - if err != nil { - return - } - case AccessToken: - err = readAccessTokenFromConsole(cc.details) - if err != nil { - return - } - err = cc.validateTokenIsNotApiKey() - if err != nil { - return err - } - if cc.details.User == "" { - // Try extracting username from Access Token (non-possible on reference token) - cc.details.User = auth.ExtractUsernameFromAccessToken(cc.details.AccessToken) - if cc.details.User == "" { - ioutils.ScanFromConsole("JFrog username (optional)", &cc.details.User, "") - } - } - - case MTLS: - checkCertificateForMTLS(cc) - log.Warn("Please notice that authentication using client certificates (mTLS) is not supported by commands which integrate with package managers.") - } - } + disallowUsingSavedPassword := cc.fillSpecificUrlsFromPlatform() + if err = cc.promptUrls(&disallowUsingSavedPassword); err != nil { + return + } - checkClientCertForReverseProxy(cc) + var clientCertChecked bool + if cc.details.Password == "" && cc.details.AccessToken == "" { + clientCertChecked, err = cc.promptForCredentials(disallowUsingSavedPassword) + if err != nil { + return err + } + } + if !clientCertChecked { + cc.checkClientCertForReverseProxy() } + return +} +func (cc *ConfigCommand) handleSsh() error { + coreutils.SetIfEmpty(&cc.details.ArtifactoryUrl, cc.details.Url) + return getSshKeyPath(cc.details) +} + +func (cc *ConfigCommand) fillSpecificUrlsFromPlatform() (disallowUsingSavedPassword bool) { + cc.details.Url = clientUtils.AddTrailingSlashIfNeeded(cc.details.Url) + disallowUsingSavedPassword = coreutils.SetIfEmpty(&cc.details.DistributionUrl, cc.details.Url+"distribution/") || disallowUsingSavedPassword + disallowUsingSavedPassword = coreutils.SetIfEmpty(&cc.details.ArtifactoryUrl, 
cc.details.Url+"artifactory/") || disallowUsingSavedPassword + disallowUsingSavedPassword = coreutils.SetIfEmpty(&cc.details.XrayUrl, cc.details.Url+"xray/") || disallowUsingSavedPassword + disallowUsingSavedPassword = coreutils.SetIfEmpty(&cc.details.MissionControlUrl, cc.details.Url+"mc/") || disallowUsingSavedPassword + disallowUsingSavedPassword = coreutils.SetIfEmpty(&cc.details.PipelinesUrl, cc.details.Url+"pipelines/") || disallowUsingSavedPassword return } -func checkCertificateForMTLS(cc *ConfigCommand) { +func (cc *ConfigCommand) checkCertificateForMTLS() { if cc.details.ClientCertPath != "" && cc.details.ClientCertKeyPath != "" { return } cc.readClientCertInfoFromConsole() } -func promptAuthMethods() (method AuthenticationMethod, err error) { +func (cc *ConfigCommand) promptAuthMethods() (selectedMethod AuthenticationMethod, err error) { + if cc.useWebLogin { + return WebLogin, nil + } + var selected string - authMethod := []AuthenticationMethod{ + authMethods := []AuthenticationMethod{ BasicAuth, AccessToken, MTLS, + WebLogin, } var selectableItems []ioutils.PromptItem - for _, method := range authMethod { - selectableItems = append(selectableItems, ioutils.PromptItem{Option: string(method), TargetValue: &selected}) + for _, curMethod := range authMethods { + selectableItems = append(selectableItems, ioutils.PromptItem{Option: string(curMethod), TargetValue: &selected}) } err = ioutils.SelectString(selectableItems, "Select one of the following authentication methods:", false, func(item ioutils.PromptItem) { *item.TargetValue = item.Option - method = AuthenticationMethod(*item.TargetValue) + selectedMethod = AuthenticationMethod(*item.TargetValue) }) return } @@ -400,6 +418,52 @@ func (cc *ConfigCommand) promptUrls(disallowUsingSavedPassword *bool) error { }) } +func (cc *ConfigCommand) promptForCredentials(disallowUsingSavedPassword bool) (clientCertChecked bool, err error) { + var authMethod AuthenticationMethod + authMethod, err = cc.promptAuthMethods() + if err != nil { + return + } + switch authMethod { + case BasicAuth: + return false, ioutils.ReadCredentialsFromConsole(cc.details, cc.defaultDetails, disallowUsingSavedPassword) + case AccessToken: + return false, cc.promptForAccessToken() + case MTLS: + cc.checkCertificateForMTLS() + log.Warn("Please notice that authentication using client certificates (mTLS) is not supported by commands which integrate with package managers.") + return true, nil + case WebLogin: + // Web login sends requests, so certificates must be obtained first if they are required. + cc.checkClientCertForReverseProxy() + return true, cc.handleWebLogin() + default: + return false, errorutils.CheckErrorf("unexpected authentication method") + } +} + +func (cc *ConfigCommand) promptForAccessToken() error { + if err := readAccessTokenFromConsole(cc.details); err != nil { + return err + } + if err := cc.validateTokenIsNotApiKey(); err != nil { + return err + } + if cc.details.User == "" { + cc.tryExtractingUsernameFromAccessToken() + if cc.details.User == "" { + ioutils.ScanFromConsole("JFrog username (optional)", &cc.details.User, "") + } + } + return nil +} + +// Some package managers support basic authentication only. To support them, we try to extract the username from the access token. +// This is not feasible with reference token. 
+func (cc *ConfigCommand) tryExtractingUsernameFromAccessToken() { + cc.details.User = auth.ExtractUsernameFromAccessToken(cc.details.AccessToken) +} + func (cc *ConfigCommand) readClientCertInfoFromConsole() { if cc.details.ClientCertPath == "" { ioutils.ScanFromConsole("Client certificate file path", &cc.details.ClientCertPath, cc.defaultDetails.ClientCertPath) @@ -409,7 +473,7 @@ func (cc *ConfigCommand) readClientCertInfoFromConsole() { } } -func checkClientCertForReverseProxy(cc *ConfigCommand) { +func (cc *ConfigCommand) checkClientCertForReverseProxy() { if cc.details.ClientCertPath != "" && cc.details.ClientCertKeyPath != "" { return } @@ -439,7 +503,7 @@ func getSshKeyPath(details *config.ServerDetails) error { } // If SSH key path provided, check if exists: - details.SshKeyPath = clientutils.ReplaceTildeWithUserHome(details.SshKeyPath) + details.SshKeyPath = clientUtils.ReplaceTildeWithUserHome(details.SshKeyPath) exists, err := fileutils.IsFileExists(details.SshKeyPath, false) if err != nil { return err @@ -689,9 +753,9 @@ func (cc *ConfigCommand) encryptPassword() error { // Assert all services URLs are safe func (cc *ConfigCommand) assertUrlsSafe() error { - for _, url := range []string{cc.details.Url, cc.details.AccessUrl, cc.details.ArtifactoryUrl, + for _, curUrl := range []string{cc.details.Url, cc.details.AccessUrl, cc.details.ArtifactoryUrl, cc.details.DistributionUrl, cc.details.MissionControlUrl, cc.details.PipelinesUrl, cc.details.XrayUrl} { - if isUrlSafe(url) { + if isUrlSafe(curUrl) { continue } if cc.interactive { @@ -713,21 +777,33 @@ func (cc *ConfigCommand) validateTokenIsNotApiKey() error { return nil } +func (cc *ConfigCommand) handleWebLogin() error { + token, err := utils.DoWebLogin(cc.details) + if err != nil { + return err + } + cc.details.AccessToken = token.AccessToken + cc.details.RefreshToken = token.RefreshToken + cc.details.WebLogin = true + cc.tryExtractingUsernameFromAccessToken() + return nil +} + // Return true if a URL is safe. URL is considered not safe if the following conditions are met: // 1. The URL uses an http:// scheme // 2. The URL leads to a URL outside the local machine func isUrlSafe(urlToCheck string) bool { - url, err := url.Parse(urlToCheck) + parsedUrl, err := url.Parse(urlToCheck) if err != nil { - // Unparseable URL is not unsafe + // If the URL cannot be parsed, we treat it as safe. 
return true } - if url.Scheme != "http" { + if parsedUrl.Scheme != "http" { return true } - hostName := url.Hostname() + hostName := parsedUrl.Hostname() if hostName == "127.0.0.1" || hostName == "localhost" { return true } @@ -735,7 +811,7 @@ func isUrlSafe(urlToCheck string) bool { return false } -func checkSingleAuthMethod(details *config.ServerDetails) error { +func assertSingleAuthMethod(details *config.ServerDetails) error { authMethods := []bool{ details.User != "" && details.Password != "", details.AccessToken != "" && details.ArtifactoryRefreshToken == "", diff --git a/common/commands/config_test.go b/common/commands/config_test.go index 624299a4c..0002abfd9 100644 --- a/common/commands/config_test.go +++ b/common/commands/config_test.go @@ -15,6 +15,8 @@ import ( "github.com/stretchr/testify/assert" ) +const testServerId = "test" + func init() { log.SetDefaultLogger() } @@ -38,6 +40,20 @@ func TestUsernameSavedLowercase(t *testing.T) { assert.Equal(t, outputConfig.User, "admin", "The config command is supposed to save username as lowercase") } +func TestDefaultServerId(t *testing.T) { + inputDetails := tests.CreateTestServerDetails() + inputDetails.User = "admin" + inputDetails.Password = "password" + // Remove server ID to verify the default one will be applied in non-interactive execution. + inputDetails.ServerId = "" + + doConfig(t, "", inputDetails, false, true, false) + outputConfig, err := GetConfig(config.DefaultServerId, false) + assert.NoError(t, err) + assert.Equal(t, config.DefaultServerId, outputConfig.ServerId) + assert.NoError(t, NewConfigCommand(Delete, config.DefaultServerId).Run()) +} + func TestArtifactorySshKey(t *testing.T) { inputDetails := tests.CreateTestServerDetails() inputDetails.SshKeyPath = "/tmp/sshKey" @@ -71,7 +87,7 @@ func TestApiKeyInAccessToken(t *testing.T) { inputDetails.AccessToken = apiKey // Should throw error if access token is API key and no username - configCmd := NewConfigCommand(AddOrEdit, "test").SetDetails(inputDetails).SetUseBasicAuthOnly(true).SetInteractive(false) + configCmd := NewConfigCommand(AddOrEdit, testServerId).SetDetails(inputDetails).SetUseBasicAuthOnly(true).SetInteractive(false) configCmd.disablePrompts = true assert.ErrorContains(t, configCmd.Run(), "the provided Access Token is an API key") @@ -120,7 +136,7 @@ func TestUrls(t *testing.T) { func testUrls(t *testing.T, interactive bool) { inputDetails := config.ServerDetails{ Url: "http://localhost:8080", User: "admin", Password: "password", - ServerId: "test", ClientCertPath: "test/cert/path", ClientCertKeyPath: "test/cert/key/path", + ServerId: testServerId, ClientCertPath: "test/cert/path", ClientCertKeyPath: "test/cert/key/path", IsDefault: false} outputConfig, err := configAndGetTestServer(t, &inputDetails, false, interactive) @@ -160,13 +176,45 @@ func TestBasicAuthOnlyOption(t *testing.T) { outputConfig, err := configAndGetTestServer(t, inputDetails, true, false) assert.NoError(t, err) assert.Equal(t, coreutils.TokenRefreshDisabled, outputConfig.ArtifactoryTokenRefreshInterval, "expected refreshable token to be disabled") - assert.NoError(t, NewConfigCommand(Delete, "test").Run()) + assert.NoError(t, NewConfigCommand(Delete, testServerId).Run()) // Verify setting the option enables refreshable tokens. 
outputConfig, err = configAndGetTestServer(t, inputDetails, false, false) assert.NoError(t, err) assert.Equal(t, coreutils.TokenRefreshDefaultInterval, outputConfig.ArtifactoryTokenRefreshInterval, "expected refreshable token to be enabled") - assert.NoError(t, NewConfigCommand(Delete, "test").Run()) + assert.NoError(t, NewConfigCommand(Delete, testServerId).Run()) +} + +func TestMakeDefaultOption(t *testing.T) { + cleanUpJfrogHome, err := utilsTests.SetJfrogHome() + assert.NoError(t, err) + defer cleanUpJfrogHome() + + originalDefault := tests.CreateTestServerDetails() + originalDefault.ServerId = "originalDefault" + originalDefault.IsDefault = false + newDefault := tests.CreateTestServerDetails() + newDefault.ServerId = "newDefault" + newDefault.IsDefault = false + + // Config the first server, and expect it to be default because it is the only server. + configAndAssertDefault(t, originalDefault, false) + defer deleteServer(t, originalDefault.ServerId) + + // Config a second server and pass the makeDefault option. + configAndAssertDefault(t, newDefault, true) + defer deleteServer(t, newDefault.ServerId) +} + +func configAndAssertDefault(t *testing.T, inputDetails *config.ServerDetails, makeDefault bool) { + outputConfig, err := configAndGetServer(t, inputDetails.ServerId, inputDetails, false, false, makeDefault) + assert.NoError(t, err) + assert.Equal(t, inputDetails.ServerId, outputConfig.ServerId) + assert.True(t, outputConfig.IsDefault) +} + +func deleteServer(t *testing.T, serverId string) { + assert.NoError(t, NewConfigCommand(Delete, serverId).Run()) } type unsafeUrlTest struct { @@ -193,11 +241,11 @@ func TestAssertUrlsSafe(t *testing.T) { for _, testCase := range unsafeUrlTestCases { t.Run(testCase.url, func(t *testing.T) { // Test non-interactive - should pass with a warning message - inputDetails := &config.ServerDetails{Url: testCase.url, ServerId: "test"} + inputDetails := &config.ServerDetails{Url: testCase.url, ServerId: testServerId} configAndTest(t, inputDetails, false) // Test interactive - should fail with an error - configCmd := NewConfigCommand(AddOrEdit, "test").SetDetails(inputDetails).SetInteractive(true) + configCmd := NewConfigCommand(AddOrEdit, testServerId).SetDetails(inputDetails).SetInteractive(true) configCmd.disablePrompts = true err := configCmd.Run() if testCase.isSafe { @@ -259,13 +307,13 @@ func TestKeyDecryptionError(t *testing.T) { inputDetails.Password = "password" // Configure server with JFROG_CLI_ENCRYPTION_KEY set - configCmd := NewConfigCommand(AddOrEdit, "test").SetDetails(inputDetails).SetUseBasicAuthOnly(true).SetInteractive(false) + configCmd := NewConfigCommand(AddOrEdit, testServerId).SetDetails(inputDetails).SetUseBasicAuthOnly(true).SetInteractive(false) configCmd.disablePrompts = true assert.NoError(t, configCmd.Run()) // Get the server details when JFROG_CLI_ENCRYPTION_KEY is not set and expect an error assert.NoError(t, os.Unsetenv(coreutils.EncryptionKey)) - _, err = GetConfig("test", false) + _, err = GetConfig(testServerId, false) assert.ErrorContains(t, err, "cannot decrypt config") } @@ -281,15 +329,24 @@ func configAndTest(t *testing.T, inputDetails *config.ServerDetails, interactive outputConfig, err := configAndGetTestServer(t, inputDetails, true, interactive) assert.NoError(t, err) assert.Equal(t, configStructToString(t, inputDetails), configStructToString(t, outputConfig), "unexpected configuration was saved to file") - assert.NoError(t, NewConfigCommand(Delete, "test").Run()) + assert.NoError(t, NewConfigCommand(Delete, 
testServerId).Run()) testExportImport(t, inputDetails) } func configAndGetTestServer(t *testing.T, inputDetails *config.ServerDetails, basicAuthOnly, interactive bool) (*config.ServerDetails, error) { - configCmd := NewConfigCommand(AddOrEdit, "test").SetDetails(inputDetails).SetUseBasicAuthOnly(basicAuthOnly).SetInteractive(interactive) + return configAndGetServer(t, testServerId, inputDetails, basicAuthOnly, interactive, false) +} + +func configAndGetServer(t *testing.T, serverId string, inputDetails *config.ServerDetails, basicAuthOnly, interactive, makeDefault bool) (*config.ServerDetails, error) { + doConfig(t, serverId, inputDetails, basicAuthOnly, interactive, makeDefault) + return GetConfig(serverId, false) +} + +func doConfig(t *testing.T, serverId string, inputDetails *config.ServerDetails, basicAuthOnly, interactive, makeDefault bool) { + configCmd := NewConfigCommand(AddOrEdit, serverId).SetDetails(inputDetails).SetUseBasicAuthOnly(basicAuthOnly). + SetInteractive(interactive).SetMakeDefault(makeDefault) configCmd.disablePrompts = true assert.NoError(t, configCmd.Run()) - return GetConfig("test", false) } func configStructToString(t *testing.T, artConfig *config.ServerDetails) string { diff --git a/common/tests/utils.go b/common/tests/utils.go index 227a4c5ee..207609e77 100644 --- a/common/tests/utils.go +++ b/common/tests/utils.go @@ -1,15 +1,17 @@ package tests import ( + "net/http" + "net/http/httptest" + "testing" + "github.com/jfrog/jfrog-cli-core/v2/artifactory/utils" "github.com/jfrog/jfrog-cli-core/v2/utils/config" testsutils "github.com/jfrog/jfrog-cli-core/v2/utils/config/tests" + "github.com/jfrog/jfrog-client-go/access" "github.com/jfrog/jfrog-client-go/artifactory" "github.com/jfrog/jfrog-client-go/distribution" "github.com/stretchr/testify/assert" - "net/http" - "net/http/httptest" - "testing" ) func ConfigTestServer(t *testing.T) (cleanUp func(), err error) { @@ -43,13 +45,22 @@ func CreateRestsMockServer(testHandler restsTestHandler) *httptest.Server { func CreateRtRestsMockServer(t *testing.T, testHandler restsTestHandler) (*httptest.Server, *config.ServerDetails, artifactory.ArtifactoryServicesManager) { testServer := CreateRestsMockServer(testHandler) - serverDetails := &config.ServerDetails{ArtifactoryUrl: testServer.URL + "/"} + serverDetails := &config.ServerDetails{Url: testServer.URL + "/", ArtifactoryUrl: testServer.URL + "/"} serviceManager, err := utils.CreateServiceManager(serverDetails, -1, 0, false) assert.NoError(t, err) return testServer, serverDetails, serviceManager } +func CreateAccessRestsMockServer(t *testing.T, testHandler restsTestHandler) (*httptest.Server, *config.ServerDetails, *access.AccessServicesManager) { + testServer := CreateRestsMockServer(testHandler) + serverDetails := &config.ServerDetails{Url: testServer.URL + "/", ServerId: "test-server"} + + serviceManager, err := utils.CreateAccessServiceManager(serverDetails, false) + assert.NoError(t, err) + return testServer, serverDetails, serviceManager +} + func CreateDsRestsMockServer(t *testing.T, testHandler restsTestHandler) (*httptest.Server, *config.ServerDetails, *distribution.DistributionServicesManager) { testServer := CreateRestsMockServer(testHandler) serverDetails := &config.ServerDetails{DistributionUrl: testServer.URL + "/"} diff --git a/general/cisetup/pipelinesyaml.go b/general/cisetup/pipelinesyaml.go index a142a1daa..1302b754f 100644 --- a/general/cisetup/pipelinesyaml.go +++ b/general/cisetup/pipelinesyaml.go @@ -5,7 +5,7 @@ import ( 
"github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" "github.com/jfrog/jfrog-client-go/utils" "github.com/jfrog/jfrog-client-go/utils/errorutils" - "gopkg.in/yaml.v2" + "gopkg.in/yaml.v3" "strconv" ) diff --git a/general/envsetup/envsetup.go b/general/envsetup/envsetup.go index 2e4ccedbf..fa2fafe32 100644 --- a/general/envsetup/envsetup.go +++ b/general/envsetup/envsetup.go @@ -3,29 +3,25 @@ package envsetup import ( "encoding/base64" "encoding/json" - "errors" "fmt" + "github.com/google/uuid" "github.com/jfrog/jfrog-cli-core/v2/artifactory/commands/generic" "github.com/jfrog/jfrog-cli-core/v2/artifactory/utils" - "github.com/jfrog/jfrog-cli-core/v2/utils/ioutils" - "github.com/jfrog/jfrog-client-go/access/services" - "net/http" - "net/url" - "strings" - "time" - - "github.com/pkg/browser" - - "github.com/google/uuid" "github.com/jfrog/jfrog-cli-core/v2/common/commands" + "github.com/jfrog/jfrog-cli-core/v2/general" "github.com/jfrog/jfrog-cli-core/v2/utils/config" "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" + "github.com/jfrog/jfrog-cli-core/v2/utils/ioutils" + "github.com/jfrog/jfrog-client-go/access/services" "github.com/jfrog/jfrog-client-go/http/httpclient" - clientutils "github.com/jfrog/jfrog-client-go/utils" + clientUtils "github.com/jfrog/jfrog-client-go/utils" "github.com/jfrog/jfrog-client-go/utils/errorutils" - clientioutils "github.com/jfrog/jfrog-client-go/utils/io" + ioUtils "github.com/jfrog/jfrog-client-go/utils/io" "github.com/jfrog/jfrog-client-go/utils/io/httputils" "github.com/jfrog/jfrog-client-go/utils/log" + "github.com/pkg/browser" + "net/http" + "time" ) type OutputFormat string @@ -59,7 +55,7 @@ type EnvSetupCommand struct { encodedConnectionDetails string id uuid.UUID serverDetails *config.ServerDetails - progress clientioutils.ProgressMgr + progress ioUtils.ProgressMgr outputFormat OutputFormat } @@ -77,7 +73,7 @@ func (ftc *EnvSetupCommand) ServerDetails() (*config.ServerDetails, error) { return nil, nil } -func (ftc *EnvSetupCommand) SetProgress(progress clientioutils.ProgressMgr) { +func (ftc *EnvSetupCommand) SetProgress(progress ioUtils.ProgressMgr) { ftc.progress = progress } @@ -174,7 +170,7 @@ func (ftc *EnvSetupCommand) SetupAndConfigServer() (err error) { if err != nil { return } - err = configServer(server) + err = general.ConfigServerWithDeducedId(server, false, false) return } @@ -233,7 +229,7 @@ func (ftc *EnvSetupCommand) setupExistingUser() (server *config.ServerDetails, e func (ftc *EnvSetupCommand) scanAndValidateJFrogPasswordFromConsole(server *config.ServerDetails) (err error) { // User has limited number of retries to enter his correct password. // Password validation is operated by Artifactory ping API. - server.ArtifactoryUrl = clientutils.AddTrailingSlashIfNeeded(server.Url) + "artifactory/" + server.ArtifactoryUrl = clientUtils.AddTrailingSlashIfNeeded(server.Url) + "artifactory/" for i := 0; i < enterPasswordMaxRetries; i++ { server.Password, err = ioutils.ScanJFrogPasswordFromConsole() if err != nil { @@ -248,7 +244,7 @@ func (ftc *EnvSetupCommand) scanAndValidateJFrogPasswordFromConsole(server *conf } log.Output(err.Error()) } - err = errorutils.CheckError(errors.New("bad credentials: Wrong password. ")) + err = errorutils.CheckErrorf("bad credentials: Wrong password. 
") return } @@ -293,7 +289,7 @@ func (ftc *EnvSetupCommand) CommandName() string { return "setup" } -// Returns the new server deatailes from My-JFrog +// Returns the new server details from My-JFrog func (ftc *EnvSetupCommand) getNewServerDetails() (serverDetails *config.ServerDetails, err error) { requestBody := &myJfrogGetStatusRequest{CliRegistrationId: ftc.id.String()} requestContent, err := json.Marshal(requestBody) @@ -387,21 +383,6 @@ func (ftc *EnvSetupCommand) getNewServerDetails() (serverDetails *config.ServerD return serverDetails, nil } -// Add the given server details to the cli's config by running a 'jf config' command -func configServer(server *config.ServerDetails) error { - u, err := url.Parse(server.Url) - if errorutils.CheckError(err) != nil { - return err - } - // Take the server name from host name: https://myjfrog.jfrog.com/ -> myjfrog - serverId := strings.Split(u.Host, ".")[0] - configCmd := commands.NewConfigCommand(commands.AddOrEdit, serverId).SetInteractive(false).SetDetails(server) - if err = configCmd.Run(); err != nil { - return err - } - return commands.NewConfigCommand(commands.Use, serverId).SetInteractive(false).SetDetails(server).Run() -} - type myJfrogGetStatusRequest struct { CliRegistrationId string `json:"cliRegistrationId,omitempty"` } diff --git a/general/login/login.go b/general/login/login.go new file mode 100644 index 000000000..2845bb814 --- /dev/null +++ b/general/login/login.go @@ -0,0 +1,101 @@ +package login + +import ( + "github.com/jfrog/jfrog-cli-core/v2/common/commands" + "github.com/jfrog/jfrog-cli-core/v2/general" + "github.com/jfrog/jfrog-cli-core/v2/utils/config" + "github.com/jfrog/jfrog-cli-core/v2/utils/ioutils" + "github.com/jfrog/jfrog-client-go/utils/errorutils" + "github.com/jfrog/jfrog-client-go/utils/io/fileutils" +) + +const ( + newSeverPlaceholder = "[New Server]" +) + +type LoginCommand struct { +} + +func NewLoginCommand() *LoginCommand { + return &LoginCommand{} +} + +func (lc *LoginCommand) Run() error { + configurations, err := config.GetAllServersConfigs() + if err != nil { + return err + } + if len(configurations) == 0 { + return newConfLogin() + } + return existingConfLogin(configurations) +} + +func newConfLogin() error { + platformUrl, err := promptPlatformUrl() + if err != nil { + return err + } + newServer := config.ServerDetails{Url: platformUrl} + return general.ConfigServerWithDeducedId(&newServer, true, true) +} + +func promptPlatformUrl() (string, error) { + var platformUrl string + ioutils.ScanFromConsole("JFrog Platform URL", &platformUrl, "") + if platformUrl == "" { + return "", errorutils.CheckErrorf("providing JFrog Platform URL is mandatory") + } + return platformUrl, nil +} + +func existingConfLogin(configurations []*config.ServerDetails) error { + selectedChoice, err := promptAddOrEdit(configurations) + if err != nil { + return err + } + if selectedChoice == newSeverPlaceholder { + return selectedNewServer() + } + return existingServerLogin(selectedChoice) +} + +// When configurations exist and the user chose to log in with a new server we direct him to a clean config process, +// where he will be prompted for server ID and URL. +func selectedNewServer() error { + return general.ConfigServerAsDefault(nil, "", true, true) +} + +// When a user chose to log in to an existing server, +// we run a config process while keeping all his current server details except credentials. 
+func existingServerLogin(serverId string) error { + serverDetails, err := commands.GetConfig(serverId, true) + if err != nil { + return err + } + if serverDetails.Url == "" { + serverDetails = &config.ServerDetails{ServerId: serverDetails.ServerId} + } else { + if fileutils.IsSshUrl(serverDetails.Url) { + return errorutils.CheckErrorf("web login cannot be performed via SSH. Please try again with different server configuration or configure a new one") + } + serverDetails.User = "" + serverDetails.Password = "" + serverDetails.AccessToken = "" + serverDetails.RefreshToken = "" + } + return general.ConfigServerAsDefault(serverDetails, serverId, true, true) +} + +// Prompt a list of all server IDs and an option for a new server, and let the user choose to which to log in. +func promptAddOrEdit(configurations []*config.ServerDetails) (selectedChoice string, err error) { + selectableItems := []ioutils.PromptItem{{Option: newSeverPlaceholder, TargetValue: &selectedChoice}} + for i := range configurations { + selectableItems = append(selectableItems, ioutils.PromptItem{Option: configurations[i].ServerId, TargetValue: &selectedChoice}) + } + err = ioutils.SelectString(selectableItems, "Select whether to create a new server configuration or to web login to an existing one:", false, func(item ioutils.PromptItem) { + *item.TargetValue = item.Option + selectedChoice = *item.TargetValue + }) + return +} diff --git a/general/project/projectinit.go b/general/project/projectinit.go index 484351a5a..5867362bb 100644 --- a/general/project/projectinit.go +++ b/general/project/projectinit.go @@ -13,7 +13,7 @@ import ( "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" "github.com/jfrog/jfrog-client-go/utils/errorutils" "github.com/jfrog/jfrog-client-go/utils/io/fileutils" - "gopkg.in/yaml.v2" + "gopkg.in/yaml.v3" ) const ( diff --git a/general/utils.go b/general/utils.go new file mode 100644 index 000000000..3aee57434 --- /dev/null +++ b/general/utils.go @@ -0,0 +1,43 @@ +package general + +import ( + "github.com/jfrog/jfrog-cli-core/v2/common/commands" + "github.com/jfrog/jfrog-cli-core/v2/utils/config" + "github.com/jfrog/jfrog-client-go/utils/errorutils" + "net" + "net/url" + "strings" +) + +const defaultServerId = "default-server" + +// Deduce the server ID from the URL and add server details to config. +func ConfigServerWithDeducedId(server *config.ServerDetails, interactive, webLogin bool) error { + serverId, err := deduceServerId(server.Url) + if err != nil { + return err + } + return ConfigServerAsDefault(server, serverId, interactive, webLogin) +} + +func deduceServerId(platformUrl string) (string, error) { + u, err := url.Parse(platformUrl) + if errorutils.CheckError(err) != nil { + return "", err + } + + // If the host is an IP address, use a default server ID. + serverId := defaultServerId + if net.ParseIP(u.Hostname()) == nil { + // Otherwise, take the server name from host name: https://myjfrog.jfrog.com/ -> myjfrog + serverId = strings.Split(u.Hostname(), ".")[0] + } + return serverId, nil +} + +// Add the given server details to the CLI's config by running a 'jf config' command, and make it the default server. +func ConfigServerAsDefault(server *config.ServerDetails, serverId string, interactive, webLogin bool) error { + return commands.NewConfigCommand(commands.AddOrEdit, serverId). + SetInteractive(interactive).SetUseWebLogin(webLogin). 
+ SetDetails(server).SetMakeDefault(true).Run() +} diff --git a/general/utils_test.go b/general/utils_test.go new file mode 100644 index 000000000..9faa252c8 --- /dev/null +++ b/general/utils_test.go @@ -0,0 +1,27 @@ +package general + +import ( + "github.com/stretchr/testify/assert" + "testing" +) + +type deduceServerIdTest struct { + url string + expectedServerID string +} + +func TestDeduceServerId(t *testing.T) { + testCases := []deduceServerIdTest{ + {"http://localhost:8082/", "localhost"}, + {"https://platform.jfrog.io/", "platform"}, + {"http://127.0.0.1:8082/", defaultServerId}, + } + + for _, testCase := range testCases { + t.Run(testCase.url, func(t *testing.T) { + serverId, err := deduceServerId(testCase.url) + assert.NoError(t, err) + assert.Equal(t, testCase.expectedServerID, serverId) + }) + } +} diff --git a/go.mod b/go.mod index 8244563ec..d8791b5f8 100644 --- a/go.mod +++ b/go.mod @@ -6,37 +6,37 @@ require ( github.com/buger/jsonparser v1.1.1 github.com/chzyer/readline v1.5.1 github.com/forPelevin/gomoji v1.1.8 - github.com/gocarina/gocsv v0.0.0-20230406101422-6445c2b15027 + github.com/gocarina/gocsv v0.0.0-20230616125104-99d496ca653d github.com/google/uuid v1.3.0 - github.com/gookit/color v1.5.3 + github.com/gookit/color v1.5.4 github.com/jedib0t/go-pretty/v6 v6.4.6 - github.com/jfrog/build-info-go v1.9.6 + github.com/jfrog/build-info-go v1.9.8 github.com/jfrog/gofrog v1.3.0 - github.com/jfrog/jfrog-client-go v1.30.0 + github.com/jfrog/jfrog-client-go v1.31.5 github.com/magiconair/properties v1.8.7 github.com/manifoldco/promptui v0.9.0 - github.com/owenrumney/go-sarif/v2 v2.1.3 + github.com/owenrumney/go-sarif/v2 v2.2.0 github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 - github.com/pkg/errors v0.9.1 - github.com/spf13/viper v1.15.0 + github.com/spf13/viper v1.16.0 github.com/stretchr/testify v1.8.4 - github.com/urfave/cli v1.22.12 + github.com/urfave/cli v1.22.14 github.com/vbauerster/mpb/v7 v7.5.3 - golang.org/x/exp v0.0.0-20230522175609-2e198f4a06a1 - golang.org/x/mod v0.10.0 - golang.org/x/sync v0.1.0 - golang.org/x/term v0.8.0 - golang.org/x/text v0.9.0 - gopkg.in/yaml.v2 v2.4.0 + golang.org/x/exp v0.0.0-20230817173708-d852ddb80c63 + golang.org/x/mod v0.12.0 + golang.org/x/sync v0.3.0 + golang.org/x/term v0.11.0 + golang.org/x/text v0.12.0 + gopkg.in/yaml.v3 v3.0.1 ) require github.com/c-bata/go-prompt v0.2.5 // Should not be updated to 0.2.6 due to a bug (https://github.com/jfrog/jfrog-cli-core/pull/372) require ( - github.com/BurntSushi/toml v1.2.1 // indirect + dario.cat/mergo v1.0.0 // indirect + github.com/BurntSushi/toml v1.3.2 // indirect github.com/CycloneDX/cyclonedx-go v0.7.1 // indirect - github.com/Microsoft/go-winio v0.5.2 // indirect - github.com/ProtonMail/go-crypto v0.0.0-20230528122434-6f98819771a1 // indirect + github.com/Microsoft/go-winio v0.6.1 // indirect + github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95 // indirect github.com/VividCortex/ewma v1.2.0 // indirect github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d // indirect github.com/acomagu/bufpipe v1.0.4 // indirect @@ -49,36 +49,35 @@ require ( github.com/fsnotify/fsnotify v1.6.0 // indirect github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect github.com/go-git/go-billy/v5 v5.4.1 // indirect - github.com/go-git/go-git/v5 v5.7.0 // indirect + github.com/go-git/go-git/v5 v5.8.1 // indirect github.com/golang-jwt/jwt/v4 v4.5.0 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect - 
github.com/golang/snappy v0.0.2 // indirect + github.com/golang/snappy v0.0.4 // indirect github.com/hashicorp/hcl v1.0.0 // indirect - github.com/imdario/mergo v0.3.15 // indirect github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect github.com/kevinburke/ssh_config v1.2.0 // indirect - github.com/klauspost/compress v1.11.4 // indirect + github.com/klauspost/compress v1.15.9 // indirect github.com/klauspost/cpuid/v2 v2.2.3 // indirect github.com/klauspost/pgzip v1.2.5 // indirect github.com/mattn/go-colorable v0.1.12 // indirect - github.com/mattn/go-isatty v0.0.14 // indirect + github.com/mattn/go-isatty v0.0.16 // indirect github.com/mattn/go-runewidth v0.0.13 // indirect github.com/mattn/go-tty v0.0.3 // indirect github.com/mholt/archiver/v3 v3.5.1 // indirect - github.com/minio/sha256-simd v1.0.1-0.20230222114820-6096f891a77b // indirect + github.com/minio/sha256-simd v1.0.1 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/nwaples/rardecode v1.1.0 // indirect - github.com/pelletier/go-toml/v2 v2.0.6 // indirect - github.com/pierrec/lz4/v4 v4.1.2 // indirect + github.com/pelletier/go-toml/v2 v2.0.8 // indirect + github.com/pierrec/lz4/v4 v4.1.15 // indirect github.com/pjbgf/sha1cd v0.3.0 // indirect github.com/pkg/term v1.1.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/rivo/uniseg v0.4.3 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/sergi/go-diff v1.1.0 // indirect - github.com/skeema/knownhosts v1.1.1 // indirect - github.com/spf13/afero v1.9.3 // indirect - github.com/spf13/cast v1.5.0 // indirect + github.com/skeema/knownhosts v1.2.0 // indirect + github.com/spf13/afero v1.9.5 // indirect + github.com/spf13/cast v1.5.1 // indirect github.com/spf13/jwalterweatherman v1.1.0 // indirect github.com/spf13/pflag v1.0.5 // indirect github.com/subosito/gotenv v1.4.2 // indirect @@ -86,16 +85,16 @@ require ( github.com/xanzy/ssh-agent v0.3.3 // indirect github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect github.com/xo/terminfo v0.0.0-20210125001918-ca9a967f8778 // indirect - golang.org/x/crypto v0.9.0 // indirect - golang.org/x/net v0.10.0 // indirect; indirectmake - golang.org/x/sys v0.8.0 // indirect + golang.org/x/crypto v0.12.0 // indirect + golang.org/x/net v0.14.0 // indirect; indirectmake + golang.org/x/sys v0.11.0 // indirect + golang.org/x/tools v0.12.1-0.20230815132531-74c255bcf846 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/warnings.v0 v0.1.2 // indirect - gopkg.in/yaml.v3 v3.0.1 // indirect ) -replace github.com/jfrog/jfrog-client-go => github.com/jfrog/jfrog-client-go v1.28.1-0.20230611131847-a3b84a9004c3 +// replace github.com/jfrog/jfrog-client-go => github.com/jfrog/jfrog-client-go v1.28.1-0.20230803140217-0a5f43783ae8 -// replace github.com/jfrog/build-info-go => github.com/jfrog/build-info-go v1.8.9-0.20230518114837-fe6a826d5001 +// replace github.com/jfrog/build-info-go => github.com/jfrog/build-info-go v1.8.9-0.20230820165857-52ff32c4d8eb // replace github.com/jfrog/gofrog => github.com/jfrog/gofrog v1.2.6-0.20230418122323-2bf299dd6d27 diff --git a/go.sum b/go.sum index 0a9e310f3..63a72106b 100644 --- a/go.sum +++ b/go.sum @@ -35,17 +35,20 @@ cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohl cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= cloud.google.com/go/storage 
v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= +dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= +dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/toml v1.2.1 h1:9F2/+DoOYIOksmaJFPw1tGFy1eDnIJXg+UHjuD8lTak= -github.com/BurntSushi/toml v1.2.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= +github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8= +github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/CycloneDX/cyclonedx-go v0.7.1 h1:5w1SxjGm9MTMNTuRbEPyw21ObdbaagTWF/KfF0qHTRE= github.com/CycloneDX/cyclonedx-go v0.7.1/go.mod h1:N/nrdWQI2SIjaACyyDs/u7+ddCkyl/zkNs8xFsHF2Ps= -github.com/Microsoft/go-winio v0.5.2 h1:a9IhgEQBCUEk6QCdml9CiJGhAws+YwffDHEMp1VMrpA= github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= -github.com/ProtonMail/go-crypto v0.0.0-20230528122434-6f98819771a1 h1:JMDGhoQvXNTqH6Y3MC0IUw6tcZvaUdujNqzK2HYWZc8= -github.com/ProtonMail/go-crypto v0.0.0-20230528122434-6f98819771a1/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= +github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= +github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= +github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95 h1:KLq8BE0KwCL+mmXnjLWEAOYO+2l2AE4YMmqG1ZpZHBs= +github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= github.com/VividCortex/ewma v1.2.0 h1:f58SaIzcDXrSy3kWaHNvuJgJ3Nmz59Zji6XoJR/q1ow= github.com/VividCortex/ewma v1.2.0/go.mod h1:nz4BbCtbLyFDeC9SUHbtcT5644juEuWfUAUnGx7j5l4= github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8= @@ -100,7 +103,7 @@ github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.m github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/forPelevin/gomoji v1.1.8 h1:JElzDdt0TyiUlecy6PfITDL6eGvIaxqYH1V52zrd0qQ= github.com/forPelevin/gomoji v1.1.8/go.mod h1:8+Z3KNGkdslmeGZBC3tCrwMrcPy5GRzAD+gL9NAwMXg= -github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= +github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0XL9UY= github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= github.com/gliderlabs/ssh v0.3.5 h1:OcaySEmAQJgyYcArR+gGGTHCyE7nvhEMTlYY+Dp8CpY= @@ -109,13 +112,13 @@ github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmS github.com/go-git/go-billy/v5 v5.4.1 h1:Uwp5tDRkPr+l/TnbHOQzp+tmJfLceOlbVucgpTz8ix4= github.com/go-git/go-billy/v5 v5.4.1/go.mod h1:vjbugF6Fz7JIflbVpl1hJsGjSHNltrSw45YK/ukIvQg= github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20230305113008-0c11038e723f h1:Pz0DHeFij3XFhoBRGUDPzSJ+w2UcK5/0JvF8DRI58r8= -github.com/go-git/go-git/v5 v5.7.0 h1:t9AudWVLmqzlo+4bqdf7GY+46SUuRsx59SboFxkq2aE= -github.com/go-git/go-git/v5 v5.7.0/go.mod 
h1:coJHKEOk5kUClpsNlXrUvPrDxY3w3gjHvhcZd8Fodw8= +github.com/go-git/go-git/v5 v5.8.1 h1:Zo79E4p7TRk0xoRgMq0RShiTHGKcKI4+DI6BfJc/Q+A= +github.com/go-git/go-git/v5 v5.8.1/go.mod h1:FHFuoD6yGz5OSKEBK+aWN9Oah0q54Jxl0abmj6GnqAo= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/gocarina/gocsv v0.0.0-20230406101422-6445c2b15027 h1:LCGzZb4kMUUjMUzLxxqSJBwo9szUO0tK8cOxnEOT4Jc= -github.com/gocarina/gocsv v0.0.0-20230406101422-6445c2b15027/go.mod h1:5YoVOkjYAQumqlV356Hj3xeYh4BdZuLE0/nRkf2NKkI= +github.com/gocarina/gocsv v0.0.0-20230616125104-99d496ca653d h1:KbPOUXFUDJxwZ04vbmDOc3yuruGvVO+LOa7cVER3yWw= +github.com/gocarina/gocsv v0.0.0-20230616125104-99d496ca653d/go.mod h1:5YoVOkjYAQumqlV356Hj3xeYh4BdZuLE0/nRkf2NKkI= github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= @@ -145,8 +148,9 @@ github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvq github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/snappy v0.0.2 h1:aeE13tS0IiQgFjYdoL8qN3K1N2bXXtI6Vi51/y7BpMw= github.com/golang/snappy v0.0.2/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= @@ -180,34 +184,33 @@ github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+ github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= -github.com/gookit/color v1.5.3 h1:twfIhZs4QLCtimkP7MOxlF3A0U/5cDPseRT9M/+2SCE= -github.com/gookit/color v1.5.3/go.mod h1:NUzwzeehUfl7GIb36pqId+UGmRfQcU/WiiyTTeNjHtE= +github.com/gookit/color v1.5.4 h1:FZmqs7XOyGgCAxmWyPslpiok1k05wmY3SJTytgvYFs0= +github.com/gookit/color v1.5.4/go.mod h1:pZJOeOS8DM43rXbp4AZo1n9zCU2qjpcRko0b6/QJi9w= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= 
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/imdario/mergo v0.3.15 h1:M8XP7IuFNsqUx6VPK2P9OSmsYsI/YFaGil0uD21V3dM= -github.com/imdario/mergo v0.3.15/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= github.com/jedib0t/go-pretty/v6 v6.4.6 h1:v6aG9h6Uby3IusSSEjHaZNXpHFhzqMmjXcPq1Rjl9Jw= github.com/jedib0t/go-pretty/v6 v6.4.6/go.mod h1:Ndk3ase2CkQbXLLNf5QDHoYb6J9WtVfmHZu9n8rk2xs= -github.com/jfrog/build-info-go v1.9.6 h1:lCJ2j5uXAlJsSwDe5J8WD7Co1f/hUlZvMfwfb5AzLJU= -github.com/jfrog/build-info-go v1.9.6/go.mod h1:GbuFS+viHCKZYx9nWHYu7ab1DgQkFdtVN3BJPUNb2D4= +github.com/jfrog/build-info-go v1.9.8 h1:D8/ga+YgQpqp/CJj2zteS4/twmSy8zvm1v9lCd2Kv1M= +github.com/jfrog/build-info-go v1.9.8/go.mod h1:t31QRpH5xUJKw8XkQlAA+Aq7aanyS1rrzpcK8xSNVts= github.com/jfrog/gofrog v1.3.0 h1:o4zgsBZE4QyDbz2M7D4K6fXPTBJht+8lE87mS9bw7Gk= github.com/jfrog/gofrog v1.3.0/go.mod h1:IFMc+V/yf7rA5WZ74CSbXe+Lgf0iApEQLxRZVzKRUR0= -github.com/jfrog/jfrog-client-go v1.28.1-0.20230611131847-a3b84a9004c3 h1:bIpljSo/bnilaRky2mtXcljC0JmONgc97AEy1YG6rXE= -github.com/jfrog/jfrog-client-go v1.28.1-0.20230611131847-a3b84a9004c3/go.mod h1:qEJxoe68sUtqHJ1YhXv/7pKYP/9p1D5tJrruzJKYeoI= +github.com/jfrog/jfrog-client-go v1.31.5 h1:dYVgIJzMwX+EU9GEELKPSHFLyfW6UrrjZWMEZtAyx6A= +github.com/jfrog/jfrog-client-go v1.31.5/go.mod h1:icb00ZJN/mMMNkQduHDkzpqsXH9Flwi3f3COYexq3Nc= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4= github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= -github.com/klauspost/compress v1.11.4 h1:kz40R/YWls3iqT9zX9AHN3WoVsrAWVyui5sxuLqiXqU= github.com/klauspost/compress v1.11.4/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.15.9 h1:wKRjX6JRtDdrE9qwa4b/Cip7ACOshUI4smpCQanqjSY= +github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU= github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= github.com/klauspost/cpuid/v2 v2.2.3 h1:sxCkb+qR91z4vsqw4vGGZlDgPz3G7gjaLyK3V8y70BU= github.com/klauspost/cpuid/v2 v2.2.3/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY= @@ -216,7 +219,7 @@ github.com/klauspost/pgzip v1.2.5/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQ github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= 
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= @@ -234,8 +237,9 @@ github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/mattn/go-isatty v0.0.16 h1:bq3VjFmv/sOjHtdEhmkEV4x1AJtvUvOJ2PFAZ5+peKQ= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-runewidth v0.0.6/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= github.com/mattn/go-runewidth v0.0.13 h1:lTGmDsbAYt5DmK6OnoV7EuIF1wEIFAcxld6ypU4OSgU= @@ -244,20 +248,21 @@ github.com/mattn/go-tty v0.0.3 h1:5OfyWorkyO7xP52Mq7tB36ajHDG5OHrmBGIS/DtakQI= github.com/mattn/go-tty v0.0.3/go.mod h1:ihxohKRERHTVzN+aSVRwACLCeqIoZAWpoICkkvrWyR0= github.com/mholt/archiver/v3 v3.5.1 h1:rDjOBX9JSF5BvoJGvjqK479aL70qh9DIpZCl+k7Clwo= github.com/mholt/archiver/v3 v3.5.1/go.mod h1:e3dqJ7H78uzsRSEACH1joayhuSyhnonssnDhppzS1L4= -github.com/minio/sha256-simd v1.0.1-0.20230222114820-6096f891a77b h1:kr87H4ULRbe6LQNF5f3A+nGY8TQLgckmdG9BLJ/QB18= -github.com/minio/sha256-simd v1.0.1-0.20230222114820-6096f891a77b/go.mod h1:Pz6AKMiUdngCLpeTL/RJY1M9rUuPMYujV5xJjtbRSN8= +github.com/minio/sha256-simd v1.0.1 h1:6kaan5IFmwTNynnKKpDHe6FWHohJOHhCPchzK49dzMM= +github.com/minio/sha256-simd v1.0.1/go.mod h1:Pz6AKMiUdngCLpeTL/RJY1M9rUuPMYujV5xJjtbRSN8= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/nwaples/rardecode v1.1.0 h1:vSxaY8vQhOcVr4mm5e8XllHWTiM4JF507A0Katqw7MQ= github.com/nwaples/rardecode v1.1.0/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0= github.com/owenrumney/go-sarif v1.1.1/go.mod h1:dNDiPlF04ESR/6fHlPyq7gHKmrM0sHUvAGjsoh8ZH0U= -github.com/owenrumney/go-sarif/v2 v2.1.3 h1:1guchw824yg1CwjredY8pnzcE0SG+sfNzFY5CUYWgE4= -github.com/owenrumney/go-sarif/v2 v2.1.3/go.mod h1:MSqMMx9WqlBSY7pXoOZWgEsVB4FDNfhcaXDA1j6Sr+w= -github.com/pelletier/go-toml/v2 v2.0.6 h1:nrzqCb7j9cDFj2coyLNLaZuJTLjWjlaz6nvTvIwycIU= -github.com/pelletier/go-toml/v2 v2.0.6/go.mod h1:eumQOmlWiOPt5WriQQqoM5y18pDHwha2N+QD+EUNTek= -github.com/pierrec/lz4/v4 v4.1.2 h1:qvY3YFXRQE/XB8MlLzJH7mSzBs74eA2gg52YTk6jUPM= +github.com/owenrumney/go-sarif/v2 v2.2.0 h1:1DmZaijK0HBZCR1fgcDSGa7VzYkU9NDmbZ7qC2QfUjE= +github.com/owenrumney/go-sarif/v2 v2.2.0/go.mod h1:MSqMMx9WqlBSY7pXoOZWgEsVB4FDNfhcaXDA1j6Sr+w= +github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ= +github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4= github.com/pierrec/lz4/v4 v4.1.2/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pierrec/lz4/v4 v4.1.15 h1:MO0/ucJhngq7299dKLwIMtgTfbkoSPF6AoMYDd8Q4q0= +github.com/pierrec/lz4/v4 v4.1.15/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pjbgf/sha1cd v0.3.0 
h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU= @@ -275,24 +280,24 @@ github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJ github.com/rivo/uniseg v0.4.3 h1:utMvzDsuh3suAEnhH0RdHmoPbU648o6CvXxTx4SBMOw= github.com/rivo/uniseg v0.4.3/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rogpeppe/go-internal v1.6.1 h1:/FiVV8dS/e+YqF2JvO3yXRFbBLTIuSDkuC7aBOAvL+k= +github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/skeema/knownhosts v1.1.1 h1:MTk78x9FPgDFVFkDLTrsnnfCJl7g1C/nnKvePgrIngE= -github.com/skeema/knownhosts v1.1.1/go.mod h1:g4fPeYpque7P0xefxtGzV81ihjC8sX2IqpAoNkjxbMo= -github.com/spf13/afero v1.9.3 h1:41FoI0fD7OR7mGcKE/aOiLkGreyf8ifIOQmJANWogMk= -github.com/spf13/afero v1.9.3/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y= -github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w= -github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU= +github.com/skeema/knownhosts v1.2.0 h1:h9r9cf0+u7wSE+M183ZtMGgOJKiL96brpaz5ekfJCpM= +github.com/skeema/knownhosts v1.2.0/go.mod h1:g4fPeYpque7P0xefxtGzV81ihjC8sX2IqpAoNkjxbMo= +github.com/spf13/afero v1.9.5 h1:stMpOSZFs//0Lv29HduCmli3GUfpFoF3Y1Q/aXj/wVM= +github.com/spf13/afero v1.9.5/go.mod h1:UBogFpq8E9Hx+xc5CNTTEpTnuHVmXDwZcZcE1eb/UhQ= +github.com/spf13/cast v1.5.1 h1:R+kOtfhWQE6TVQzY+4D7wJLBgkdVasCEFxSUBYBYIlA= +github.com/spf13/cast v1.5.1/go.mod h1:b9PdjNptOpzXr7Rq1q9gJML/2cdGQAo69NKzQ10KN48= github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.15.0 h1:js3yy885G8xwJa6iOISGFwd+qlUo5AvyXb7CiihdtiU= -github.com/spf13/viper v1.15.0/go.mod h1:fFcTBJxvhhzSJiZy8n+PeW6t8l+KeT/uTARa0jHOQLA= +github.com/spf13/viper v1.16.0 h1:rGGH0XDZhdUOryiDWjmIvUSWpbNqisK8Wk0Vyefw8hc= +github.com/spf13/viper v1.16.0/go.mod h1:yg78JgCJcbrQOvV9YLXgkLaZqUidkY9K+Dd1FofRzQg= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= @@ -303,7 +308,7 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.4/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= 
-github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/subosito/gotenv v1.4.2 h1:X1TuBLAMDFbaTAChgCBLu3DU3UPyELpnF2jjJ2cz/S8= @@ -311,8 +316,8 @@ github.com/subosito/gotenv v1.4.2/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNG github.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/ulikunitz/xz v0.5.9 h1:RsKRIA2MO8x56wkkcd3LbtcE/uMszhb6DpRf+3uwa3I= github.com/ulikunitz/xz v0.5.9/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= -github.com/urfave/cli v1.22.12 h1:igJgVw1JdKH+trcLWLeLwZjU9fEfPesQ+9/e4MQ44S8= -github.com/urfave/cli v1.22.12/go.mod h1:sSBEIC79qR6OvcmsD4U3KABeOTxDqQtdDnaFuUN30b8= +github.com/urfave/cli v1.22.14 h1:ebbhrRiGK2i4naQJr+1Xj92HXZCrK7MsyTS/ob3HnAk= +github.com/urfave/cli v1.22.14/go.mod h1:X0eDS6pD6Exaclxm99NJ3FiCDRED7vIHpx2mDOHLvkA= github.com/vbauerster/mpb/v7 v7.5.3 h1:BkGfmb6nMrrBQDFECR/Q7RkKCw7ylMetCb4079CGs4w= github.com/vbauerster/mpb/v7 v7.5.3/go.mod h1:i+h4QY6lmLvBNK2ah1fSreiw3ajskRlBp9AhY/PnuOE= github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4= @@ -342,12 +347,12 @@ golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= -golang.org/x/crypto v0.9.0 h1:LF6fAI+IutBocDJ2OT0Q1g8plpYljMZ4+lty+dsqw3g= -golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0= +golang.org/x/crypto v0.12.0 h1:tFM/ta59kqch6LlvYnPa0yx5a83cL2nHflFhYKvv9Yk= +golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -358,8 +363,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/exp v0.0.0-20230522175609-2e198f4a06a1 h1:k/i9J1pBpvlfR+9QsetwPyERsqu1GIbi967PQMq3Ivc= -golang.org/x/exp 
v0.0.0-20230522175609-2e198f4a06a1/go.mod h1:V1LtkGg67GoY2N1AnLN78QLrzxkLyJw7RJb1gzOOz9w= +golang.org/x/exp v0.0.0-20230817173708-d852ddb80c63 h1:m64FZMko/V45gv0bNmrNYoDEq8U5YUhetc9cBWKS1TQ= +golang.org/x/exp v0.0.0-20230817173708-d852ddb80c63/go.mod h1:0v4NqG35kSWCMzLaMeX+IQrlSnVE/bqGSyC2cz/9Le8= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -385,8 +390,8 @@ golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.10.0 h1:lFO9qtOdlre5W1jxS3r/4szv2/6iXxScdzjoBMXNhYk= -golang.org/x/mod v0.10.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc= +golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -423,8 +428,8 @@ golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= -golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M= -golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.14.0 h1:BONx9s002vGdD9umnlX1Po8vOZmrgH34qlHcD1MfK14= +golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -445,8 +450,9 @@ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= 
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -499,6 +505,7 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220909162455-aba9fc2a8ff2/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -506,15 +513,15 @@ golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU= -golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.11.0 h1:eG7RXZHdqOJ1i+0lgLgCpSXAp6M3LYlAo6osgSi0xOM= +golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= -golang.org/x/term v0.8.0 h1:n5xxQn2i3PC0yLAbjTpNT85q/Kgzcr2gIoX9OrJUols= -golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.11.0 h1:F9tnn/DA/Im8nCwm+fX+1/eBwi4qFjRT++MhtVC4ZX0= +golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -527,8 +534,8 @@ golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE= -golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.12.0 h1:k+n5B8goJNdU7hSvEtMUz3d1Q6D/XW4COJSJR6fN0mc= +golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time 
v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -581,6 +588,8 @@ golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4f golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.12.1-0.20230815132531-74c255bcf846 h1:Vve/L0v7CXXuxUmaMGIEK/dEeq7uiqb5qBgQrZzIE7E= +golang.org/x/tools v0.12.1-0.20230815132531-74c255bcf846/go.mod h1:Sc0INKfu04TlqNoRA1hgpFZbhYXHPr4V5DzpSBTPqQM= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -685,7 +694,6 @@ gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= diff --git a/lifecycle/common.go b/lifecycle/common.go new file mode 100644 index 000000000..93045de66 --- /dev/null +++ b/lifecycle/common.go @@ -0,0 +1,36 @@ +package lifecycle + +import ( + "github.com/jfrog/jfrog-cli-core/v2/artifactory/utils" + "github.com/jfrog/jfrog-cli-core/v2/utils/config" + "github.com/jfrog/jfrog-client-go/lifecycle" + "github.com/jfrog/jfrog-client-go/lifecycle/services" +) + +type releaseBundleCmd struct { + serverDetails *config.ServerDetails + releaseBundleName string + releaseBundleVersion string + signingKeyName string + sync bool + rbProjectKey string +} + +func (rbc *releaseBundleCmd) getPrerequisites() (servicesManager *lifecycle.LifecycleServicesManager, rbDetails services.ReleaseBundleDetails, params services.CreateOrPromoteReleaseBundleParams, err error) { + servicesManager, err = utils.CreateLifecycleServiceManager(rbc.serverDetails, false) + if err != nil { + return + } + rbDetails = services.ReleaseBundleDetails{ + ReleaseBundleName: rbc.releaseBundleName, + ReleaseBundleVersion: rbc.releaseBundleVersion, + } + params = services.CreateOrPromoteReleaseBundleParams{ + ReleaseBundleQueryParams: services.ReleaseBundleQueryParams{ + ProjectKey: rbc.rbProjectKey, + Async: !rbc.sync, + }, + SigningKeyName: rbc.signingKeyName, + } + return +} diff --git a/lifecycle/createcommon.go b/lifecycle/createcommon.go new file mode 100644 index 000000000..aab498eec --- /dev/null +++ b/lifecycle/createcommon.go @@ -0,0 +1,75 @@ +package lifecycle + +import ( + "github.com/jfrog/jfrog-cli-core/v2/utils/config" +) + +type ReleaseBundleCreate struct { + releaseBundleCmd + buildsSpecPath string + releaseBundlesSpecPath string +} + +func NewReleaseBundleCreate() *ReleaseBundleCreate { + return &ReleaseBundleCreate{} +} + +func (rbc *ReleaseBundleCreate) SetServerDetails(serverDetails *config.ServerDetails) *ReleaseBundleCreate { + rbc.serverDetails 
= serverDetails + return rbc +} + +func (rbc *ReleaseBundleCreate) SetReleaseBundleName(releaseBundleName string) *ReleaseBundleCreate { + rbc.releaseBundleName = releaseBundleName + return rbc +} + +func (rbc *ReleaseBundleCreate) SetReleaseBundleVersion(releaseBundleVersion string) *ReleaseBundleCreate { + rbc.releaseBundleVersion = releaseBundleVersion + return rbc +} + +func (rbc *ReleaseBundleCreate) SetSigningKeyName(signingKeyName string) *ReleaseBundleCreate { + rbc.signingKeyName = signingKeyName + return rbc +} + +func (rbc *ReleaseBundleCreate) SetSync(sync bool) *ReleaseBundleCreate { + rbc.sync = sync + return rbc +} + +func (rbc *ReleaseBundleCreate) SetReleaseBundleProject(rbProjectKey string) *ReleaseBundleCreate { + rbc.rbProjectKey = rbProjectKey + return rbc +} + +func (rbc *ReleaseBundleCreate) SetBuildsSpecPath(buildsSpecPath string) *ReleaseBundleCreate { + rbc.buildsSpecPath = buildsSpecPath + return rbc +} + +func (rbc *ReleaseBundleCreate) SetReleaseBundlesSpecPath(releaseBundlesSpecPath string) *ReleaseBundleCreate { + rbc.releaseBundlesSpecPath = releaseBundlesSpecPath + return rbc +} + +func (rbc *ReleaseBundleCreate) CommandName() string { + return "rb_create" +} + +func (rbc *ReleaseBundleCreate) ServerDetails() (*config.ServerDetails, error) { + return rbc.serverDetails, nil +} + +func (rbc *ReleaseBundleCreate) Run() error { + servicesManager, rbDetails, params, err := rbc.getPrerequisites() + if err != nil { + return err + } + + if rbc.buildsSpecPath != "" { + return rbc.createFromBuilds(servicesManager, rbDetails, params) + } + return rbc.createFromReleaseBundles(servicesManager, rbDetails, params) +} diff --git a/lifecycle/createfrombuilds.go b/lifecycle/createfrombuilds.go new file mode 100644 index 000000000..f54ea89f5 --- /dev/null +++ b/lifecycle/createfrombuilds.go @@ -0,0 +1,88 @@ +package lifecycle + +import ( + "encoding/json" + rtUtils "github.com/jfrog/jfrog-cli-core/v2/artifactory/utils" + rtServices "github.com/jfrog/jfrog-client-go/artifactory/services" + "github.com/jfrog/jfrog-client-go/artifactory/services/utils" + "github.com/jfrog/jfrog-client-go/lifecycle" + "github.com/jfrog/jfrog-client-go/lifecycle/services" + "github.com/jfrog/jfrog-client-go/utils/errorutils" + "github.com/jfrog/jfrog-client-go/utils/io/fileutils" +) + +func (rbc *ReleaseBundleCreate) createFromBuilds(servicesManager *lifecycle.LifecycleServicesManager, + rbDetails services.ReleaseBundleDetails, params services.CreateOrPromoteReleaseBundleParams) error { + + builds := CreateFromBuildsSpec{} + content, err := fileutils.ReadFile(rbc.buildsSpecPath) + if err != nil { + return err + } + if err = json.Unmarshal(content, &builds); err != nil { + return errorutils.CheckError(err) + } + + if len(builds.Builds) == 0 { + return errorutils.CheckErrorf("at least one build is expected in order to create a release bundle from builds") + } + + buildsSource, err := rbc.convertToBuildsSource(builds) + if err != nil { + return err + } + return servicesManager.CreateReleaseBundleFromBuilds(rbDetails, params, buildsSource) +} + +func (rbc *ReleaseBundleCreate) convertToBuildsSource(builds CreateFromBuildsSpec) (services.CreateFromBuildsSource, error) { + buildsSource := services.CreateFromBuildsSource{} + for _, build := range builds.Builds { + buildSource := services.BuildSource{BuildName: build.Name} + buildNumber, err := rbc.getLatestBuildNumberIfEmpty(build.Name, build.Number, build.Project) + if err != nil { + return services.CreateFromBuildsSource{}, err + } + 
buildSource.BuildNumber = buildNumber + buildSource.BuildRepository = utils.GetBuildInfoRepositoryByProject(build.Project) + buildsSource.Builds = append(buildsSource.Builds, buildSource) + } + return buildsSource, nil +} + +func (rbc *ReleaseBundleCreate) getLatestBuildNumberIfEmpty(buildName, buildNumber, project string) (string, error) { + if buildNumber != "" { + return buildNumber, nil + } + + aqlService, err := rbc.getAqlService() + if err != nil { + return "", err + } + + buildNumber, err = utils.GetLatestBuildNumberFromArtifactory(buildName, project, aqlService) + if err != nil { + return "", err + } + if buildNumber == "" { + return "", errorutils.CheckErrorf("could not find a build info with name '%s' in artifactory", buildName) + } + return buildNumber, nil +} + +func (rbc *ReleaseBundleCreate) getAqlService() (*rtServices.AqlService, error) { + rtServiceManager, err := rtUtils.CreateServiceManager(rbc.serverDetails, 3, 0, false) + if err != nil { + return nil, err + } + return rtServices.NewAqlService(rtServiceManager.GetConfig().GetServiceDetails(), rtServiceManager.Client()), nil +} + +type CreateFromBuildsSpec struct { + Builds []SourceBuildSpec `json:"builds,omitempty"` +} + +type SourceBuildSpec struct { + Name string `json:"name,omitempty"` + Number string `json:"number,omitempty"` + Project string `json:"project,omitempty"` +} diff --git a/lifecycle/createfrombundles.go b/lifecycle/createfrombundles.go new file mode 100644 index 000000000..729babaf3 --- /dev/null +++ b/lifecycle/createfrombundles.go @@ -0,0 +1,52 @@ +package lifecycle + +import ( + "encoding/json" + "github.com/jfrog/jfrog-client-go/lifecycle" + "github.com/jfrog/jfrog-client-go/lifecycle/services" + "github.com/jfrog/jfrog-client-go/utils/errorutils" + "github.com/jfrog/jfrog-client-go/utils/io/fileutils" +) + +func (rbc *ReleaseBundleCreate) createFromReleaseBundles(servicesManager *lifecycle.LifecycleServicesManager, + rbDetails services.ReleaseBundleDetails, params services.CreateOrPromoteReleaseBundleParams) error { + + bundles := CreateFromReleaseBundlesSpec{} + content, err := fileutils.ReadFile(rbc.releaseBundlesSpecPath) + if err != nil { + return err + } + if err = json.Unmarshal(content, &bundles); err != nil { + return errorutils.CheckError(err) + } + + if len(bundles.ReleaseBundles) == 0 { + return errorutils.CheckErrorf("at least one release bundle is expected in order to create a release bundle from release bundles") + } + + releaseBundlesSource := rbc.convertToReleaseBundlesSource(bundles) + return servicesManager.CreateReleaseBundleFromBundles(rbDetails, params, releaseBundlesSource) +} + +func (rbc *ReleaseBundleCreate) convertToReleaseBundlesSource(bundles CreateFromReleaseBundlesSpec) services.CreateFromReleaseBundlesSource { + releaseBundlesSource := services.CreateFromReleaseBundlesSource{} + for _, rb := range bundles.ReleaseBundles { + rbSource := services.ReleaseBundleSource{ + ReleaseBundleName: rb.Name, + ReleaseBundleVersion: rb.Version, + ProjectKey: rb.Project, + } + releaseBundlesSource.ReleaseBundles = append(releaseBundlesSource.ReleaseBundles, rbSource) + } + return releaseBundlesSource +} + +type CreateFromReleaseBundlesSpec struct { + ReleaseBundles []SourceReleaseBundleSpec `json:"releaseBundles,omitempty"` +} + +type SourceReleaseBundleSpec struct { + Name string `json:"name,omitempty"` + Version string `json:"version,omitempty"` + Project string `json:"project,omitempty"` +} diff --git a/lifecycle/promote.go b/lifecycle/promote.go new file mode 100644 index 
000000000..ac7921715 --- /dev/null +++ b/lifecycle/promote.go @@ -0,0 +1,84 @@ +package lifecycle + +import ( + "encoding/json" + "github.com/jfrog/jfrog-cli-core/v2/utils/config" + "github.com/jfrog/jfrog-client-go/utils" + "github.com/jfrog/jfrog-client-go/utils/log" +) + +type ReleaseBundlePromote struct { + releaseBundleCmd + environment string + overwrite bool +} + +func NewReleaseBundlePromote() *ReleaseBundlePromote { + return &ReleaseBundlePromote{} +} + +func (rbp *ReleaseBundlePromote) SetServerDetails(serverDetails *config.ServerDetails) *ReleaseBundlePromote { + rbp.serverDetails = serverDetails + return rbp +} + +func (rbp *ReleaseBundlePromote) SetReleaseBundleName(releaseBundleName string) *ReleaseBundlePromote { + rbp.releaseBundleName = releaseBundleName + return rbp +} + +func (rbp *ReleaseBundlePromote) SetReleaseBundleVersion(releaseBundleVersion string) *ReleaseBundlePromote { + rbp.releaseBundleVersion = releaseBundleVersion + return rbp +} + +func (rbp *ReleaseBundlePromote) SetSigningKeyName(signingKeyName string) *ReleaseBundlePromote { + rbp.signingKeyName = signingKeyName + return rbp +} + +func (rbp *ReleaseBundlePromote) SetSync(sync bool) *ReleaseBundlePromote { + rbp.sync = sync + return rbp +} + +func (rbp *ReleaseBundlePromote) SetReleaseBundleProject(rbProjectKey string) *ReleaseBundlePromote { + rbp.rbProjectKey = rbProjectKey + return rbp +} + +func (rbp *ReleaseBundlePromote) SetEnvironment(environment string) *ReleaseBundlePromote { + rbp.environment = environment + return rbp +} + +func (rbp *ReleaseBundlePromote) SetOverwrite(overwrite bool) *ReleaseBundlePromote { + rbp.overwrite = overwrite + return rbp +} + +func (rbp *ReleaseBundlePromote) CommandName() string { + return "rb_promote" +} + +func (rbp *ReleaseBundlePromote) ServerDetails() (*config.ServerDetails, error) { + return rbp.serverDetails, nil +} + +func (rbp *ReleaseBundlePromote) Run() error { + servicesManager, rbDetails, params, err := rbp.getPrerequisites() + if err != nil { + return err + } + + promotionResp, err := servicesManager.PromoteReleaseBundle(rbDetails, params, rbp.environment, rbp.overwrite) + if err != nil { + return err + } + content, err := json.Marshal(promotionResp) + if err != nil { + return err + } + log.Output(utils.IndentJson(content)) + return nil +} diff --git a/utils/config/config.go b/utils/config/config.go index 62b88a4ed..019f979b6 100644 --- a/utils/config/config.go +++ b/utils/config/config.go @@ -11,6 +11,7 @@ import ( artifactoryAuth "github.com/jfrog/jfrog-client-go/artifactory/auth" "github.com/jfrog/jfrog-client-go/auth" distributionAuth "github.com/jfrog/jfrog-client-go/distribution/auth" + lifecycleAuth "github.com/jfrog/jfrog-client-go/lifecycle/auth" pipelinesAuth "github.com/jfrog/jfrog-client-go/pipelines/auth" "github.com/jfrog/jfrog-client-go/utils" "github.com/jfrog/jfrog-client-go/utils/errorutils" @@ -218,7 +219,6 @@ func getConfigFile() (content []byte, err error) { if exists { content, err = fileutils.ReadFile(confFilePath) return - } // Try to look for older config files for i := coreutils.GetCliConfigVersion() - 1; i >= 3; i-- { @@ -567,6 +567,7 @@ type ServerDetails struct { MissionControlUrl string `json:"missionControlUrl,omitempty"` PipelinesUrl string `json:"pipelinesUrl,omitempty"` AccessUrl string `json:"accessUrl,omitempty"` + LifecycleUrl string `json:"-"` User string `json:"user,omitempty"` Password string `json:"password,omitempty"` SshKeyPath string `json:"sshKeyPath,omitempty"` @@ -580,6 +581,7 @@ type ServerDetails struct 
{ ServerId string `json:"serverId,omitempty"` IsDefault bool `json:"isDefault,omitempty"` InsecureTls bool `json:"-"` + WebLogin bool `json:"webLogin,omitempty"` } // Deprecated @@ -652,6 +654,10 @@ func (serverDetails *ServerDetails) GetAccessUrl() string { return serverDetails.AccessUrl } +func (serverDetails *ServerDetails) GetLifecycleUrl() string { + return serverDetails.LifecycleUrl +} + func (serverDetails *ServerDetails) GetUser() string { return serverDetails.User } @@ -706,6 +712,12 @@ func (serverDetails *ServerDetails) CreateAccessAuthConfig() (auth.ServiceDetail return serverDetails.createAuthConfig(pAuth) } +func (serverDetails *ServerDetails) CreateLifecycleAuthConfig() (auth.ServiceDetails, error) { + lcAuth := lifecycleAuth.NewLifecycleDetails() + lcAuth.SetUrl(serverDetails.LifecycleUrl) + return serverDetails.createAuthConfig(lcAuth) +} + func (serverDetails *ServerDetails) createAuthConfig(details auth.ServiceDetails) (auth.ServiceDetails, error) { details.SetSshUrl(serverDetails.SshUrl) details.SetAccessToken(serverDetails.AccessToken) diff --git a/utils/config/tokenrefresh.go b/utils/config/tokenrefresh.go index 8eef25548..31ed2433e 100644 --- a/utils/config/tokenrefresh.go +++ b/utils/config/tokenrefresh.go @@ -1,7 +1,6 @@ package config import ( - "errors" "github.com/jfrog/jfrog-client-go/access" accessservices "github.com/jfrog/jfrog-client-go/access/services" "github.com/jfrog/jfrog-client-go/utils/errorutils" @@ -116,7 +115,7 @@ func tokenRefreshHandler(currentAccessToken string, tokenType TokenType) (newAcc newAccessToken, err = refreshAccessTokenAndWriteToConfig(serverConfiguration, currentAccessToken) return } - err = errorutils.CheckError(errors.New("unsupported refreshable token type: " + string(tokenType))) + err = errorutils.CheckErrorf("unsupported refreshable token type: " + string(tokenType)) return } @@ -154,7 +153,7 @@ func refreshAccessTokenAndWriteToConfig(serverConfiguration *ServerDetails, curr // Try refreshing tokens newToken, err := refreshExpiredAccessToken(serverConfiguration, currentAccessToken, serverConfiguration.RefreshToken) if err != nil { - return "", errorutils.CheckError(errors.New("Refresh access token failed: " + err.Error())) + return "", errorutils.CheckErrorf("Refresh access token failed: " + err.Error()) } err = writeNewArtifactoryTokens(serverConfiguration, tokenRefreshServerId, newToken.AccessToken, newToken.RefreshToken) return newToken.AccessToken, err diff --git a/utils/coreutils/cmdutils.go b/utils/coreutils/cmdutils.go index d591b81df..cd0305584 100644 --- a/utils/coreutils/cmdutils.go +++ b/utils/coreutils/cmdutils.go @@ -277,32 +277,37 @@ func ExtractXrayOutputFormatFromArgs(args []string) (cleanArgs []string, format return } -// Print the test to the console in green color. +// Add green color style to the string if possible. func PrintTitle(str string) string { return colorStr(str, color.Green) } -// Print the test to the console in cyan color. +// Add cyan color style to the string if possible. func PrintLink(str string) string { return colorStr(str, color.Cyan) } -// Print the test to the console with bold style. +// Add bold style to the string if possible. func PrintBold(str string) string { return colorStr(str, color.Bold) } -// Print the test to the console with bold style. +// Add bold and green style to the string if possible. func PrintBoldTitle(str string) string { return PrintBold(PrintTitle(str)) } -// Print the test to the console in gray color. +// Add gray color style to the string if possible. 
func PrintComment(str string) string { return colorStr(str, color.Gray) } -// Print the test to the console with the specified color. +// Add yellow color style to the string if possible. +func PrintYellow(str string) string { + return colorStr(str, color.Yellow) +} + +// Add the requested style to the string if possible. func colorStr(str string, c color.Color) string { // Add styles only on supported terminals if log.IsStdOutTerminal() && log.IsColorsSupported() { diff --git a/utils/coreutils/coreconsts.go b/utils/coreutils/coreconsts.go index 1f0aa66a6..7727b2d10 100644 --- a/utils/coreutils/coreconsts.go +++ b/utils/coreutils/coreconsts.go @@ -34,7 +34,7 @@ const ( PluginsExecDirName = "bin" PluginsResourcesDirName = "resources" - // Env + //#nosec G101 ErrorHandling = "JFROG_CLI_ERROR_HANDLING" TempDir = "JFROG_CLI_TEMP_DIR" LogLevel = "JFROG_CLI_LOG_LEVEL" @@ -49,9 +49,10 @@ const ( // Although these vars are constant, they are defined inside a vars section and not a constants section because the tests modify these values. var ( - HomeDir = "JFROG_CLI_HOME_DIR" - BuildName = "JFROG_CLI_BUILD_NAME" - BuildNumber = "JFROG_CLI_BUILD_NUMBER" - Project = "JFROG_CLI_BUILD_PROJECT" + HomeDir = "JFROG_CLI_HOME_DIR" + BuildName = "JFROG_CLI_BUILD_NAME" + BuildNumber = "JFROG_CLI_BUILD_NUMBER" + Project = "JFROG_CLI_BUILD_PROJECT" + //#nosec G101 EncryptionKey = "JFROG_CLI_ENCRYPTION_KEY" ) diff --git a/utils/coreutils/tableutils.go b/utils/coreutils/tableutils.go index 35bdec231..e51965ecd 100644 --- a/utils/coreutils/tableutils.go +++ b/utils/coreutils/tableutils.go @@ -28,6 +28,7 @@ var DefaultMaxColWidth = 25 // In case the struct you want to print contains a field that is a slice of other structs, // you can print it in the table too with the 'embed-table' tag which can be set on slices of structs only. // Fields with the 'extended' tag will be printed iff the 'printExtended' bool input is true. +// You can merge cells horizontally with the 'auto-merge' tag, it will merge cells with the same value. 
// // Example: // These are the structs Customer and Product: @@ -91,16 +92,44 @@ var DefaultMaxColWidth = 25 // ┌─────────────────────────┐ // │ No customers were found │ // └─────────────────────────┘ +// +// Example(auto-merge): +// These are the structs Customer: +// +// type Customer struct { +// name string `col-name:"Name" auto-merge:"true"` +// age string `col-name:"Age" auto-merge:"true"` +// title string `col-name:"Product Title" auto-merge:"true"` +// CatNumber string `col-name:"Product\nCatalog #" auto-merge:"true"` +// Color string `col-name:"Color" extended:"true" auto-merge:"true"` +// } +// +// customersSlice := []Customer{ +// {name: "Gai", age: "350", title: "SpiderFrog Shirt - Medium", CatNumber: "123456", Color: "Green"}, +// {name: "Gai", age: "350", title: "Floral Bottle", CatNumber: "147585", Color: "Blue"}, +// {name: "Noah", age: "21", title: "Pouch", CatNumber: "456789", Color: "Red"}, +// } +// +// Customers +// ┌──────┬─────┬───────────────────────────┬───────────┐ +// │ NAME │ AGE │ PRODUCT TITLE │ PRODUCT │ +// │ │ │ │ CATALOG # │ +// ├──────┼─────┼───────────────────────────┼───────────┤ +// │ Gai │ 350 │ SpiderFrog Shirt - Medium │ 123456 │ +// │ │ ├───────────────────────────┼───────────┤ +// │ │ │ Floral Bottle │ 147585 │ +// ├──────┼─────┼───────────────────────────┼───────────┤ +// │ Noah │ 21 │ Pouch │ 456789 │ +// └──────┴─────┴───────────────────────────┴───────────┘ + func PrintTable(rows interface{}, title string, emptyTableMessage string, printExtended bool) (err error) { + if title != "" { + log.Output(title) + } tableWriter, err := PrepareTable(rows, emptyTableMessage, printExtended) if err != nil || tableWriter == nil { return } - - if title != "" { - log.Output(title) - } - if log.IsStdOutTerminal() || os.Getenv("GITLAB_CI") == "" { tableWriter.SetStyle(table.StyleLight) } @@ -140,6 +169,7 @@ func PrepareTable(rows interface{}, emptyTableMessage string, printExtended bool columnName, columnNameExist := field.Tag.Lookup("col-name") embedTable, embedTableExist := field.Tag.Lookup("embed-table") extended, extendedExist := field.Tag.Lookup("extended") + _, autoMerge := field.Tag.Lookup("auto-merge") _, omitEmptyColumn := field.Tag.Lookup("omitempty") if !printExtended && extendedExist && extended == "true" { continue @@ -161,7 +191,7 @@ func PrepareTable(rows interface{}, emptyTableMessage string, printExtended bool } else { columnsNames = append(columnsNames, columnName) fieldsProperties = append(fieldsProperties, fieldProperties{index: i}) - columnConfigs = append(columnConfigs, table.ColumnConfig{Name: columnName}) + columnConfigs = append(columnConfigs, table.ColumnConfig{Name: columnName, AutoMerge: autoMerge}) } } tableWriter.AppendHeader(columnsNames) diff --git a/utils/coreutils/techutils.go b/utils/coreutils/techutils.go index 8ea6bcf81..985c1fad3 100644 --- a/utils/coreutils/techutils.go +++ b/utils/coreutils/techutils.go @@ -38,6 +38,8 @@ type TechData struct { exclude []string // Whether this technology is supported by the 'jf ci-setup' command. ciSetupSupport bool + // Whether Contextual Analysis supported in this technology. + applicabilityScannable bool // The file that handles the project's dependencies. 
packageDescriptor string // Formal name of the technology @@ -52,15 +54,17 @@ type TechData struct { var technologiesData = map[Technology]TechData{ Maven: { - indicators: []string{"pom.xml"}, - ciSetupSupport: true, - packageDescriptor: "pom.xml", - execCommand: "mvn", + indicators: []string{"pom.xml"}, + ciSetupSupport: true, + packageDescriptor: "pom.xml", + execCommand: "mvn", + applicabilityScannable: true, }, Gradle: { - indicators: []string{".gradle", ".gradle.kts"}, - ciSetupSupport: true, - packageDescriptor: "build.gradle, build.gradle.kts", + indicators: []string{".gradle", ".gradle.kts"}, + ciSetupSupport: true, + packageDescriptor: "build.gradle, build.gradle.kts", + applicabilityScannable: true, }, Npm: { indicators: []string{"package.json", "package-lock.json", "npm-shrinkwrap.json"}, @@ -70,12 +74,13 @@ var technologiesData = map[Technology]TechData{ formal: string(Npm), packageVersionOperator: "@", packageInstallationCommand: "install", + applicabilityScannable: true, }, Yarn: { - indicators: []string{".yarnrc.yml", "yarn.lock", ".yarn"}, - packageDescriptor: "package.json", - packageVersionOperator: "@", - packageInstallationCommand: "up", + indicators: []string{".yarnrc.yml", "yarn.lock", ".yarn"}, + packageDescriptor: "package.json", + packageVersionOperator: "@", + applicabilityScannable: true, }, Go: { indicators: []string{"go.mod"}, @@ -84,9 +89,10 @@ var technologiesData = map[Technology]TechData{ packageInstallationCommand: "get", }, Pip: { - packageType: Pypi, - indicators: []string{"setup.py", "requirements.txt"}, - exclude: []string{"Pipfile", "Pipfile.lock", "pyproject.toml", "poetry.lock"}, + packageType: Pypi, + indicators: []string{"setup.py", "requirements.txt"}, + exclude: []string{"Pipfile", "Pipfile.lock", "pyproject.toml", "poetry.lock"}, + applicabilityScannable: true, }, Pipenv: { packageType: Pypi, @@ -94,12 +100,14 @@ var technologiesData = map[Technology]TechData{ packageDescriptor: "Pipfile", packageVersionOperator: "==", packageInstallationCommand: "install", + applicabilityScannable: true, }, Poetry: { packageType: Pypi, indicators: []string{"pyproject.toml", "poetry.lock"}, packageInstallationCommand: "add", packageVersionOperator: "==", + applicabilityScannable: true, }, Nuget: { indicators: []string{".sln", ".csproj"}, @@ -151,10 +159,14 @@ func (tech Technology) GetPackageOperator() string { return technologiesData[tech].packageVersionOperator } -func (tech Technology) GetPackageInstallOperator() string { +func (tech Technology) GetPackageInstallationCommand() string { return technologiesData[tech].packageInstallationCommand } +func (tech Technology) ApplicabilityScannable() bool { + return technologiesData[tech].applicabilityScannable +} + // DetectTechnologies tries to detect all technologies types according to the files in the given path. // 'isCiSetup' will limit the search of possible techs to Maven, Gradle, and npm. // 'recursive' will determine if the search will be limited to files in the root path or not. @@ -205,24 +217,11 @@ func detectTechnologiesByFilePaths(paths []string, isCiSetup bool) (detected map return detected } -// DetectTechnologiesToString returns a string that includes all the names of the detected technologies separated by a comma. -func DetectedTechnologiesToString(detected map[Technology]bool) string { - keys := DetectedTechnologiesToSlice(detected) - if len(keys) > 0 { - detectedTechnologiesString := strings.Join(keys, ", ") - detectedTechnologiesString += "." 
- return detectedTechnologiesString - } - return "" -} - // DetectedTechnologiesToSlice returns a string slice that includes all the names of the detected technologies. func DetectedTechnologiesToSlice(detected map[Technology]bool) []string { - keys := make([]string, len(detected)) - i := 0 + keys := make([]string, 0, len(detected)) for tech := range detected { - keys[i] = string(tech) - i++ + keys = append(keys, string(tech)) } return keys } @@ -240,3 +239,12 @@ func GetAllTechnologiesList() (technologies []Technology) { } return } + +func ContainsApplicabilityScannableTech(technologies []Technology) bool { + for _, technology := range technologies { + if technology.ApplicabilityScannable() { + return true + } + } + return false +} diff --git a/utils/coreutils/techutils_test.go b/utils/coreutils/techutils_test.go index ebb636f06..943813177 100644 --- a/utils/coreutils/techutils_test.go +++ b/utils/coreutils/techutils_test.go @@ -30,3 +30,20 @@ func TestDetectTechnologiesByFilePaths(t *testing.T) { }) } } + +func TestContainsApplicabilityScannableTech(t *testing.T) { + tests := []struct { + name string + technologies []Technology + want bool + }{ + {name: "contains supported and unsupported techs", technologies: []Technology{Nuget, Go, Npm}, want: true}, + {name: "contains supported techs only", technologies: []Technology{Maven, Yarn, Npm}, want: true}, + {name: "contains unsupported techs only", technologies: []Technology{Dotnet, Nuget, Go}, want: false}, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, ContainsApplicabilityScannableTech(tt.technologies)) + }) + } +} diff --git a/utils/coreutils/utils.go b/utils/coreutils/utils.go index 60377a192..a7bae4e0f 100644 --- a/utils/coreutils/utils.go +++ b/utils/coreutils/utils.go @@ -2,6 +2,7 @@ package coreutils import ( "bytes" + "errors" "fmt" "os" "os/exec" @@ -15,7 +16,6 @@ import ( "github.com/jfrog/jfrog-client-go/utils/errorutils" "github.com/jfrog/jfrog-client-go/utils/io/fileutils" "github.com/jfrog/jfrog-client-go/utils/log" - "github.com/pkg/errors" ) const ( @@ -121,8 +121,9 @@ func PanicOnError(err error) error { } func ExitOnErr(err error) { - if err, ok := err.(CliError); ok { - traceExit(err.ExitCode, err) + var cliError CliError + if errors.As(err, &cliError) { + traceExit(cliError.ExitCode, err) } if exitCode := GetExitCode(err, 0, 0, false); exitCode != ExitCodeNoError { traceExit(exitCode, err) @@ -153,7 +154,8 @@ func GetExitCode(err error, success, failed int, failNoOp bool) ExitCode { // We would like to return a regular error instead of ExitError, // because some frameworks (such as codegangsta used by JFrog CLI) automatically exit when this error is returned. 
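Both ExitOnErr above and ConvertExitCodeError below switch from type assertions to errors.As. A minimal illustration of the difference (MyExitError and demoErrorsAs are stand-ins, not part of this change; assumes the standard errors and fmt imports):

type MyExitError struct{ Code int }

func (e *MyExitError) Error() string { return fmt.Sprintf("exit code %d", e.Code) }

func demoErrorsAs() {
	wrapped := fmt.Errorf("command failed: %w", &MyExitError{Code: 2})
	var exitErr *MyExitError
	fmt.Println(errors.As(wrapped, &exitErr)) // true: errors.As walks the %w chain
	_, ok := wrapped.(*MyExitError)
	fmt.Println(ok) // false: a bare type assertion only sees the outer wrapper
}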
func ConvertExitCodeError(err error) error { - if _, ok := err.(*exec.ExitError); ok { + var exitError *exec.ExitError + if errors.As(err, &exitError) { err = errors.New(err.Error()) } return err @@ -570,12 +572,12 @@ func GetServerIdAndRepo(remoteEnv string) (serverID string, repoName string, err serverAndRepo := os.Getenv(remoteEnv) if serverAndRepo == "" { log.Debug(remoteEnv, "is not set") - return "", "", nil + return } // The serverAndRepo is in the form of '/' - serverID, repoName, seperatorExists := strings.Cut(serverAndRepo, "/") + serverID, repoName, separatorExists := strings.Cut(serverAndRepo, "/") // Check that the format is valid - if !seperatorExists || repoName == "" || serverID == "" { + if !separatorExists || repoName == "" || serverID == "" { err = errorutils.CheckErrorf("'%s' environment variable is '%s' but should be '/'", remoteEnv, serverAndRepo) } return diff --git a/utils/reposnapshot/node.go b/utils/reposnapshot/node.go index bfd92d634..eadeb3bb4 100644 --- a/utils/reposnapshot/node.go +++ b/utils/reposnapshot/node.go @@ -2,10 +2,11 @@ package reposnapshot import ( "encoding/json" - "github.com/jfrog/jfrog-client-go/utils/errorutils" "os" "path" "sync" + + "github.com/jfrog/jfrog-client-go/utils/errorutils" ) // Represents a directory in the repo state snapshot. @@ -220,7 +221,6 @@ func (node *Node) IsDoneExploring() (doneExploring bool, err error) { func (node *Node) RestartExploring() error { return node.action(func(node *Node) error { node.NodeStatus = Exploring - node.children = nil node.filesCount = 0 return nil }) @@ -228,23 +228,33 @@ func (node *Node) RestartExploring() error { // Recursively find the node matching the path represented by the dirs array. // The search is done by comparing the children of each node path, till reaching the final node in the array. -// If the node is not found, nil is returned. +// If the node is not found, it is added and then returned. // For example: // For a structure such as repo->dir1->dir2->dir3 // The initial call will be to the root, and for an input of ({"dir1","dir2"}), and the final output will be a pointer to dir2. func (node *Node) findMatchingNode(childrenDirs []string) (matchingNode *Node, err error) { err = node.action(func(node *Node) error { + // The node was found in the cache. Let's return it. if len(childrenDirs) == 0 { matchingNode = node return nil } + + // Check if any of the current node's children are parents of the current node. for i := range node.children { if node.children[i].name == childrenDirs[0] { matchingNode, err = node.children[i].findMatchingNode(childrenDirs[1:]) return err } } - return nil + + // None of the current node's children are parents of the current node. + // This means we need to start creating the searched node parents. 
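In other words, a lookup can no longer miss. A sketch of the resulting behaviour from inside the reposnapshot package (the directory names and demoFindMatchingNode are hypothetical):

func demoFindMatchingNode() (*Node, error) {
	root := CreateNewNode(".", nil)
	// Previously a miss returned nil; now the missing "dir1" and "dir2" nodes are
	// created on the way down, and the "dir2" node is returned.
	return root.findMatchingNode([]string{"dir1", "dir2"})
}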
+ newNode := CreateNewNode(childrenDirs[0], node) + newNode.parent = node + node.children = append(node.children, newNode) + matchingNode, err = newNode.findMatchingNode(childrenDirs[1:]) + return err }) return } diff --git a/utils/reposnapshot/snapshotmanager_test.go b/utils/reposnapshot/snapshotmanager_test.go index 0b225b2d6..06681515a 100644 --- a/utils/reposnapshot/snapshotmanager_test.go +++ b/utils/reposnapshot/snapshotmanager_test.go @@ -121,9 +121,6 @@ func TestLookUpNodeAndActualPath(t *testing.T) { {"dir on root", "2", false}, {"complex path with separator suffix", "1/a/", false}, {"complex path with no separator suffix", "1/a", false}, - {"repository provided", path.Join("test-local", "2"), true}, - {"relative path includes root", "./2", true}, - {"dir doesn't exist", "no/where", true}, {"empty path", "", true}, } @@ -187,22 +184,6 @@ func createNodeBase(t *testing.T, name string, filesCount int, parent *Node) *No return node } -func TestAddChildNode(t *testing.T) { - root := CreateNewNode(".", nil) - // Add child with no children pool. - addAndAssertChild(t, nil, root, CreateNewNode("no-pool", root)) - // Add child with empty children pool. - addAndAssertChild(t, []*Node{}, root, CreateNewNode("empty-pool", root)) - // Add child with pool. - exists := CreateNewNode("exists", root) - addAndAssertChild(t, []*Node{exists}, root, exists) -} - -func addAndAssertChild(t *testing.T, childrenPool []*Node, root, expectedChild *Node) { - assert.NoError(t, root.AddChildNode(expectedChild.name, childrenPool)) - assert.Equal(t, expectedChild, getChild(root, expectedChild.name)) -} - func getChild(node *Node, childName string) *Node { for _, child := range node.children { if child.name == childName { diff --git a/xray/audit/commonutils.go b/xray/audit/commonutils.go index a50356dc7..b2f79997b 100644 --- a/xray/audit/commonutils.go +++ b/xray/audit/commonutils.go @@ -24,17 +24,22 @@ import ( ) func BuildXrayDependencyTree(treeHelper map[string][]string, nodeId string) *xrayUtils.GraphNode { - return buildXrayDependencyTree(treeHelper, []string{nodeId}) + exceededDepthCounter := 0 + xrayDependencyTree := buildXrayDependencyTree(treeHelper, []string{nodeId}, &exceededDepthCounter) + if exceededDepthCounter > 0 { + log.Debug("buildXrayDependencyTree exceeded max tree depth", exceededDepthCounter, "times") + } + return xrayDependencyTree } -func buildXrayDependencyTree(treeHelper map[string][]string, impactPath []string) *xrayUtils.GraphNode { +func buildXrayDependencyTree(treeHelper map[string][]string, impactPath []string, exceededDepthCounter *int) *xrayUtils.GraphNode { nodeId := impactPath[len(impactPath)-1] // Initialize the new node xrDependencyTree := &xrayUtils.GraphNode{} xrDependencyTree.Id = nodeId xrDependencyTree.Nodes = []*xrayUtils.GraphNode{} if len(impactPath) >= buildinfo.RequestedByMaxLength { - log.Debug("buildXrayDependencyTree exceeded max tree depth") + *exceededDepthCounter++ return xrDependencyTree } // Recursively create & append all node's dependencies. 
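The shared counter is what keeps the log quiet: every pruned branch increments the same int through the pointer, and the debug line is printed once per tree. A compact usage sketch (the component ids and demoDepthCounter are made up; assumes the log import used in this file):

func demoDepthCounter() {
	// Parent id -> child ids, as expected by the tree helper.
	treeHelper := map[string][]string{
		"gav://org.example:root:1.0": {"gav://org.example:mid:1.0"},
		"gav://org.example:mid:1.0":  {"gav://org.example:leaf:1.0"},
	}
	// Branches deeper than buildinfo.RequestedByMaxLength are pruned; each pruned
	// branch bumps the counter, and the debug message above is logged a single time.
	tree := BuildXrayDependencyTree(treeHelper, "gav://org.example:root:1.0")
	log.Debug("built tree with", len(tree.Nodes), "direct children")
}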
@@ -43,17 +48,12 @@ func buildXrayDependencyTree(treeHelper map[string][]string, impactPath []string if slices.Contains(impactPath, dependency) { continue } - xrDependencyTree.Nodes = append(xrDependencyTree.Nodes, buildXrayDependencyTree(treeHelper, append(impactPath, dependency))) + xrDependencyTree.Nodes = append(xrDependencyTree.Nodes, buildXrayDependencyTree(treeHelper, append(impactPath, dependency), exceededDepthCounter)) } return xrDependencyTree } -func Audit(modulesDependencyTrees []*xrayUtils.GraphNode, progress ioUtils.ProgressMgr, technology coreutils.Technology, scanGraphParams *xraycommands.ScanGraphParams) (results []services.ScanResponse, err error) { - if len(modulesDependencyTrees) == 0 { - err = errorutils.CheckErrorf("No dependencies were found. Please try to build your project and re-run the audit command.") - return - } - +func RunXrayDependenciesTreeScanGraph(modulesDependencyTrees []*xrayUtils.GraphNode, progress ioUtils.ProgressMgr, technology coreutils.Technology, scanGraphParams *xraycommands.ScanGraphParams) (results []services.ScanResponse, err error) { if progress != nil { progress.SetHeadlineMsg("Scanning for vulnerabilities") } @@ -64,7 +64,7 @@ func Audit(modulesDependencyTrees []*xrayUtils.GraphNode, progress ioUtils.Progr if progress != nil { progress.SetHeadlineMsg(scanMessage) } - log.Info(scanMessage, "...") + log.Info(scanMessage + "...") var scanResults *services.ScanResponse scanResults, err = xraycommands.RunScanGraphAndGetResults(scanGraphParams) if err != nil { @@ -205,41 +205,9 @@ func updateComponentsWithImpactPaths(components map[string]services.Component, i func setPathsForIssues(dependency *xrayUtils.GraphNode, issuesImpactPathsMap map[string]*services.Component, pathFromRoot []services.ImpactPathNode) { pathFromRoot = append(pathFromRoot, services.ImpactPathNode{ComponentId: dependency.Id}) if _, exists := issuesImpactPathsMap[dependency.Id]; exists { - appendPath(issuesImpactPathsMap, dependency, pathFromRoot) + issuesImpactPathsMap[dependency.Id].ImpactPaths = append(issuesImpactPathsMap[dependency.Id].ImpactPaths, pathFromRoot) } for _, depChild := range dependency.Nodes { setPathsForIssues(depChild, issuesImpactPathsMap, pathFromRoot) } } - -// Appends paths to impact paths tree. -// When we have multiple paths to the same CVE, the following logic applies: -// If we have a direct path for the vulnerable dependency, show only the direct paths, as fixing it will resolve all the vulnerabilities. -// If we have multiple different paths to an indirect dependency, show all possible paths. -func appendPath(currentTree map[string]*services.Component, dependency *xrayUtils.GraphNode, pathFromRoot []services.ImpactPathNode) { - if len(currentTree[dependency.Id].ImpactPaths) == 0 { - currentTree[dependency.Id].ImpactPaths = append(currentTree[dependency.Id].ImpactPaths, pathFromRoot) - return - } - currentHasDirectPath := atLeastOneDirectPath(currentTree[dependency.Id].ImpactPaths) - suggestHasDirectPath := atLeastOneDirectPath([][]services.ImpactPathNode{pathFromRoot}) - // If neither the current path nor the suggested path is direct, append the suggested path. - if !currentHasDirectPath && !suggestHasDirectPath { - currentTree[dependency.Id].ImpactPaths = append(currentTree[dependency.Id].ImpactPaths, pathFromRoot) - return - } - // If the current path is not direct but a direct path is found, overwrite the existing path. 
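Since appendPath is removed, setPathsForIssues now records every path it discovers. A worked example of the resulting shape from inside the audit package (component ids and demoImpactPaths are invented):

func demoImpactPaths() map[string]*services.Component {
	root := &xrayUtils.GraphNode{Id: "root", Nodes: []*xrayUtils.GraphNode{
		{Id: "direct-dep", Nodes: []*xrayUtils.GraphNode{{Id: "vulnerable-dep"}}},
		{Id: "vulnerable-dep"},
	}}
	issues := map[string]*services.Component{"vulnerable-dep": {}}
	setPathsForIssues(root, issues, []services.ImpactPathNode{})
	// issues["vulnerable-dep"].ImpactPaths now holds both discovered paths:
	//   root -> direct-dep -> vulnerable-dep
	//   root -> vulnerable-dep
	return issues
}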
- if !currentHasDirectPath && suggestHasDirectPath { - currentTree[dependency.Id].ImpactPaths[0] = pathFromRoot - return - } -} - -func atLeastOneDirectPath(nodes [][]services.ImpactPathNode) bool { - for index := range nodes { - if len(nodes[index]) == 2 { - return true - } - } - return false -} diff --git a/xray/audit/commonutils_test.go b/xray/audit/commonutils_test.go index 008f354ad..db4e2158c 100644 --- a/xray/audit/commonutils_test.go +++ b/xray/audit/commonutils_test.go @@ -41,53 +41,6 @@ func TestSetPathsForIssues(t *testing.T) { assert.Equal(t, issuesMap["child5"].ImpactPaths[0][2].ComponentId, "child5") } -// In the edge case where we have the same CVE with direct & indirect dependency, -// we want to show only the direct path, as it will fix both problems -func TestSetPathsForIssuesAvoidsDuplicates_RemovePath(t *testing.T) { - rootNode := &xrayUtils.GraphNode{Id: "root"} - childNode1 := &xrayUtils.GraphNode{Id: "child4"} - childNode2 := &xrayUtils.GraphNode{Id: "child2", Nodes: []*xrayUtils.GraphNode{{Id: "child3", Nodes: []*xrayUtils.GraphNode{{Id: "child4"}}}}} - rootNode.Nodes = []*xrayUtils.GraphNode{childNode1, childNode2} - - issuesMap := make(map[string]*services.Component) - issuesMap["child4"] = &services.Component{ImpactPaths: [][]services.ImpactPathNode{}} - - setPathsForIssues(rootNode, issuesMap, []services.ImpactPathNode{}) - - assert.Equal(t, "root", issuesMap["child4"].ImpactPaths[0][0].ComponentId) - assert.Equal(t, "child4", issuesMap["child4"].ImpactPaths[0][1].ComponentId) - assert.Len(t, issuesMap["child4"].ImpactPaths, 1) - assert.Len(t, issuesMap["child4"].ImpactPaths[0], 2) -} - -// This verifies that we are not removing unwanted paths -// If we have multiple paths for the same vulnerable indirect dependency, show all the paths. 
-func TestSetPathsForIssuesAvoidsDuplicates_AppendPath(t *testing.T) { - rootNode := &xrayUtils.GraphNode{Id: "root"} - childNode1 := &xrayUtils.GraphNode{Id: "child1"} - childNode2 := &xrayUtils.GraphNode{Id: "child2"} - childNode3 := &xrayUtils.GraphNode{Id: "child3"} - childNode4 := &xrayUtils.GraphNode{Id: "child4"} - childNode5 := &xrayUtils.GraphNode{Id: "child5"} - - rootNode.Nodes = []*xrayUtils.GraphNode{childNode1, childNode2} - childNode1.Nodes = []*xrayUtils.GraphNode{childNode4, childNode5} - childNode2.Nodes = []*xrayUtils.GraphNode{childNode3, childNode5} - - issuesMap := make(map[string]*services.Component) - issuesMap["child5"] = &services.Component{ImpactPaths: [][]services.ImpactPathNode{}} - - setPathsForIssues(rootNode, issuesMap, []services.ImpactPathNode{}) - - assert.Equal(t, "root", issuesMap["child5"].ImpactPaths[0][0].ComponentId) - assert.Equal(t, "child1", issuesMap["child5"].ImpactPaths[0][1].ComponentId) - assert.Equal(t, "child5", issuesMap["child5"].ImpactPaths[0][2].ComponentId) - - assert.Equal(t, "root", issuesMap["child5"].ImpactPaths[1][0].ComponentId) - assert.Equal(t, "child2", issuesMap["child5"].ImpactPaths[1][1].ComponentId) - assert.Equal(t, "child5", issuesMap["child5"].ImpactPaths[1][2].ComponentId) -} - func TestUpdateVulnerableComponent(t *testing.T) { // Create test data components := map[string]services.Component{ diff --git a/xray/audit/jas/applicabilitymanager.go b/xray/audit/jas/applicabilitymanager.go index 818a66481..4c99df9cc 100644 --- a/xray/audit/jas/applicabilitymanager.go +++ b/xray/audit/jas/applicabilitymanager.go @@ -1,33 +1,21 @@ package jas import ( - "errors" - "fmt" - "github.com/jfrog/jfrog-cli-core/v2/utils/config" + "github.com/jfrog/gofrog/datastructures" "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" "github.com/jfrog/jfrog-cli-core/v2/xray/utils" "github.com/jfrog/jfrog-client-go/utils/errorutils" - "github.com/jfrog/jfrog-client-go/utils/io/fileutils" "github.com/jfrog/jfrog-client-go/utils/log" "github.com/jfrog/jfrog-client-go/xray/services" - xrayUtils "github.com/jfrog/jfrog-client-go/xray/services/utils" "github.com/owenrumney/go-sarif/v2/sarif" - "gopkg.in/yaml.v2" - "os" - "path/filepath" + "golang.org/x/exp/maps" + "golang.org/x/exp/slices" "strings" ) const ( - ApplicabilityFeatureId = "contextual_analysis" - applicabilityScanType = "analyze-applicability" - applicabilityScanFailureMessage = "failed to run applicability scan. Cause: %s" - applicabilityScanCommand = "ca" -) - -var ( - technologiesEligibleForApplicabilityScan = []coreutils.Technology{coreutils.Npm, coreutils.Pip, - coreutils.Poetry, coreutils.Pipenv, coreutils.Pypi} + applicabilityScanType = "analyze-applicability" + applicabilityScanCommand = "ca" ) // The getApplicabilityScanResults function runs the applicability scan flow, which includes the following steps: @@ -39,169 +27,101 @@ var ( // map[string]string: A map containing the applicability result of each XRAY CVE. // bool: true if the user is entitled to the applicability scan, false otherwise. // error: An error object (if any). 
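The reworked entry point below receives the shared AdvancedSecurityScanner instead of creating its own temp dir and analyzer manager. A sketch of the new call shape from inside the jas package (demoApplicabilityFlow and its inputs are placeholders; assumes the errors, config, coreutils and services imports):

func demoApplicabilityFlow(xrayResults []services.ScanResponse, directDeps []string,
	techs []coreutils.Technology, serverDetails *config.ServerDetails) (results map[string]string, err error) {
	scanner, err := NewAdvancedSecurityScanner(nil, serverDetails)
	if err != nil {
		return
	}
	defer func() {
		if scanner.scannerDirCleanupFunc != nil {
			err = errors.Join(err, scanner.scannerDirCleanupFunc())
		}
	}()
	// Returns a CVE id -> applicability verdict map, or nil when none of the
	// scanned technologies support contextual analysis.
	results, err = getApplicabilityScanResults(xrayResults, directDeps, techs, scanner)
	return
}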
-func getApplicabilityScanResults(results []services.ScanResponse, dependencyTrees []*xrayUtils.GraphNode, - serverDetails *config.ServerDetails, analyzerManager utils.AnalyzerManagerInterface) (map[string]string, bool, error) { - applicabilityScanManager, cleanupFunc, err := newApplicabilityScanManager(results, dependencyTrees, serverDetails, analyzerManager) - if err != nil { - return nil, false, fmt.Errorf(applicabilityScanFailureMessage, err.Error()) - } - defer func() { - if cleanupFunc != nil { - err = errors.Join(err, cleanupFunc()) - } - }() - if !applicabilityScanManager.eligibleForApplicabilityScan() { - log.Debug("The conditions for running the applicability scan are not met. Skipping the execution of the Analyzer Manager") - return nil, false, nil - } - if err = applicabilityScanManager.run(); err != nil { - if utils.IsNotEntitledError(err) || utils.IsUnsupportedCommandError(err) { - return nil, false, nil - } - return nil, true, fmt.Errorf(applicabilityScanFailureMessage, err.Error()) - } - return applicabilityScanManager.applicabilityScanResults, true, nil -} - -// Applicability scan is relevant only to specific programming languages (the languages in this list: -// technologiesEligibleForApplicabilityScan). therefore, the applicability scan will not be performed on projects that -// do not contain those technologies. -// resultsIncludeEligibleTechnologies() runs over the xray scan results, and check if at least one of them is one of -// the techs on technologiesEligibleForApplicabilityScan. otherwise, the applicability scan will not be executed. -func resultsIncludeEligibleTechnologies(xrayVulnerabilities []services.Vulnerability, xrayViolations []services.Violation) bool { - for _, vuln := range xrayVulnerabilities { - for _, technology := range technologiesEligibleForApplicabilityScan { - if vuln.Technology == technology.ToString() { - return true - } - } +func getApplicabilityScanResults(xrayResults []services.ScanResponse, directDependencies []string, + scannedTechnologies []coreutils.Technology, scanner *AdvancedSecurityScanner) (results map[string]string, err error) { + applicabilityScanManager := newApplicabilityScanManager(xrayResults, directDependencies, scanner) + if !applicabilityScanManager.shouldRunApplicabilityScan(scannedTechnologies) { + log.Debug("The technologies that have been scanned are currently not supported for contextual analysis scanning, or we couldn't find any vulnerable direct dependencies. 
Skipping....") + return } - for _, violation := range xrayViolations { - for _, technology := range technologiesEligibleForApplicabilityScan { - if violation.Technology == technology.ToString() { - return true - } - } + if err = applicabilityScanManager.scanner.Run(applicabilityScanManager); err != nil { + err = utils.ParseAnalyzerManagerError(utils.Applicability, err) + return } - return false + results = applicabilityScanManager.applicabilityScanResults + return } type ApplicabilityScanManager struct { applicabilityScanResults map[string]string - xrayVulnerabilities []services.Vulnerability - xrayViolations []services.Violation + directDependenciesCves *datastructures.Set[string] xrayResults []services.ScanResponse - configFileName string - resultsFileName string - analyzerManager utils.AnalyzerManagerInterface - serverDetails *config.ServerDetails + scanner *AdvancedSecurityScanner } -func newApplicabilityScanManager(xrayScanResults []services.ScanResponse, dependencyTrees []*xrayUtils.GraphNode, - serverDetails *config.ServerDetails, analyzerManager utils.AnalyzerManagerInterface) (manager *ApplicabilityScanManager, cleanup func() error, err error) { - directDependencies := getDirectDependenciesList(dependencyTrees) - tempDir, err := fileutils.CreateTempDir() - if err != nil { - return - } - cleanup = func() error { - return fileutils.RemoveTempDir(tempDir) - } +func newApplicabilityScanManager(xrayScanResults []services.ScanResponse, directDependencies []string, scanner *AdvancedSecurityScanner) (manager *ApplicabilityScanManager) { + directDependenciesCves := extractDirectDependenciesCvesFromScan(xrayScanResults, directDependencies) return &ApplicabilityScanManager{ applicabilityScanResults: map[string]string{}, - xrayVulnerabilities: extractXrayDirectVulnerabilities(xrayScanResults, directDependencies), - xrayViolations: extractXrayDirectViolations(xrayScanResults, directDependencies), - configFileName: filepath.Join(tempDir, "config.yaml"), - resultsFileName: filepath.Join(tempDir, "results.sarif"), + directDependenciesCves: directDependenciesCves, xrayResults: xrayScanResults, - analyzerManager: analyzerManager, - serverDetails: serverDetails, - }, cleanup, nil -} - -func (a *ApplicabilityScanManager) eligibleForApplicabilityScan() bool { - return resultsIncludeEligibleTechnologies(getXrayVulnerabilities(a.xrayResults), getXrayViolations(a.xrayResults)) -} - -// This function gets a liat of xray scan responses that contains direct and indirect violations, and returns only direct -// violation of the scanned project, ignoring indirect violations -func extractXrayDirectViolations(xrayScanResults []services.ScanResponse, directDependencies []string) []services.Violation { - xrayViolationsDirectDependency := []services.Violation{} - for _, violation := range getXrayViolations(xrayScanResults) { - for _, dep := range directDependencies { - if _, ok := violation.Components[dep]; ok { - xrayViolationsDirectDependency = append(xrayViolationsDirectDependency, violation) - } - } + scanner: scanner, } - return xrayViolationsDirectDependency } -// This function gets a liat of xray scan responses that contains direct and indirect vulnerabilities, and returns only direct +// This function gets a list of xray scan responses that contain direct and indirect vulnerabilities and returns only direct // vulnerabilities of the scanned project, ignoring indirect vulnerabilities -func extractXrayDirectVulnerabilities(xrayScanResults []services.ScanResponse, directDependencies []string) 
[]services.Vulnerability { - xrayVulnerabilitiesDirectDependency := []services.Vulnerability{} - for _, vulnerability := range getXrayVulnerabilities(xrayScanResults) { - for _, dep := range directDependencies { - if _, ok := vulnerability.Components[dep]; ok { - xrayVulnerabilitiesDirectDependency = append(xrayVulnerabilitiesDirectDependency, vulnerability) +func extractDirectDependenciesCvesFromScan(xrayScanResults []services.ScanResponse, directDependencies []string) *datastructures.Set[string] { + directsCves := datastructures.MakeSet[string]() + for _, scanResult := range xrayScanResults { + for _, vulnerability := range scanResult.Vulnerabilities { + if isDirectComponents(maps.Keys(vulnerability.Components), directDependencies) { + for _, cve := range vulnerability.Cves { + if cve.Id != "" { + directsCves.Add(cve.Id) + } + } } } - } - return xrayVulnerabilitiesDirectDependency -} - -// This function gets the dependencies tress of the scanned project, and extract a list containing only directed -// dependencies node ids. -func getDirectDependenciesList(dependencyTrees []*xrayUtils.GraphNode) []string { - directDependencies := []string{} - for _, tree := range dependencyTrees { - for _, node := range tree.Nodes { - directDependencies = append(directDependencies, node.Id) + for _, violation := range scanResult.Violations { + if isDirectComponents(maps.Keys(violation.Components), directDependencies) { + for _, cve := range violation.Cves { + if cve.Id != "" { + directsCves.Add(cve.Id) + } + } + } } } - return directDependencies -} -// Gets xray scan response and returns only the vulnerabilities part of it -func getXrayVulnerabilities(xrayScanResults []services.ScanResponse) []services.Vulnerability { - xrayVulnerabilities := []services.Vulnerability{} - for _, result := range xrayScanResults { - xrayVulnerabilities = append(xrayVulnerabilities, result.Vulnerabilities...) - } - return xrayVulnerabilities + return directsCves } -// Gets xray scan response and returns only the violations part of it -func getXrayViolations(xrayScanResults []services.ScanResponse) []services.Violation { - xrayViolations := []services.Violation{} - for _, result := range xrayScanResults { - xrayViolations = append(xrayViolations, result.Violations...) 
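A worked example of extractDirectDependenciesCvesFromScan above, from inside the jas package (the ids and demoDirectCves are invented):

func demoDirectCves() []string {
	scan := []services.ScanResponse{{
		Vulnerabilities: []services.Vulnerability{
			{Cves: []services.Cve{{Id: "CVE-2022-1111"}}, Components: map[string]services.Component{"npm://direct-dep:1.0.0": {}}},
			{Cves: []services.Cve{{Id: "CVE-2022-2222"}}, Components: map[string]services.Component{"npm://transitive-dep:2.0.0": {}}},
		},
	}}
	// Only CVEs attached to at least one direct component are collected, and the
	// set de-duplicates ids that show up in several vulnerabilities or violations.
	cves := extractDirectDependenciesCvesFromScan(scan, []string{"npm://direct-dep:1.0.0"})
	return cves.ToSlice() // ["CVE-2022-1111"]
}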
+func isDirectComponents(components []string, directDependencies []string) bool { + for _, component := range components { + if slices.Contains(directDependencies, component) { + return true + } } - return xrayViolations + return false } -func (a *ApplicabilityScanManager) run() (err error) { - defer func() { - if deleteJasProcessFiles(a.configFileName, a.resultsFileName) != nil { - deleteFilesError := deleteJasProcessFiles(a.configFileName, a.resultsFileName) - err = errors.Join(err, deleteFilesError) - } - }() - if !a.directDependenciesExist() { - return nil +func (a *ApplicabilityScanManager) Run(wd string) (err error) { + if len(a.scanner.workingDirs) > 1 { + log.Info("Running applicability scanning in the", wd, "directory...") + } else { + log.Info("Running applicability scanning...") } - if err = a.createConfigFile(); err != nil { + if err = a.createConfigFile(wd); err != nil { return } if err = a.runAnalyzerManager(); err != nil { return } - return a.setScanResults() + var workingDirResults map[string]string + workingDirResults, err = a.getScanResults() + for cve, result := range workingDirResults { + a.applicabilityScanResults[cve] = result + } + return } func (a *ApplicabilityScanManager) directDependenciesExist() bool { - return len(createCveList(a.xrayVulnerabilities, a.xrayViolations)) > 0 + return a.directDependenciesCves.Size() > 0 +} + +func (a *ApplicabilityScanManager) shouldRunApplicabilityScan(technologies []coreutils.Technology) bool { + return a.directDependenciesExist() && coreutils.ContainsApplicabilityScannableTech(technologies) } type applicabilityScanConfig struct { @@ -217,86 +137,52 @@ type scanConfiguration struct { SkippedDirs []string `yaml:"skipped-folders"` } -func (a *ApplicabilityScanManager) createConfigFile() error { - currentDir, err := coreutils.GetWorkingDirectory() - if err != nil { - return err - } - cveWhiteList := utils.RemoveDuplicateValues(createCveList(a.xrayVulnerabilities, a.xrayViolations)) +func (a *ApplicabilityScanManager) createConfigFile(workingDir string) error { configFileContent := applicabilityScanConfig{ Scans: []scanConfiguration{ { - Roots: []string{currentDir}, - Output: a.resultsFileName, + Roots: []string{workingDir}, + Output: a.scanner.resultsFileName, Type: applicabilityScanType, GrepDisable: false, - CveWhitelist: cveWhiteList, + CveWhitelist: a.directDependenciesCves.ToSlice(), SkippedDirs: skippedDirs, }, }, } - yamlData, err := yaml.Marshal(&configFileContent) - if errorutils.CheckError(err) != nil { - return err - } - err = os.WriteFile(a.configFileName, yamlData, 0644) - return errorutils.CheckError(err) + return createScannersConfigFile(a.scanner.configFileName, configFileContent) } // Runs the analyzerManager app and returns a boolean to indicate whether the user is entitled for // advance security feature func (a *ApplicabilityScanManager) runAnalyzerManager() error { - if err := utils.SetAnalyzerManagerEnvVariables(a.serverDetails); err != nil { - return err - } - return a.analyzerManager.Exec(a.configFileName, applicabilityScanCommand) + return a.scanner.analyzerManager.Exec(a.scanner.configFileName, applicabilityScanCommand, a.scanner.serverDetails) } -func (a *ApplicabilityScanManager) setScanResults() error { - report, err := sarif.Open(a.resultsFileName) +func (a *ApplicabilityScanManager) getScanResults() (map[string]string, error) { + report, err := sarif.Open(a.scanner.resultsFileName) if errorutils.CheckError(err) != nil { - return err + return nil, err } var fullVulnerabilitiesList []*sarif.Result if 
len(report.Runs) > 0 { fullVulnerabilitiesList = report.Runs[0].Results } - xrayCves := utils.RemoveDuplicateValues(createCveList(a.xrayVulnerabilities, a.xrayViolations)) - for _, xrayCve := range xrayCves { - a.applicabilityScanResults[xrayCve] = utils.ApplicabilityUndeterminedStringValue + applicabilityScanResults := make(map[string]string) + for _, cve := range a.directDependenciesCves.ToSlice() { + applicabilityScanResults[cve] = utils.ApplicabilityUndeterminedStringValue } for _, vulnerability := range fullVulnerabilitiesList { applicableVulnerabilityName := getVulnerabilityName(*vulnerability.RuleID) if isVulnerabilityApplicable(vulnerability) { - a.applicabilityScanResults[applicableVulnerabilityName] = utils.ApplicableStringValue + applicabilityScanResults[applicableVulnerabilityName] = utils.ApplicableStringValue } else { - a.applicabilityScanResults[applicableVulnerabilityName] = utils.NotApplicableStringValue - } - } - return nil -} - -// This function iterate the direct vulnerabilities and violations of the scanned projects, and creates a string list -// of the CVEs ids. This list will be sent as input to analyzer manager. -func createCveList(xrayVulnerabilities []services.Vulnerability, xrayViolations []services.Violation) []string { - cveWhiteList := []string{} - for _, vulnerability := range xrayVulnerabilities { - for _, cve := range vulnerability.Cves { - if cve.Id != "" { - cveWhiteList = append(cveWhiteList, cve.Id) - } - } - } - for _, violation := range xrayViolations { - for _, cve := range violation.Cves { - if cve.Id != "" { - cveWhiteList = append(cveWhiteList, cve.Id) - } + applicabilityScanResults[applicableVulnerabilityName] = utils.NotApplicableStringValue } } - return cveWhiteList + return applicabilityScanResults, nil } // Gets a result of one CVE from the scanner, and returns true if the CVE is applicable, false otherwise diff --git a/xray/audit/jas/applicabilitymanager_test.go b/xray/audit/jas/applicabilitymanager_test.go index d7c06cbd6..3c8ffc5a8 100644 --- a/xray/audit/jas/applicabilitymanager_test.go +++ b/xray/audit/jas/applicabilitymanager_test.go @@ -1,12 +1,10 @@ package jas import ( - "errors" - "fmt" + rtutils "github.com/jfrog/jfrog-cli-core/v2/artifactory/utils" "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" "github.com/jfrog/jfrog-cli-core/v2/xray/utils" "github.com/jfrog/jfrog-client-go/xray/services" - xrayUtils "github.com/jfrog/jfrog-client-go/xray/services/utils" "github.com/stretchr/testify/assert" "os" "path/filepath" @@ -15,32 +13,47 @@ import ( func TestNewApplicabilityScanManager_InputIsValid(t *testing.T) { // Act - applicabilityManager, _, err := newApplicabilityScanManager(fakeBasicXrayResults, fakeBasicDependencyGraph, &fakeServerDetails, &analyzerManagerMock{}) + assert.NoError(t, rtutils.DownloadAnalyzerManagerIfNeeded()) + scanner, err := NewAdvancedSecurityScanner(nil, &fakeServerDetails) + assert.NoError(t, err) + defer func() { + if scanner.scannerDirCleanupFunc != nil { + assert.NoError(t, scanner.scannerDirCleanupFunc()) + } + }() + applicabilityManager := newApplicabilityScanManager(fakeBasicXrayResults, mockDirectDependencies, scanner) // Assert - assert.NoError(t, err) assert.NotEmpty(t, applicabilityManager) - assert.NotEmpty(t, applicabilityManager.configFileName) - assert.NotEmpty(t, applicabilityManager.resultsFileName) - assert.Equal(t, 1, len(applicabilityManager.xrayVulnerabilities)) - assert.Equal(t, 1, len(applicabilityManager.xrayViolations)) + assert.NotEmpty(t, 
applicabilityManager.scanner.configFileName) + assert.NotEmpty(t, applicabilityManager.scanner.resultsFileName) + assert.Equal(t, applicabilityManager.directDependenciesCves.Size(), 5) } func TestNewApplicabilityScanManager_DependencyTreeDoesntExist(t *testing.T) { // Act - applicabilityManager, _, err := newApplicabilityScanManager(fakeBasicXrayResults, nil, &fakeServerDetails, &analyzerManagerMock{}) + assert.NoError(t, rtutils.DownloadAnalyzerManagerIfNeeded()) + scanner, err := NewAdvancedSecurityScanner(nil, &fakeServerDetails) + assert.NoError(t, err) + defer func() { + if scanner.scannerDirCleanupFunc != nil { + assert.NoError(t, scanner.scannerDirCleanupFunc()) + } + }() + applicabilityManager := newApplicabilityScanManager(fakeBasicXrayResults, nil, scanner) // Assert - assert.NoError(t, err) assert.NotEmpty(t, applicabilityManager) - assert.NotEmpty(t, applicabilityManager.configFileName) - assert.NotEmpty(t, applicabilityManager.resultsFileName) - assert.Empty(t, applicabilityManager.xrayVulnerabilities) - assert.Empty(t, applicabilityManager.xrayViolations) + assert.NotNil(t, applicabilityManager.scanner.scannerDirCleanupFunc) + assert.Len(t, applicabilityManager.scanner.workingDirs, 1) + assert.NotEmpty(t, applicabilityManager.scanner.configFileName) + assert.NotEmpty(t, applicabilityManager.scanner.resultsFileName) + assert.Equal(t, applicabilityManager.directDependenciesCves.Size(), 0) } -func TestNewApplicabilityScanManager_NoDirectDependenciesInTree(t *testing.T) { +func TestNewApplicabilityScanManager_NoDirectDependenciesInScan(t *testing.T) { // Arrange + assert.NoError(t, rtutils.DownloadAnalyzerManagerIfNeeded()) var noDirectDependenciesResults = []services.ScanResponse{ { ScanId: "scanId_1", @@ -48,14 +61,12 @@ func TestNewApplicabilityScanManager_NoDirectDependenciesInTree(t *testing.T) { {IssueId: "issueId_1", Technology: coreutils.Pipenv.ToString(), Cves: []services.Cve{{Id: "testCve1"}, {Id: "testCve2"}, {Id: "testCve3"}}, Components: map[string]services.Component{ - "issueId_1_direct_dependency": {}, "issueId_1_non_direct_dependency": {}}}, }, Violations: []services.Violation{ {IssueId: "issueId_2", Technology: coreutils.Pipenv.ToString(), Cves: []services.Cve{{Id: "testCve4"}, {Id: "testCve5"}}, Components: map[string]services.Component{ - "issueId_2_direct_dependency": {}, "issueId_2_non_direct_dependency": {}}}, }, }, @@ -64,36 +75,47 @@ func TestNewApplicabilityScanManager_NoDirectDependenciesInTree(t *testing.T) { fakeBasicXrayResults[0].Violations[0].Components["issueId_2_non_direct_dependency"] = services.Component{} // Act - applicabilityManager, _, err := newApplicabilityScanManager(noDirectDependenciesResults, fakeBasicDependencyGraph, &fakeServerDetails, &analyzerManagerMock{}) + scanner, err := NewAdvancedSecurityScanner(nil, &fakeServerDetails) + assert.NoError(t, err) + defer func() { + if scanner.scannerDirCleanupFunc != nil { + assert.NoError(t, scanner.scannerDirCleanupFunc()) + } + }() + applicabilityManager := newApplicabilityScanManager(noDirectDependenciesResults, mockDirectDependencies, scanner) // Assert - assert.NoError(t, err) assert.NotEmpty(t, applicabilityManager) - assert.NotEmpty(t, applicabilityManager.configFileName) - assert.NotEmpty(t, applicabilityManager.resultsFileName) + assert.NotEmpty(t, applicabilityManager.scanner.configFileName) + assert.NotEmpty(t, applicabilityManager.scanner.resultsFileName) // Non-direct dependencies should not be added - assert.Equal(t, 1, len(applicabilityManager.xrayVulnerabilities)) - 
assert.Equal(t, 1, len(applicabilityManager.xrayViolations)) + assert.Equal(t, 0, applicabilityManager.directDependenciesCves.Size()) } func TestNewApplicabilityScanManager_MultipleDependencyTrees(t *testing.T) { // Arrange - multipleDependencyTrees := []*xrayUtils.GraphNode{fakeBasicDependencyGraph[0], fakeBasicDependencyGraph[0]} + assert.NoError(t, rtutils.DownloadAnalyzerManagerIfNeeded()) // Act - applicabilityManager, _, err := newApplicabilityScanManager(fakeBasicXrayResults, multipleDependencyTrees, &fakeServerDetails, &analyzerManagerMock{}) + scanner, err := NewAdvancedSecurityScanner(nil, &fakeServerDetails) + assert.NoError(t, err) + defer func() { + if scanner.scannerDirCleanupFunc != nil { + assert.NoError(t, scanner.scannerDirCleanupFunc()) + } + }() + applicabilityManager := newApplicabilityScanManager(fakeBasicXrayResults, mockMultiRootDirectDependencies, scanner) // Assert - assert.NoError(t, err) assert.NotEmpty(t, applicabilityManager) - assert.NotEmpty(t, applicabilityManager.configFileName) - assert.NotEmpty(t, applicabilityManager.resultsFileName) - assert.Equal(t, 2, len(applicabilityManager.xrayVulnerabilities)) - assert.Equal(t, 2, len(applicabilityManager.xrayViolations)) + assert.NotEmpty(t, applicabilityManager.scanner.configFileName) + assert.NotEmpty(t, applicabilityManager.scanner.resultsFileName) + assert.Equal(t, 5, applicabilityManager.directDependenciesCves.Size()) } func TestNewApplicabilityScanManager_ViolationsDontExistInResults(t *testing.T) { // Arrange + assert.NoError(t, rtutils.DownloadAnalyzerManagerIfNeeded()) noViolationScanResponse := []services.ScanResponse{ { ScanId: "scanId_1", @@ -106,19 +128,26 @@ func TestNewApplicabilityScanManager_ViolationsDontExistInResults(t *testing.T) } // Act - applicabilityManager, _, err := newApplicabilityScanManager(noViolationScanResponse, fakeBasicDependencyGraph, &fakeServerDetails, &analyzerManagerMock{}) + scanner, err := NewAdvancedSecurityScanner(nil, &fakeServerDetails) + assert.NoError(t, err) + defer func() { + if scanner.scannerDirCleanupFunc != nil { + assert.NoError(t, scanner.scannerDirCleanupFunc()) + } + }() + applicabilityManager := newApplicabilityScanManager(noViolationScanResponse, mockDirectDependencies, scanner) // Assert assert.NoError(t, err) assert.NotEmpty(t, applicabilityManager) - assert.NotEmpty(t, applicabilityManager.configFileName) - assert.NotEmpty(t, applicabilityManager.resultsFileName) - assert.Equal(t, 1, len(applicabilityManager.xrayVulnerabilities)) - assert.Empty(t, applicabilityManager.xrayViolations) + assert.NotEmpty(t, applicabilityManager.scanner.configFileName) + assert.NotEmpty(t, applicabilityManager.scanner.resultsFileName) + assert.Equal(t, 3, applicabilityManager.directDependenciesCves.Size()) } func TestNewApplicabilityScanManager_VulnerabilitiesDontExist(t *testing.T) { // Arrange + assert.NoError(t, rtutils.DownloadAnalyzerManagerIfNeeded()) noVulnerabilitiesScanResponse := []services.ScanResponse{ { ScanId: "scanId_1", @@ -131,89 +160,59 @@ func TestNewApplicabilityScanManager_VulnerabilitiesDontExist(t *testing.T) { } // Act - applicabilityManager, _, err := newApplicabilityScanManager(noVulnerabilitiesScanResponse, fakeBasicDependencyGraph, &fakeServerDetails, &analyzerManagerMock{}) - - // Assert + scanner, err := NewAdvancedSecurityScanner(nil, &fakeServerDetails) assert.NoError(t, err) - assert.NotEmpty(t, applicabilityManager) - assert.NotEmpty(t, applicabilityManager.configFileName) - assert.NotEmpty(t, applicabilityManager.resultsFileName) - 
assert.Equal(t, 1, len(applicabilityManager.xrayViolations)) - assert.Empty(t, applicabilityManager.xrayVulnerabilities) -} - -func TestApplicabilityScanManager_ShouldRun_AllConditionsMet(t *testing.T) { - // Arrange - analyzerManagerExecuter = &analyzerManagerMock{} - applicabilityManager, _, err := newApplicabilityScanManager(fakeBasicXrayResults, fakeBasicDependencyGraph, &fakeServerDetails, &analyzerManagerMock{}) - - // Act - eligible := applicabilityManager.eligibleForApplicabilityScan() + defer func() { + if scanner.scannerDirCleanupFunc != nil { + assert.NoError(t, scanner.scannerDirCleanupFunc()) + } + }() + applicabilityManager := newApplicabilityScanManager(noVulnerabilitiesScanResponse, mockDirectDependencies, scanner) // Assert - assert.NoError(t, err) - assert.True(t, eligible) + assert.NotEmpty(t, applicabilityManager) + assert.NotEmpty(t, applicabilityManager.scanner.configFileName) + assert.NotEmpty(t, applicabilityManager.scanner.resultsFileName) + assert.Equal(t, 2, applicabilityManager.directDependenciesCves.Size()) } func TestApplicabilityScanManager_ShouldRun_TechnologiesNotEligibleForScan(t *testing.T) { + assert.NoError(t, rtutils.DownloadAnalyzerManagerIfNeeded()) + scanner, err := NewAdvancedSecurityScanner(nil, &fakeServerDetails) + assert.NoError(t, err) defer func() { - fakeBasicXrayResults[0].Vulnerabilities[0].Technology = coreutils.Pipenv.ToString() - fakeBasicXrayResults[0].Violations[0].Technology = coreutils.Pipenv.ToString() + if scanner.scannerDirCleanupFunc != nil { + assert.NoError(t, scanner.scannerDirCleanupFunc()) + } }() - - // Arrange - analyzerManagerExecuter = &analyzerManagerMock{} - fakeBasicXrayResults[0].Vulnerabilities[0].Technology = coreutils.Nuget.ToString() - fakeBasicXrayResults[0].Violations[0].Technology = coreutils.Go.ToString() - applicabilityManager, _, err := newApplicabilityScanManager(fakeBasicXrayResults, fakeBasicDependencyGraph, - &fakeServerDetails, &analyzerManagerMock{}) - - // Act - eligible := applicabilityManager.eligibleForApplicabilityScan() + results, err := getApplicabilityScanResults(fakeBasicXrayResults, mockDirectDependencies, + []coreutils.Technology{coreutils.Nuget, coreutils.Go}, scanner) // Assert + assert.Nil(t, results) assert.NoError(t, err) - assert.False(t, eligible) } func TestApplicabilityScanManager_ShouldRun_ScanResultsAreEmpty(t *testing.T) { // Arrange - analyzerManagerExecuter = &analyzerManagerMock{} - applicabilityManager, _, err := newApplicabilityScanManager(nil, fakeBasicDependencyGraph, &fakeServerDetails, &analyzerManagerMock{}) - - // Act - eligible := applicabilityManager.eligibleForApplicabilityScan() - - // Assert + assert.NoError(t, rtutils.DownloadAnalyzerManagerIfNeeded()) + scanner, err := NewAdvancedSecurityScanner(nil, &fakeServerDetails) assert.NoError(t, err) + defer func() { + if scanner.scannerDirCleanupFunc != nil { + assert.NoError(t, scanner.scannerDirCleanupFunc()) + } + }() + applicabilityManager := newApplicabilityScanManager(nil, mockDirectDependencies, scanner) + assert.NoError(t, err) + // Assert + eligible := applicabilityManager.shouldRunApplicabilityScan([]coreutils.Technology{coreutils.Npm}) assert.False(t, eligible) } -func TestResultsIncludeEligibleTechnologies(t *testing.T) { - tests := []struct { - vulnerabilities []services.Vulnerability - violations []services.Violation - expectedResult bool - }{ - {vulnerabilities: []services.Vulnerability{{Technology: "npm"}}, violations: []services.Violation{{Technology: "go"}}, expectedResult: true}, - 
{vulnerabilities: []services.Vulnerability{{Technology: "go"}}, violations: []services.Violation{{Technology: "npm"}}, expectedResult: true}, - {vulnerabilities: []services.Vulnerability{{Technology: "npm"}}, violations: []services.Violation{{Technology: "npm"}}, expectedResult: true}, - {vulnerabilities: []services.Vulnerability{{Technology: "go"}}, violations: []services.Violation{{Technology: "go"}}, expectedResult: false}, - } - for _, test := range tests { - assert.Equal(t, test.expectedResult, resultsIncludeEligibleTechnologies(test.vulnerabilities, test.violations)) - } -} - func TestExtractXrayDirectViolations(t *testing.T) { var xrayResponseForDirectViolationsTest = []services.ScanResponse{ { - ScanId: "scanId_1", - Vulnerabilities: []services.Vulnerability{ - {IssueId: "issueId_1", Technology: coreutils.Pipenv.ToString(), - Cves: []services.Cve{{Id: "testCve1"}, {Id: "testCve2"}, {Id: "testCve3"}}, - Components: map[string]services.Component{"issueId_1_direct_dependency": {}}}, - }, Violations: []services.Violation{ {IssueId: "issueId_2", Technology: coreutils.Pipenv.ToString(), Cves: []services.Cve{{Id: "testCve4"}, {Id: "testCve5"}}, @@ -223,26 +222,23 @@ func TestExtractXrayDirectViolations(t *testing.T) { } tests := []struct { directDependencies []string - expectedResult []services.Violation + cvesCount int }{ {directDependencies: []string{"issueId_2_direct_dependency", "issueId_1_direct_dependency"}, - expectedResult: []services.Violation{ - {IssueId: "issueId_2", Technology: coreutils.Pipenv.ToString(), - Cves: []services.Cve{{Id: "testCve4"}, {Id: "testCve5"}}, - Components: map[string]services.Component{"issueId_2_direct_dependency": {}}}, - }, + cvesCount: 2, }, // Vulnerability dependency, should be ignored by function {directDependencies: []string{"issueId_1_direct_dependency"}, - expectedResult: []services.Violation{}, + cvesCount: 0, }, {directDependencies: []string{}, - expectedResult: []services.Violation{}, + cvesCount: 0, }, } for _, test := range tests { - assert.Equal(t, test.expectedResult, extractXrayDirectViolations(xrayResponseForDirectViolationsTest, test.directDependencies)) + cves := extractDirectDependenciesCvesFromScan(xrayResponseForDirectViolationsTest, test.directDependencies) + assert.Equal(t, test.cvesCount, cves.Size()) } } @@ -251,170 +247,146 @@ func TestExtractXrayDirectVulnerabilities(t *testing.T) { { ScanId: "scanId_1", Vulnerabilities: []services.Vulnerability{ - {IssueId: "issueId_1", Technology: coreutils.Pipenv.ToString(), + { + IssueId: "issueId_1", Technology: coreutils.Pipenv.ToString(), Cves: []services.Cve{{Id: "testCve1"}, {Id: "testCve2"}, {Id: "testCve3"}}, - Components: map[string]services.Component{"issueId_1_direct_dependency": {}}}, - }, - Violations: []services.Violation{ - {IssueId: "issueId_2", Technology: coreutils.Pipenv.ToString(), + Components: map[string]services.Component{"issueId_1_direct_dependency": {}}, + }, + { + IssueId: "issueId_2", Technology: coreutils.Pipenv.ToString(), Cves: []services.Cve{{Id: "testCve4"}, {Id: "testCve5"}}, - Components: map[string]services.Component{"issueId_2_direct_dependency": {}}}, + Components: map[string]services.Component{"issueId_2_direct_dependency": {}}, + }, }, }, } tests := []struct { directDependencies []string - expectedResult []services.Vulnerability - }{ - {directDependencies: []string{"issueId_2_direct_dependency", "issueId_1_direct_dependency"}, - expectedResult: []services.Vulnerability{ - {IssueId: "issueId_1", Technology: coreutils.Pipenv.ToString(), - Cves: 
[]services.Cve{{Id: "testCve1"}, {Id: "testCve2"}, {Id: "testCve3"}}, - Components: map[string]services.Component{"issueId_1_direct_dependency": {}}}, - }, - }, - {directDependencies: []string{"issueId_2_direct_dependency"}, // violation dependency, should be ignored by function - expectedResult: []services.Vulnerability{}, - }, - {directDependencies: []string{}, - expectedResult: []services.Vulnerability{}, - }, - } - - for _, test := range tests { - assert.Equal(t, test.expectedResult, extractXrayDirectVulnerabilities(xrayResponseForDirectVulnerabilitiesTest, test.directDependencies)) - } -} - -func TestGetDirectDependenciesList(t *testing.T) { - tests := []struct { - dependenciesTrees []*xrayUtils.GraphNode - expectedResult []string + cvesCount int }{ { - dependenciesTrees: nil, - expectedResult: []string{}, + directDependencies: []string{"issueId_1_direct_dependency"}, + cvesCount: 3, }, { - dependenciesTrees: []*xrayUtils.GraphNode{ - {Id: "parent_node_id", Nodes: []*xrayUtils.GraphNode{ - {Id: "issueId_1_direct_dependency", Nodes: []*xrayUtils.GraphNode{{Id: "issueId_1_non_direct_dependency"}}}, - {Id: "issueId_2_direct_dependency", Nodes: nil}, - }, - }, - }, - expectedResult: []string{"issueId_1_direct_dependency", "issueId_2_direct_dependency"}, + directDependencies: []string{"issueId_2_direct_dependency"}, + cvesCount: 2, }, - { - dependenciesTrees: []*xrayUtils.GraphNode{ - {Id: "parent_node_id", Nodes: []*xrayUtils.GraphNode{ - {Id: "issueId_1_direct_dependency", Nodes: nil}, - {Id: "issueId_2_direct_dependency", Nodes: nil}, - }, - }, - }, - expectedResult: []string{"issueId_1_direct_dependency", "issueId_2_direct_dependency"}, + {directDependencies: []string{}, + cvesCount: 0, }, } for _, test := range tests { - assert.ElementsMatch(t, test.expectedResult, getDirectDependenciesList(test.dependenciesTrees)) + assert.Equal(t, test.cvesCount, extractDirectDependenciesCvesFromScan(xrayResponseForDirectVulnerabilitiesTest, test.directDependencies).Size()) } } func TestCreateConfigFile_VerifyFileWasCreated(t *testing.T) { // Arrange - analyzerManagerExecuter = &analyzerManagerMock{} - applicabilityManager, _, applicabilityManagerError := newApplicabilityScanManager(fakeBasicXrayResults, fakeBasicDependencyGraph, &fakeServerDetails, &analyzerManagerMock{}) + assert.NoError(t, rtutils.DownloadAnalyzerManagerIfNeeded()) + scanner, err := NewAdvancedSecurityScanner(nil, &fakeServerDetails) + assert.NoError(t, err) + defer func() { + if scanner.scannerDirCleanupFunc != nil { + assert.NoError(t, scanner.scannerDirCleanupFunc()) + } + }() + applicabilityManager := newApplicabilityScanManager(fakeBasicXrayResults, []string{"issueId_1_direct_dependency", "issueId_2_direct_dependency"}, scanner) - // Act - err := applicabilityManager.createConfigFile() + currWd, err := coreutils.GetWorkingDirectory() + assert.NoError(t, err) + err = applicabilityManager.createConfigFile(currWd) + assert.NoError(t, err) defer func() { - err = os.Remove(applicabilityManager.configFileName) + err = os.Remove(applicabilityManager.scanner.configFileName) assert.NoError(t, err) }() - // Assert - assert.NoError(t, applicabilityManagerError) - assert.NoError(t, err) - _, fileNotExistError := os.Stat(applicabilityManager.configFileName) + _, fileNotExistError := os.Stat(applicabilityManager.scanner.configFileName) assert.NoError(t, fileNotExistError) - fileContent, err := os.ReadFile(applicabilityManager.configFileName) + fileContent, err := os.ReadFile(applicabilityManager.scanner.configFileName) assert.NoError(t, 
err) assert.True(t, len(fileContent) > 0) } func TestParseResults_EmptyResults_AllCvesShouldGetUnknown(t *testing.T) { // Arrange - analyzerManagerExecuter = &analyzerManagerMock{} - applicabilityManager, _, applicabilityManagerError := newApplicabilityScanManager(fakeBasicXrayResults, fakeBasicDependencyGraph, &fakeServerDetails, &analyzerManagerMock{}) - applicabilityManager.resultsFileName = filepath.Join("..", "..", "commands", "testdata", "applicability-scan", "empty-results.sarif") + assert.NoError(t, rtutils.DownloadAnalyzerManagerIfNeeded()) + scanner, err := NewAdvancedSecurityScanner(nil, &fakeServerDetails) + assert.NoError(t, err) + defer func() { + if scanner.scannerDirCleanupFunc != nil { + assert.NoError(t, scanner.scannerDirCleanupFunc()) + } + }() + applicabilityManager := newApplicabilityScanManager(fakeBasicXrayResults, mockDirectDependencies, scanner) + applicabilityManager.scanner.resultsFileName = filepath.Join("..", "..", "commands", "testdata", "applicability-scan", "empty-results.sarif") // Act - err := applicabilityManager.setScanResults() + results, err := applicabilityManager.getScanResults() // Assert - assert.NoError(t, applicabilityManagerError) assert.NoError(t, err) - assert.NotEmpty(t, applicabilityManager.applicabilityScanResults) - assert.Equal(t, 5, len(applicabilityManager.applicabilityScanResults)) - for _, cveResult := range applicabilityManager.applicabilityScanResults { + assert.Equal(t, 5, len(results)) + for _, cveResult := range results { assert.Equal(t, utils.ApplicabilityUndeterminedStringValue, cveResult) } } func TestParseResults_ApplicableCveExist(t *testing.T) { // Arrange - analyzerManagerExecuter = &analyzerManagerMock{} - applicabilityManager, _, applicabilityManagerError := newApplicabilityScanManager(fakeBasicXrayResults, fakeBasicDependencyGraph, &fakeServerDetails, &analyzerManagerMock{}) - applicabilityManager.resultsFileName = filepath.Join("..", "..", "commands", "testdata", "applicability-scan", "applicable-cve-results.sarif") + assert.NoError(t, rtutils.DownloadAnalyzerManagerIfNeeded()) + scanner, err := NewAdvancedSecurityScanner(nil, &fakeServerDetails) + assert.NoError(t, err) + defer func() { + if scanner.scannerDirCleanupFunc != nil { + assert.NoError(t, scanner.scannerDirCleanupFunc()) + } + }() + applicabilityManager := newApplicabilityScanManager(fakeBasicXrayResults, mockDirectDependencies, scanner) + applicabilityManager.scanner.resultsFileName = filepath.Join("..", "..", "commands", "testdata", "applicability-scan", "applicable-cve-results.sarif") // Act - err := applicabilityManager.setScanResults() + results, err := applicabilityManager.getScanResults() // Assert - assert.NoError(t, applicabilityManagerError) assert.NoError(t, err) - assert.NotEmpty(t, applicabilityManager.applicabilityScanResults) - assert.Equal(t, 5, len(applicabilityManager.applicabilityScanResults)) - assert.Equal(t, utils.ApplicableStringValue, applicabilityManager.applicabilityScanResults["testCve1"]) - assert.Equal(t, utils.NotApplicableStringValue, applicabilityManager.applicabilityScanResults["testCve3"]) + assert.Equal(t, 5, len(results)) + assert.Equal(t, utils.ApplicableStringValue, results["testCve1"]) + assert.Equal(t, utils.NotApplicableStringValue, results["testCve3"]) } func TestParseResults_AllCvesNotApplicable(t *testing.T) { // Arrange - analyzerManagerExecuter = &analyzerManagerMock{} - applicabilityManager, _, applicabilityManagerError := newApplicabilityScanManager(fakeBasicXrayResults, fakeBasicDependencyGraph, 
&fakeServerDetails, &analyzerManagerMock{}) - applicabilityManager.resultsFileName = filepath.Join("..", "..", "commands", "testdata", "applicability-scan", "no-applicable-cves-results.sarif") + assert.NoError(t, rtutils.DownloadAnalyzerManagerIfNeeded()) + scanner, err := NewAdvancedSecurityScanner(nil, &fakeServerDetails) + assert.NoError(t, err) + defer func() { + if scanner.scannerDirCleanupFunc != nil { + assert.NoError(t, scanner.scannerDirCleanupFunc()) + } + }() + applicabilityManager := newApplicabilityScanManager(fakeBasicXrayResults, mockDirectDependencies, scanner) + applicabilityManager.scanner.resultsFileName = filepath.Join("..", "..", "commands", "testdata", "applicability-scan", "no-applicable-cves-results.sarif") // Act - err := applicabilityManager.setScanResults() + results, err := applicabilityManager.getScanResults() // Assert - assert.NoError(t, applicabilityManagerError) assert.NoError(t, err) - assert.NotEmpty(t, applicabilityManager.applicabilityScanResults) - assert.Equal(t, 5, len(applicabilityManager.applicabilityScanResults)) - for _, cveResult := range applicabilityManager.applicabilityScanResults { + assert.Equal(t, 5, len(results)) + for _, cveResult := range results { assert.Equal(t, utils.NotApplicableStringValue, cveResult) } } func TestGetExtendedScanResults_AnalyzerManagerReturnsError(t *testing.T) { - defer func() { - analyzerManagerExecutionError = nil - }() - // Arrange - analyzerManagerErrorMessage := "analyzer manager failure message" - analyzerManagerExecutionError = errors.New(analyzerManagerErrorMessage) - analyzerManagerExecuter = &analyzerManagerMock{} + assert.NoError(t, rtutils.DownloadAnalyzerManagerIfNeeded()) + scanResults := &utils.ExtendedScanResults{XrayResults: fakeBasicXrayResults, ScannedTechnologies: []coreutils.Technology{coreutils.Yarn}} + err := RunScannersAndSetResults(scanResults, mockDirectDependencies, &fakeServerDetails, nil, nil) - // Act - extendedResults, err := GetExtendedScanResults(fakeBasicXrayResults, fakeBasicDependencyGraph, &fakeServerDetails) - - // Assert - assert.Error(t, err) - assert.Equal(t, fmt.Sprintf(applicabilityScanFailureMessage, analyzerManagerErrorMessage), err.Error()) - assert.Nil(t, extendedResults) + // Expect error: + assert.ErrorContains(t, err, "failed to run Applicability scan") } diff --git a/xray/audit/jas/iacscanner.go b/xray/audit/jas/iacscanner.go index 291fa1452..629cd384f 100644 --- a/xray/audit/jas/iacscanner.go +++ b/xray/audit/jas/iacscanner.go @@ -1,32 +1,18 @@ package jas import ( - "errors" - "fmt" - "github.com/jfrog/jfrog-cli-core/v2/utils/config" - "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" "github.com/jfrog/jfrog-cli-core/v2/xray/utils" - "github.com/jfrog/jfrog-client-go/utils/errorutils" - "github.com/jfrog/jfrog-client-go/utils/io/fileutils" - "github.com/owenrumney/go-sarif/v2/sarif" - "gopkg.in/yaml.v2" - "os" - "path/filepath" + "github.com/jfrog/jfrog-client-go/utils/log" ) const ( - iacScannerType = "iac-scan-modules" - iacScanFailureMessage = "failed to run Infrastructure as Code scan. 
Cause: %s" - iacScanCommand = "iac" + iacScannerType = "iac-scan-modules" + iacScanCommand = "iac" ) type IacScanManager struct { iacScannerResults []utils.IacOrSecretResult - configFileName string - resultsFileName string - analyzerManager utils.AnalyzerManagerInterface - serverDetails *config.ServerDetails - projectRootPath string + scanner *AdvancedSecurityScanner } // The getIacScanResults function runs the iac scan flow, which includes the following steps: @@ -37,58 +23,39 @@ type IacScanManager struct { // []utils.IacOrSecretResult: a list of the iac violations that were found. // bool: true if the user is entitled to iac scan, false otherwise. // error: An error object (if any). -func getIacScanResults(serverDetails *config.ServerDetails, analyzerManager utils.AnalyzerManagerInterface) ([]utils.IacOrSecretResult, - bool, error) { - iacScanManager, cleanupFunc, err := newIacScanManager(serverDetails, analyzerManager) - if err != nil { - return nil, false, fmt.Errorf(iacScanFailureMessage, err.Error()) +func getIacScanResults(scanner *AdvancedSecurityScanner) (results []utils.IacOrSecretResult, err error) { + iacScanManager := newIacScanManager(scanner) + log.Info("Running IaC scanning...") + if err = iacScanManager.scanner.Run(iacScanManager); err != nil { + err = utils.ParseAnalyzerManagerError(utils.IaC, err) + return } - defer func() { - if cleanupFunc != nil { - err = errors.Join(err, cleanupFunc()) - } - }() - if err = iacScanManager.run(); err != nil { - if utils.IsNotEntitledError(err) || utils.IsUnsupportedCommandError(err) { - return nil, false, nil - } - return nil, true, fmt.Errorf(iacScanFailureMessage, err.Error()) + if len(iacScanManager.iacScannerResults) > 0 { + log.Info("Found", len(iacScanManager.iacScannerResults), "IaC vulnerabilities") } - return iacScanManager.iacScannerResults, true, nil + results = iacScanManager.iacScannerResults + return } -func newIacScanManager(serverDetails *config.ServerDetails, analyzerManager utils.AnalyzerManagerInterface) (manager *IacScanManager, - cleanup func() error, err error) { - tempDir, err := fileutils.CreateTempDir() - if err != nil { - return - } - cleanup = func() error { - return fileutils.RemoveTempDir(tempDir) - } +func newIacScanManager(scanner *AdvancedSecurityScanner) (manager *IacScanManager) { return &IacScanManager{ iacScannerResults: []utils.IacOrSecretResult{}, - configFileName: filepath.Join(tempDir, "config.yaml"), - resultsFileName: filepath.Join(tempDir, "results.sarif"), - analyzerManager: analyzerManager, - serverDetails: serverDetails, - }, cleanup, nil + scanner: scanner, + } } -func (iac *IacScanManager) run() (err error) { - defer func() { - if deleteJasProcessFiles(iac.configFileName, iac.resultsFileName) != nil { - deleteFilesError := deleteJasProcessFiles(iac.configFileName, iac.resultsFileName) - err = errors.Join(err, deleteFilesError) - } - }() - if err = iac.createConfigFile(); err != nil { +func (iac *IacScanManager) Run(wd string) (err error) { + scanner := iac.scanner + if err = iac.createConfigFile(wd); err != nil { return } if err = iac.runAnalyzerManager(); err != nil { return } - return iac.setScanResults() + var workingDirResults []utils.IacOrSecretResult + workingDirResults, err = getIacOrSecretsScanResults(scanner.resultsFileName, wd, false) + iac.iacScannerResults = append(iac.iacScannerResults, workingDirResults...) 
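// Reviewer note (an assumption based on the workingDirs handling in this change,
// not verified against jasmanager.go): the scanner appears to invoke Run once per
// configured working directory, so the append above accumulates IaC findings
// across directories instead of overwriting them.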
+ return } type iacScanConfig struct { @@ -102,59 +69,20 @@ type iacScanConfiguration struct { SkippedDirs []string `yaml:"skipped-folders"` } -func (iac *IacScanManager) createConfigFile() error { - currentDir, err := coreutils.GetWorkingDirectory() - if err != nil { - return err - } - iac.projectRootPath = currentDir +func (iac *IacScanManager) createConfigFile(currentWd string) error { configFileContent := iacScanConfig{ Scans: []iacScanConfiguration{ { - Roots: []string{currentDir}, - Output: iac.resultsFileName, + Roots: []string{currentWd}, + Output: iac.scanner.resultsFileName, Type: iacScannerType, SkippedDirs: skippedDirs, }, }, } - yamlData, err := yaml.Marshal(&configFileContent) - if errorutils.CheckError(err) != nil { - return err - } - err = os.WriteFile(iac.configFileName, yamlData, 0644) - return errorutils.CheckError(err) + return createScannersConfigFile(iac.scanner.configFileName, configFileContent) } func (iac *IacScanManager) runAnalyzerManager() error { - if err := utils.SetAnalyzerManagerEnvVariables(iac.serverDetails); err != nil { - return err - } - return iac.analyzerManager.Exec(iac.configFileName, iacScanCommand) -} - -func (iac *IacScanManager) setScanResults() error { - report, err := sarif.Open(iac.resultsFileName) - if errorutils.CheckError(err) != nil { - return err - } - var iacResults []*sarif.Result - if len(report.Runs) > 0 { - iacResults = report.Runs[0].Results - } - - finalIacList := []utils.IacOrSecretResult{} - - for _, result := range iacResults { - newIac := utils.IacOrSecretResult{ - Severity: utils.GetResultSeverity(result), - File: utils.ExtractRelativePath(utils.GetResultFileName(result), iac.projectRootPath), - LineColumn: utils.GetResultLocationInFile(result), - Text: *result.Message.Text, - Type: *result.RuleID, - } - finalIacList = append(finalIacList, newIac) - } - iac.iacScannerResults = finalIacList - return nil + return iac.scanner.analyzerManager.Exec(iac.scanner.configFileName, iacScanCommand, iac.scanner.serverDetails) } diff --git a/xray/audit/jas/iacscanner_test.go b/xray/audit/jas/iacscanner_test.go index 7377e1045..430b75fdb 100644 --- a/xray/audit/jas/iacscanner_test.go +++ b/xray/audit/jas/iacscanner_test.go @@ -1,6 +1,8 @@ package jas import ( + rtutils "github.com/jfrog/jfrog-cli-core/v2/artifactory/utils" + "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" "github.com/stretchr/testify/assert" "os" "path/filepath" @@ -9,62 +11,89 @@ import ( func TestNewIacScanManager(t *testing.T) { // Act - iacScanManager, _, err := newIacScanManager(&fakeServerDetails, &analyzerManagerMock{}) + assert.NoError(t, rtutils.DownloadAnalyzerManagerIfNeeded()) + scanner, err := NewAdvancedSecurityScanner([]string{"currentDir"}, &fakeServerDetails) + assert.NoError(t, err) + defer func() { + if scanner.scannerDirCleanupFunc != nil { + assert.NoError(t, scanner.scannerDirCleanupFunc()) + } + }() + iacScanManager := newIacScanManager(scanner) // Assert assert.NoError(t, err) assert.NotEmpty(t, iacScanManager) - assert.NotEmpty(t, iacScanManager.configFileName) - assert.NotEmpty(t, iacScanManager.resultsFileName) - assert.Equal(t, &fakeServerDetails, iacScanManager.serverDetails) + assert.NotEmpty(t, iacScanManager.scanner.configFileName) + assert.NotEmpty(t, iacScanManager.scanner.resultsFileName) + assert.NotEmpty(t, iacScanManager.scanner.workingDirs) + assert.Equal(t, &fakeServerDetails, iacScanManager.scanner.serverDetails) } func TestIacScan_CreateConfigFile_VerifyFileWasCreated(t *testing.T) { - // Arrange - iacScanManager, _, 
iacManagerError := newIacScanManager(&fakeServerDetails, &analyzerManagerMock{}) + assert.NoError(t, rtutils.DownloadAnalyzerManagerIfNeeded()) + scanner, err := NewAdvancedSecurityScanner([]string{"currentDir"}, &fakeServerDetails) + assert.NoError(t, err) + defer func() { + if scanner.scannerDirCleanupFunc != nil { + assert.NoError(t, scanner.scannerDirCleanupFunc()) + } + }() + iacScanManager := newIacScanManager(scanner) - // Act - err := iacScanManager.createConfigFile() + currWd, err := coreutils.GetWorkingDirectory() + assert.NoError(t, err) + err = iacScanManager.createConfigFile(currWd) defer func() { - err = os.Remove(iacScanManager.configFileName) + err = os.Remove(iacScanManager.scanner.configFileName) assert.NoError(t, err) }() - // Assert - assert.NoError(t, iacManagerError) - assert.NoError(t, err) - _, fileNotExistError := os.Stat(iacScanManager.configFileName) + _, fileNotExistError := os.Stat(iacScanManager.scanner.configFileName) assert.NoError(t, fileNotExistError) - fileContent, err := os.ReadFile(iacScanManager.configFileName) + fileContent, err := os.ReadFile(iacScanManager.scanner.configFileName) assert.NoError(t, err) assert.True(t, len(fileContent) > 0) } func TestIacParseResults_EmptyResults(t *testing.T) { // Arrange - iacScanManager, _, iacManagerError := newIacScanManager(&fakeServerDetails, &analyzerManagerMock{}) - iacScanManager.resultsFileName = filepath.Join("..", "..", "commands", "testdata", "iac-scan", "no-violations.sarif") + assert.NoError(t, rtutils.DownloadAnalyzerManagerIfNeeded()) + scanner, err := NewAdvancedSecurityScanner(nil, &fakeServerDetails) + assert.NoError(t, err) + defer func() { + if scanner.scannerDirCleanupFunc != nil { + assert.NoError(t, scanner.scannerDirCleanupFunc()) + } + }() + iacScanManager := newIacScanManager(scanner) + iacScanManager.scanner.resultsFileName = filepath.Join("..", "..", "commands", "testdata", "iac-scan", "no-violations.sarif") // Act - err := iacScanManager.setScanResults() + iacScanManager.iacScannerResults, err = getIacOrSecretsScanResults(iacScanManager.scanner.resultsFileName, scanner.workingDirs[0], false) // Assert - assert.NoError(t, iacManagerError) assert.NoError(t, err) assert.Empty(t, iacScanManager.iacScannerResults) } -func TestIacParseResults_ResultsContainSecrets(t *testing.T) { +func TestIacParseResults_ResultsContainIacViolations(t *testing.T) { // Arrange - iacScanManager, _, iacManagerError := newIacScanManager(&fakeServerDetails, &analyzerManagerMock{}) - iacScanManager.resultsFileName = filepath.Join("..", "..", "commands", "testdata", "iac-scan", "contains-iac-violations.sarif") + scanner, err := NewAdvancedSecurityScanner(nil, &fakeServerDetails) + assert.NoError(t, err) + defer func() { + if scanner.scannerDirCleanupFunc != nil { + assert.NoError(t, scanner.scannerDirCleanupFunc()) + } + }() + iacScanManager := newIacScanManager(scanner) + iacScanManager.scanner.resultsFileName = filepath.Join("..", "..", "commands", "testdata", "iac-scan", "contains-iac-violations.sarif") // Act - err := iacScanManager.setScanResults() + iacScanManager.iacScannerResults, err = getIacOrSecretsScanResults(iacScanManager.scanner.resultsFileName, scanner.workingDirs[0], false) // Assert - assert.NoError(t, iacManagerError) assert.NoError(t, err) assert.NotEmpty(t, iacScanManager.iacScannerResults) assert.Equal(t, 4, len(iacScanManager.iacScannerResults)) diff --git a/xray/audit/jas/jasmanager.go b/xray/audit/jas/jasmanager.go index d24ca8beb..c3304ecdf 100644 --- a/xray/audit/jas/jasmanager.go +++ 
b/xray/audit/jas/jasmanager.go @@ -5,63 +5,98 @@ import ( "github.com/jfrog/jfrog-cli-core/v2/utils/config" "github.com/jfrog/jfrog-cli-core/v2/xray/utils" "github.com/jfrog/jfrog-client-go/utils/errorutils" + "github.com/jfrog/jfrog-client-go/utils/io" "github.com/jfrog/jfrog-client-go/utils/io/fileutils" "github.com/jfrog/jfrog-client-go/utils/log" - "github.com/jfrog/jfrog-client-go/xray/services" - xrayUtils "github.com/jfrog/jfrog-client-go/xray/services/utils" + "github.com/owenrumney/go-sarif/v2/sarif" + "gopkg.in/yaml.v3" "os" + "path/filepath" ) -const serverDetailsErrorMessage = "cant get xray server details" - var ( - analyzerManagerExecuter utils.AnalyzerManagerInterface = &utils.AnalyzerManager{} - skippedDirs = []string{"**/*test*/**", "**/*venv*/**", "**/*node_modules*/**", "**/*target*/**"} + skippedDirs = []string{"**/*test*/**", "**/*venv*/**", "**/*node_modules*/**", "**/*target*/**"} ) -func GetExtendedScanResults(xrayResults []services.ScanResponse, dependencyTrees []*xrayUtils.GraphNode, - serverDetails *config.ServerDetails) (*utils.ExtendedScanResults, error) { - if serverDetails == nil { - return nil, errors.New(serverDetailsErrorMessage) +type ScannerCmd interface { + Run(wd string) (err error) +} + +type AdvancedSecurityScanner struct { + configFileName string + resultsFileName string + analyzerManager utils.AnalyzerManager + serverDetails *config.ServerDetails + workingDirs []string + scannerDirCleanupFunc func() error +} + +func NewAdvancedSecurityScanner(workingDirs []string, serverDetails *config.ServerDetails) (scanner *AdvancedSecurityScanner, err error) { + scanner = &AdvancedSecurityScanner{} + if scanner.analyzerManager.AnalyzerManagerFullPath, err = utils.GetAnalyzerManagerExecutable(); err != nil { + return } - if len(serverDetails.Url) == 0 { - log.Warn("To include 'Contextual Analysis' information as part of the audit output, please run the 'jf c add' command before running this command.") - return &utils.ExtendedScanResults{XrayResults: xrayResults}, nil + var tempDir string + if tempDir, err = fileutils.CreateTempDir(); err != nil { + return } - analyzerManagerExist, err := analyzerManagerExecuter.ExistLocally() - if err != nil { - return nil, err + scanner.scannerDirCleanupFunc = func() error { + return fileutils.RemoveTempDir(tempDir) } - if !analyzerManagerExist { - log.Debug("Since the 'Analyzer Manager' doesn't exist locally, its execution is skipped.") - return &utils.ExtendedScanResults{XrayResults: xrayResults}, nil + scanner.serverDetails = serverDetails + scanner.configFileName = filepath.Join(tempDir, "config.yaml") + scanner.resultsFileName = filepath.Join(tempDir, "results.sarif") + scanner.workingDirs, err = utils.GetFullPathsWorkingDirs(workingDirs) + return +} + +func (a *AdvancedSecurityScanner) Run(scannerCmd ScannerCmd) (err error) { + for _, workingDir := range a.workingDirs { + func() { + defer func() { + err = errors.Join(err, deleteJasProcessFiles(a.configFileName, a.resultsFileName)) + }() + if err = scannerCmd.Run(workingDir); err != nil { + return + } + }() } - if err = utils.CreateAnalyzerManagerLogDir(); err != nil { - return nil, err + return +} + +func RunScannersAndSetResults(scanResults *utils.ExtendedScanResults, directDependencies []string, + serverDetails *config.ServerDetails, workingDirs []string, progress io.ProgressMgr) (err error) { + if serverDetails == nil || len(serverDetails.Url) == 0 { + log.Warn("To include 'Advanced Security' scan as part of the audit output, please run the 'jf c add' command before 
running this command.") + return } - applicabilityScanResults, eligibleForApplicabilityScan, err := getApplicabilityScanResults(xrayResults, - dependencyTrees, serverDetails, analyzerManagerExecuter) + scanner, err := NewAdvancedSecurityScanner(workingDirs, serverDetails) if err != nil { - return nil, err + return } - secretsScanResults, eligibleForSecretsScan, err := getSecretsScanResults(serverDetails, analyzerManagerExecuter) + defer func() { + cleanup := scanner.scannerDirCleanupFunc + err = errors.Join(err, cleanup()) + }() + if progress != nil { + progress.SetHeadlineMsg("Running applicability scanning") + } + scanResults.ApplicabilityScanResults, err = getApplicabilityScanResults(scanResults.XrayResults, directDependencies, scanResults.ScannedTechnologies, scanner) if err != nil { - return nil, err + return } - iacScanResults, eligibleForIacScan, err := getIacScanResults(serverDetails, analyzerManagerExecuter) + if progress != nil { + progress.SetHeadlineMsg("Running secrets scanning") + } + scanResults.SecretsScanResults, err = getSecretsScanResults(scanner) if err != nil { - return nil, err + return } - return &utils.ExtendedScanResults{ - XrayResults: xrayResults, - ApplicabilityScanResults: applicabilityScanResults, - SecretsScanResults: secretsScanResults, - IacScanResults: iacScanResults, - EntitledForJas: true, - EligibleForApplicabilityScan: eligibleForApplicabilityScan, - EligibleForSecretScan: eligibleForSecretsScan, - EligibleForIacScan: eligibleForIacScan, - }, nil + if progress != nil { + progress.SetHeadlineMsg("Running IaC scanning") + } + scanResults.IacScanResults, err = getIacScanResults(scanner) + return } func deleteJasProcessFiles(configFile string, resultFile string) error { @@ -83,3 +118,44 @@ func deleteJasProcessFiles(configFile string, resultFile string) error { } return errorutils.CheckError(err) } + +func getIacOrSecretsScanResults(resultsFileName, workingDir string, isSecret bool) ([]utils.IacOrSecretResult, error) { + report, err := sarif.Open(resultsFileName) + if errorutils.CheckError(err) != nil { + return nil, err + } + var results []*sarif.Result + if len(report.Runs) > 0 { + results = report.Runs[0].Results + } + + var iacOrSecretResults []utils.IacOrSecretResult + for _, result := range results { + // Describes a request to “suppress” a result (to exclude it from result lists) + if len(result.Suppressions) > 0 { + continue + } + text := *result.Message.Text + if isSecret { + text = hideSecret(*result.Locations[0].PhysicalLocation.Region.Snippet.Text) + } + newResult := utils.IacOrSecretResult{ + Severity: utils.GetResultSeverity(result), + File: utils.ExtractRelativePath(utils.GetResultFileName(result), workingDir), + LineColumn: utils.GetResultLocationInFile(result), + Text: text, + Type: *result.RuleID, + } + iacOrSecretResults = append(iacOrSecretResults, newResult) + } + return iacOrSecretResults, nil +} + +func createScannersConfigFile(fileName string, fileContent interface{}) error { + yamlData, err := yaml.Marshal(&fileContent) + if errorutils.CheckError(err) != nil { + return err + } + err = os.WriteFile(fileName, yamlData, 0644) + return errorutils.CheckError(err) +} diff --git a/xray/audit/jas/jasmanager_test.go b/xray/audit/jas/jasmanager_test.go index 9fd56367b..960c1e9d3 100644 --- a/xray/audit/jas/jasmanager_test.go +++ b/xray/audit/jas/jasmanager_test.go @@ -1,55 +1,35 @@ package jas import ( + "github.com/jfrog/jfrog-cli-core/v2/xray/utils" + "os" + "testing" + "github.com/jfrog/jfrog-cli-core/v2/utils/config" 
"github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" + "github.com/jfrog/jfrog-client-go/utils/io/fileutils" "github.com/jfrog/jfrog-client-go/xray/services" - xrayUtils "github.com/jfrog/jfrog-client-go/xray/services/utils" "github.com/stretchr/testify/assert" - "testing" ) -var ( - analyzerManagerExecutionError error = nil - analyzerManagerExists = true -) - -type analyzerManagerMock struct { -} - -func (am *analyzerManagerMock) Exec(string, string) error { - return analyzerManagerExecutionError -} - -func (am *analyzerManagerMock) ExistLocally() (bool, error) { - return analyzerManagerExists, nil -} - var fakeBasicXrayResults = []services.ScanResponse{ { ScanId: "scanId_1", Vulnerabilities: []services.Vulnerability{ {IssueId: "issueId_1", Technology: coreutils.Pipenv.ToString(), Cves: []services.Cve{{Id: "testCve1"}, {Id: "testCve2"}, {Id: "testCve3"}}, - Components: map[string]services.Component{"issueId_1_direct_dependency": {}}}, + Components: map[string]services.Component{"issueId_1_direct_dependency": {}, "issueId_3_direct_dependency": {}}}, }, Violations: []services.Violation{ {IssueId: "issueId_2", Technology: coreutils.Pipenv.ToString(), Cves: []services.Cve{{Id: "testCve4"}, {Id: "testCve5"}}, - Components: map[string]services.Component{"issueId_2_direct_dependency": {}}}, + Components: map[string]services.Component{"issueId_2_direct_dependency": {}, "issueId_4_direct_dependency": {}}}, }, }, } -var fakeBasicDependencyGraph = []*xrayUtils.GraphNode{ - { - Id: "parent_node_id", - Nodes: []*xrayUtils.GraphNode{ - {Id: "issueId_1_direct_dependency", Nodes: []*xrayUtils.GraphNode{{Id: "issueId_1_non_direct_dependency"}}}, - {Id: "issueId_2_direct_dependency", Nodes: nil}, - }, - }, -} +var mockDirectDependencies = []string{"issueId_2_direct_dependency", "issueId_1_direct_dependency"} +var mockMultiRootDirectDependencies = []string{"issueId_2_direct_dependency", "issueId_1_direct_dependency", "issueId_3_direct_dependency", "issueId_4_direct_dependency"} var fakeServerDetails = config.ServerDetails{ Url: "platformUrl", @@ -58,26 +38,23 @@ var fakeServerDetails = config.ServerDetails{ } func TestGetExtendedScanResults_AnalyzerManagerDoesntExist(t *testing.T) { - // Arrange - analyzerManagerExists = false - analyzerManagerExecuter = &analyzerManagerMock{} - - // Act - extendedResults, err := GetExtendedScanResults(fakeBasicXrayResults, fakeBasicDependencyGraph, &fakeServerDetails) - - // Assert + tmpDir, err := fileutils.CreateTempDir() + defer func() { + assert.NoError(t, fileutils.RemoveTempDir(tmpDir)) + }() assert.NoError(t, err) - assert.False(t, extendedResults.EntitledForJas) - assert.Equal(t, 1, len(extendedResults.XrayResults)) - assert.Nil(t, extendedResults.ApplicabilityScanResults) + assert.NoError(t, os.Setenv(coreutils.HomeDir, tmpDir)) + defer func() { + assert.NoError(t, os.Unsetenv(coreutils.HomeDir)) + }() + scanResults := &utils.ExtendedScanResults{XrayResults: fakeBasicXrayResults, ScannedTechnologies: []coreutils.Technology{coreutils.Yarn}} + err = RunScannersAndSetResults(scanResults, []string{"issueId_1_direct_dependency", "issueId_2_direct_dependency"}, &fakeServerDetails, nil, nil) + // Expect error: + assert.Error(t, err) } func TestGetExtendedScanResults_ServerNotValid(t *testing.T) { - // Act - extendedResults, err := GetExtendedScanResults(fakeBasicXrayResults, fakeBasicDependencyGraph, nil) - - // Assert - assert.Nil(t, extendedResults) - assert.Error(t, err) - assert.Equal(t, "cant get xray server details", err.Error()) + scanResults := 
&utils.ExtendedScanResults{XrayResults: fakeBasicXrayResults, ScannedTechnologies: []coreutils.Technology{coreutils.Pip}} + err := RunScannersAndSetResults(scanResults, []string{"issueId_1_direct_dependency", "issueId_2_direct_dependency"}, nil, nil, nil) + assert.NoError(t, err) } diff --git a/xray/audit/jas/secretsscanner.go b/xray/audit/jas/secretsscanner.go index c249afff6..cd1159a49 100644 --- a/xray/audit/jas/secretsscanner.go +++ b/xray/audit/jas/secretsscanner.go @@ -1,33 +1,20 @@ package jas import ( - "errors" - "fmt" - "github.com/jfrog/jfrog-cli-core/v2/utils/config" - "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" + "strings" + "github.com/jfrog/jfrog-cli-core/v2/xray/utils" - "github.com/jfrog/jfrog-client-go/utils/errorutils" - "github.com/jfrog/jfrog-client-go/utils/io/fileutils" - "github.com/owenrumney/go-sarif/v2/sarif" - "gopkg.in/yaml.v2" - "os" - "path/filepath" + "github.com/jfrog/jfrog-client-go/utils/log" ) const ( - secretsScanCommand = "sec" - secretsScannersNames = "tokens, entropy" - secretsScannerType = "secrets-scan" - secScanFailureMessage = "failed to run secrets scan. Cause: %s" + secretsScanCommand = "sec" + secretsScannerType = "secrets-scan" ) type SecretScanManager struct { secretsScannerResults []utils.IacOrSecretResult - configFileName string - resultsFileName string - analyzerManager utils.AnalyzerManagerInterface - serverDetails *config.ServerDetails - projectRootPath string + scanner *AdvancedSecurityScanner } // The getSecretsScanResults function runs the secrets scan flow, which includes the following steps: @@ -36,60 +23,40 @@ type SecretScanManager struct { // Parsing the analyzer manager results. // Return values: // []utils.IacOrSecretResult: a list of the secrets that were found. -// bool: true if the user is entitled to secrets scan, false otherwise. // error: An error object (if any). 
-func getSecretsScanResults(serverDetails *config.ServerDetails, analyzerManager utils.AnalyzerManagerInterface) ([]utils.IacOrSecretResult, - bool, error) { - secretScanManager, cleanupFunc, err := newSecretsScanManager(serverDetails, analyzerManager) - if err != nil { - return nil, false, fmt.Errorf(secScanFailureMessage, err.Error()) +func getSecretsScanResults(scanner *AdvancedSecurityScanner) (results []utils.IacOrSecretResult, err error) { + secretScanManager := newSecretsScanManager(scanner) + log.Info("Running secrets scanning...") + if err = secretScanManager.scanner.Run(secretScanManager); err != nil { + err = utils.ParseAnalyzerManagerError(utils.Secrets, err) + return } - defer func() { - if cleanupFunc != nil { - err = errors.Join(err, cleanupFunc()) - } - }() - if err = secretScanManager.run(); err != nil { - if utils.IsNotEntitledError(err) || utils.IsUnsupportedCommandError(err) { - return nil, false, nil - } - return nil, true, fmt.Errorf(secScanFailureMessage, err.Error()) + results = secretScanManager.secretsScannerResults + if len(results) > 0 { + log.Info(len(results), "secrets were found") } - return secretScanManager.secretsScannerResults, true, nil + return } -func newSecretsScanManager(serverDetails *config.ServerDetails, analyzerManager utils.AnalyzerManagerInterface) (manager *SecretScanManager, - cleanup func() error, err error) { - tempDir, err := fileutils.CreateTempDir() - if err != nil { - return - } - cleanup = func() error { - return fileutils.RemoveTempDir(tempDir) - } +func newSecretsScanManager(scanner *AdvancedSecurityScanner) (manager *SecretScanManager) { return &SecretScanManager{ secretsScannerResults: []utils.IacOrSecretResult{}, - configFileName: filepath.Join(tempDir, "config.yaml"), - resultsFileName: filepath.Join(tempDir, "results.sarif"), - analyzerManager: analyzerManager, - serverDetails: serverDetails, - }, cleanup, nil + scanner: scanner, + } } -func (s *SecretScanManager) run() (err error) { - defer func() { - if deleteJasProcessFiles(s.configFileName, s.resultsFileName) != nil { - deleteFilesError := deleteJasProcessFiles(s.configFileName, s.resultsFileName) - err = errors.Join(err, deleteFilesError) - } - }() - if err = s.createConfigFile(); err != nil { +func (s *SecretScanManager) Run(wd string) (err error) { + scanner := s.scanner + if err = s.createConfigFile(wd); err != nil { return } if err = s.runAnalyzerManager(); err != nil { return } - return s.setScanResults() + var workingDirResults []utils.IacOrSecretResult + workingDirResults, err = getIacOrSecretsScanResults(scanner.resultsFileName, wd, true) + s.secretsScannerResults = append(s.secretsScannerResults, workingDirResults...) 
+ return } type secretsScanConfig struct { @@ -100,81 +67,30 @@ type secretsScanConfiguration struct { Roots []string `yaml:"roots"` Output string `yaml:"output"` Type string `yaml:"type"` - Scanners string `yaml:"scanners"` SkippedDirs []string `yaml:"skipped-folders"` } -func (s *SecretScanManager) createConfigFile() error { - currentDir, err := coreutils.GetWorkingDirectory() - if err != nil { - return err - } - s.projectRootPath = currentDir +func (s *SecretScanManager) createConfigFile(currentWd string) error { configFileContent := secretsScanConfig{ Scans: []secretsScanConfiguration{ { - Roots: []string{currentDir}, - Output: s.resultsFileName, + Roots: []string{currentWd}, + Output: s.scanner.resultsFileName, Type: secretsScannerType, - Scanners: secretsScannersNames, SkippedDirs: skippedDirs, }, }, } - yamlData, err := yaml.Marshal(&configFileContent) - if errorutils.CheckError(err) != nil { - return err - } - err = os.WriteFile(s.configFileName, yamlData, 0644) - return errorutils.CheckError(err) + return createScannersConfigFile(s.scanner.configFileName, configFileContent) } func (s *SecretScanManager) runAnalyzerManager() error { - if err := utils.SetAnalyzerManagerEnvVariables(s.serverDetails); err != nil { - return err - } - return s.analyzerManager.Exec(s.configFileName, secretsScanCommand) -} - -func (s *SecretScanManager) setScanResults() error { - report, err := sarif.Open(s.resultsFileName) - if errorutils.CheckError(err) != nil { - return err - } - var secretsResults []*sarif.Result - if len(report.Runs) > 0 { - secretsResults = report.Runs[0].Results - } - - finalSecretsList := []utils.IacOrSecretResult{} - - for _, secret := range secretsResults { - newSecret := utils.IacOrSecretResult{ - Severity: utils.GetResultSeverity(secret), - File: utils.ExtractRelativePath(utils.GetResultFileName(secret), s.projectRootPath), - LineColumn: utils.GetResultLocationInFile(secret), - Text: hideSecret(*secret.Locations[0].PhysicalLocation.Region.Snippet.Text), - Type: *secret.RuleID, - } - finalSecretsList = append(finalSecretsList, newSecret) - } - s.secretsScannerResults = finalSecretsList - return nil + return s.scanner.analyzerManager.Exec(s.scanner.configFileName, secretsScanCommand, s.scanner.serverDetails) } func hideSecret(secret string) string { if len(secret) <= 3 { return "***" } - hiddenSecret := "" - i := 0 - for i < 3 { // Show first 3 digits - hiddenSecret += string(secret[i]) - i++ - } - for i < 15 { - hiddenSecret += "*" - i++ - } - return hiddenSecret + return secret[:3] + strings.Repeat("*", 12) } diff --git a/xray/audit/jas/secretsscanner_test.go b/xray/audit/jas/secretsscanner_test.go index b7fb7994a..9ec80fa61 100644 --- a/xray/audit/jas/secretsscanner_test.go +++ b/xray/audit/jas/secretsscanner_test.go @@ -1,8 +1,8 @@ package jas import ( - "errors" - "fmt" + rtutils "github.com/jfrog/jfrog-cli-core/v2/artifactory/utils" + "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" "github.com/stretchr/testify/assert" "os" "path/filepath" @@ -10,35 +10,47 @@ import ( ) func TestNewSecretsScanManager(t *testing.T) { - // Act - secretScanManager, _, err := newSecretsScanManager(&fakeServerDetails, &analyzerManagerMock{}) + assert.NoError(t, rtutils.DownloadAnalyzerManagerIfNeeded()) + scanner, err := NewAdvancedSecurityScanner(nil, &fakeServerDetails) + assert.NoError(t, err) + defer func() { + if scanner.scannerDirCleanupFunc != nil { + assert.NoError(t, scanner.scannerDirCleanupFunc()) + } + }() + secretScanManager := newSecretsScanManager(scanner) - // Assert 
assert.NoError(t, err) assert.NotEmpty(t, secretScanManager) - assert.NotEmpty(t, secretScanManager.configFileName) - assert.NotEmpty(t, secretScanManager.resultsFileName) - assert.Equal(t, &fakeServerDetails, secretScanManager.serverDetails) + assert.NotEmpty(t, secretScanManager.scanner.configFileName) + assert.NotEmpty(t, secretScanManager.scanner.resultsFileName) + assert.Equal(t, &fakeServerDetails, secretScanManager.scanner.serverDetails) } func TestSecretsScan_CreateConfigFile_VerifyFileWasCreated(t *testing.T) { - // Arrange - secretScanManager, _, secretsManagerError := newSecretsScanManager(&fakeServerDetails, &analyzerManagerMock{}) + assert.NoError(t, rtutils.DownloadAnalyzerManagerIfNeeded()) + scanner, err := NewAdvancedSecurityScanner(nil, &fakeServerDetails) + assert.NoError(t, err) + defer func() { + if scanner.scannerDirCleanupFunc != nil { + assert.NoError(t, scanner.scannerDirCleanupFunc()) + } + }() + secretScanManager := newSecretsScanManager(scanner) - // Act - err := secretScanManager.createConfigFile() + currWd, err := coreutils.GetWorkingDirectory() + assert.NoError(t, err) + err = secretScanManager.createConfigFile(currWd) + assert.NoError(t, err) defer func() { - err = os.Remove(secretScanManager.configFileName) + err = os.Remove(secretScanManager.scanner.configFileName) assert.NoError(t, err) }() - // Assert - assert.NoError(t, secretsManagerError) - assert.NoError(t, err) - _, fileNotExistError := os.Stat(secretScanManager.configFileName) + _, fileNotExistError := os.Stat(secretScanManager.scanner.configFileName) assert.NoError(t, fileNotExistError) - fileContent, err := os.ReadFile(secretScanManager.configFileName) + fileContent, err := os.ReadFile(secretScanManager.scanner.configFileName) assert.NoError(t, err) assert.True(t, len(fileContent) > 0) } @@ -46,68 +58,83 @@ func TestSecretsScan_CreateConfigFile_VerifyFileWasCreated(t *testing.T) { func TestRunAnalyzerManager_ReturnsGeneralError(t *testing.T) { defer func() { os.Clearenv() - analyzerManagerExecutionError = nil }() // Arrange - analyzerManagerExecutionError = errors.New("analyzer manager error") - secretScanManager, _, secretsManagerError := newSecretsScanManager(&fakeServerDetails, &analyzerManagerMock{}) + assert.NoError(t, rtutils.DownloadAnalyzerManagerIfNeeded()) + scanner, err := NewAdvancedSecurityScanner(nil, &fakeServerDetails) + assert.NoError(t, err) + defer func() { + if scanner.scannerDirCleanupFunc != nil { + assert.NoError(t, scanner.scannerDirCleanupFunc()) + } + }() + secretScanManager := newSecretsScanManager(scanner) // Act - err := secretScanManager.runAnalyzerManager() + err = secretScanManager.runAnalyzerManager() // Assert - assert.NoError(t, secretsManagerError) assert.Error(t, err) - assert.Equal(t, analyzerManagerExecutionError.Error(), err.Error()) } func TestParseResults_EmptyResults(t *testing.T) { // Arrange - secretScanManager, _, secretsManagerError := newSecretsScanManager(&fakeServerDetails, &analyzerManagerMock{}) - secretScanManager.resultsFileName = filepath.Join("..", "..", "commands", "testdata", "secrets-scan", "no-secrets.sarif") + assert.NoError(t, rtutils.DownloadAnalyzerManagerIfNeeded()) + scanner, err := NewAdvancedSecurityScanner(nil, &fakeServerDetails) + assert.NoError(t, err) + defer func() { + if scanner.scannerDirCleanupFunc != nil { + assert.NoError(t, scanner.scannerDirCleanupFunc()) + } + }() + secretScanManager := newSecretsScanManager(scanner) + secretScanManager.scanner.resultsFileName = filepath.Join("..", "..", "commands", "testdata", 
"secrets-scan", "no-secrets.sarif") // Act - err := secretScanManager.setScanResults() + secretScanManager.secretsScannerResults, err = getIacOrSecretsScanResults(secretScanManager.scanner.resultsFileName, scanner.workingDirs[0], false) // Assert - assert.NoError(t, secretsManagerError) assert.NoError(t, err) assert.Empty(t, secretScanManager.secretsScannerResults) } func TestParseResults_ResultsContainSecrets(t *testing.T) { // Arrange - secretScanManager, _, secretsManagerError := newSecretsScanManager(&fakeServerDetails, &analyzerManagerMock{}) - secretScanManager.resultsFileName = filepath.Join("..", "..", "commands", "testdata", "secrets-scan", "contain-secrets.sarif") + assert.NoError(t, rtutils.DownloadAnalyzerManagerIfNeeded()) + scanner, err := NewAdvancedSecurityScanner(nil, &fakeServerDetails) + assert.NoError(t, err) + defer func() { + if scanner.scannerDirCleanupFunc != nil { + assert.NoError(t, scanner.scannerDirCleanupFunc()) + } + }() + secretScanManager := newSecretsScanManager(scanner) + secretScanManager.scanner.resultsFileName = filepath.Join("..", "..", "commands", "testdata", "secrets-scan", "contain-secrets.sarif") // Act - err := secretScanManager.setScanResults() + secretScanManager.secretsScannerResults, err = getIacOrSecretsScanResults(secretScanManager.scanner.resultsFileName, scanner.workingDirs[0], false) // Assert - assert.NoError(t, secretsManagerError) assert.NoError(t, err) assert.NotEmpty(t, secretScanManager.secretsScannerResults) - assert.Equal(t, 8, len(secretScanManager.secretsScannerResults)) + assert.Equal(t, 7, len(secretScanManager.secretsScannerResults)) } func TestGetSecretsScanResults_AnalyzerManagerReturnsError(t *testing.T) { + assert.NoError(t, rtutils.DownloadAnalyzerManagerIfNeeded()) + scanner, err := NewAdvancedSecurityScanner(nil, &fakeServerDetails) + assert.NoError(t, err) defer func() { - analyzerManagerExecutionError = nil + if scanner.scannerDirCleanupFunc != nil { + assert.NoError(t, scanner.scannerDirCleanupFunc()) + } }() + secretsResults, err := getSecretsScanResults(scanner) - // Arrange - analyzerManagerErrorMessage := "analyzer manager failure message" - analyzerManagerExecutionError = errors.New(analyzerManagerErrorMessage) - - // Act - secretsResults, entitledForSecrets, err := getSecretsScanResults(&fakeServerDetails, &analyzerManagerMock{}) - - // Assert assert.Error(t, err) - assert.Equal(t, fmt.Sprintf(secScanFailureMessage, analyzerManagerErrorMessage), err.Error()) + assert.ErrorContains(t, err, "failed to run Secrets scan") assert.Nil(t, secretsResults) - assert.True(t, entitledForSecrets) } func TestHideSecret(t *testing.T) { diff --git a/xray/audit/java/gradle-dep-tree.jar b/xray/audit/java/gradle-dep-tree.jar new file mode 100644 index 000000000..2762a71ba Binary files /dev/null and b/xray/audit/java/gradle-dep-tree.jar differ diff --git a/xray/audit/java/gradle.go b/xray/audit/java/gradle.go index a1a21592d..5db177045 100644 --- a/xray/audit/java/gradle.go +++ b/xray/audit/java/gradle.go @@ -1,43 +1,47 @@ package java import ( + _ "embed" "encoding/base64" "encoding/json" "errors" "fmt" + "os" + "os/exec" + "path/filepath" + "strings" + "github.com/jfrog/build-info-go/build" "github.com/jfrog/jfrog-cli-core/v2/artifactory/utils" "github.com/jfrog/jfrog-cli-core/v2/utils/config" "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" + "github.com/jfrog/jfrog-cli-core/v2/utils/ioutils" "github.com/jfrog/jfrog-client-go/auth" "github.com/jfrog/jfrog-client-go/utils/errorutils" 
"github.com/jfrog/jfrog-client-go/utils/io/fileutils" "github.com/jfrog/jfrog-client-go/utils/log" xrayUtils "github.com/jfrog/jfrog-client-go/xray/services/utils" - "os" - "os/exec" - "path/filepath" - "strings" ) const ( - remoteDepTreePath = "artifactory/oss-release-local" - gradlew = "gradlew" - depTreeInitFile = "gradledeptree.init" - depTreeOutputFile = "gradledeptree.out" - depTreeInitScript = `initscript { - repositories { %s + remoteDepTreePath = "artifactory/oss-release-local" + gradlew = "gradlew" + gradleDepTreeJarFile = "gradle-dep-tree.jar" + depTreeInitFile = "gradledeptree.init" + depTreeOutputFile = "gradledeptree.out" + depTreeInitScript = `initscript { + repositories { %s mavenCentral() - } - dependencies { - classpath 'com.jfrog:gradle-dep-tree:2.2.0' - } + } + dependencies { + classpath files('%s') + } } allprojects { repositories { %s } - apply plugin: com.jfrog.GradleDepTree + apply plugin: com.jfrog.GradleDepTree }` artifactoryRepository = ` maven { @@ -49,6 +53,9 @@ allprojects { }` ) +//go:embed gradle-dep-tree.jar +var gradleDepTreeJar []byte + type depTreeManager struct { dependenciesTree server *config.ServerDetails @@ -102,21 +109,20 @@ func (dtp *depTreeManager) appendDependenciesPaths(jsonDepTree []byte, fileName return nil } -func buildGradleDependencyTree(useWrapper bool, server *config.ServerDetails, depsRepo, releasesRepo string) (dependencyTree []*xrayUtils.GraphNode, err error) { - if (server != nil && server.IsEmpty()) || depsRepo == "" { - depsRepo, server, err = getGradleConfig() +func buildGradleDependencyTree(params *DependencyTreeParams) (dependencyTree []*xrayUtils.GraphNode, err error) { + manager := &depTreeManager{useWrapper: params.UseWrapper} + if params.IgnoreConfigFile { + // In case we don't need to use the gradle config file, + // use the server and depsRepo values that were usually given from Frogbot + manager.depsRepo = params.DepsRepo + manager.server = params.Server + } else { + manager.depsRepo, manager.server, err = getGradleConfig() if err != nil { return } } - manager := &depTreeManager{ - server: server, - releasesRepo: releasesRepo, - depsRepo: depsRepo, - useWrapper: useWrapper, - } - outputFileContent, err := manager.runGradleDepTree() if err != nil { return nil, err @@ -131,10 +137,7 @@ func (dtp *depTreeManager) runGradleDepTree() (outputFileContent []byte, err err return } defer func() { - e := fileutils.RemoveTempDir(depTreeDir) - if err == nil { - err = e - } + err = errors.Join(err, fileutils.RemoveTempDir(depTreeDir)) }() if dtp.useWrapper { @@ -152,23 +155,26 @@ func (dtp *depTreeManager) createDepTreeScriptAndGetDir() (tmpDir string, err er if err != nil { return } - if dtp.server != nil { - dtp.releasesRepo, dtp.depsRepo, err = getRemoteRepos(dtp.releasesRepo, dtp.depsRepo, dtp.server) - if err != nil { - return - } + dtp.releasesRepo, dtp.depsRepo, err = getRemoteRepos(dtp.depsRepo, dtp.server) + if err != nil { + return + } + gradleDepTreeJarPath := filepath.Join(tmpDir, string(gradleDepTreeJarFile)) + if err = errorutils.CheckError(os.WriteFile(gradleDepTreeJarPath, gradleDepTreeJar, 0666)); err != nil { + return } - depTreeInitScript := fmt.Sprintf(depTreeInitScript, dtp.releasesRepo, dtp.depsRepo) + gradleDepTreeJarPath = ioutils.DoubleWinPathSeparator(gradleDepTreeJarPath) + + depTreeInitScript := fmt.Sprintf(depTreeInitScript, dtp.releasesRepo, gradleDepTreeJarPath, dtp.depsRepo) return tmpDir, errorutils.CheckError(os.WriteFile(filepath.Join(tmpDir, depTreeInitFile), []byte(depTreeInitScript), 0666)) } // 
getRemoteRepos constructs the sections of Artifactory's remote repositories in the gradle-dep-tree init script. -// releasesRepoName - name of the remote repository that proxies https://releases.jfrog.io // depsRemoteRepo - name of the remote repository that proxies the dependencies server, e.g. maven central. // server - the Artifactory server details on which the repositories reside in. // Returns the constructed sections. -func getRemoteRepos(releasesRepo, depsRepo string, server *config.ServerDetails) (string, string, error) { - constructedReleasesRepo, err := constructReleasesRemoteRepo(releasesRepo, server) +func getRemoteRepos(depsRepo string, server *config.ServerDetails) (string, string, error) { + constructedReleasesRepo, err := constructReleasesRemoteRepo() if err != nil { return "", "", err } @@ -180,21 +186,20 @@ func getRemoteRepos(releasesRepo, depsRepo string, server *config.ServerDetails) return constructedReleasesRepo, constructedDepsRepo, nil } -func constructReleasesRemoteRepo(releasesRepo string, server *config.ServerDetails) (string, error) { - releasesServer := server - if releasesRepo == "" { - // Try to get releases repository from the environment variable - serverId, repoName, err := coreutils.GetServerIdAndRepo(coreutils.ReleasesRemoteEnv) - if err != nil || serverId == "" || repoName == "" { - return "", err - } - releasesServer, err = config.GetSpecificConfig(serverId, false, true) - if err != nil { - return "", err - } - releasesRepo = repoName +func constructReleasesRemoteRepo() (string, error) { + // Try to retrieve the serverID and remote repository that proxies https://releases.jfrog.io, from the environment variable + serverId, repoName, err := coreutils.GetServerIdAndRepo(coreutils.ReleasesRemoteEnv) + if err != nil || serverId == "" || repoName == "" { + return "", err } - releasesPath := fmt.Sprintf("%s/%s", releasesRepo, remoteDepTreePath) + + releasesServer, err := config.GetSpecificConfig(serverId, false, true) + if err != nil { + return "", err + } + + releasesPath := fmt.Sprintf("%s/%s", repoName, remoteDepTreePath) + log.Debug("The `gradledeptree` will be resolved from", repoName) return getDepTreeArtifactoryRepository(releasesPath, releasesServer) } @@ -263,7 +268,7 @@ func populateGradleDependencyTree(currNode *xrayUtils.GraphNode, currNodeChildre } func getDepTreeArtifactoryRepository(remoteRepo string, server *config.ServerDetails) (string, error) { - if remoteRepo == "" { + if remoteRepo == "" || server.IsEmpty() { return "", nil } pass := server.Password @@ -283,6 +288,7 @@ func getDepTreeArtifactoryRepository(remoteRepo string, server *config.ServerDet } return "", errors.New(errString) } + log.Debug("The project dependencies will be resolved from", server.ArtifactoryUrl, "from the", remoteRepo, "repository") return fmt.Sprintf(artifactoryRepository, strings.TrimSuffix(server.ArtifactoryUrl, "/"), remoteRepo, diff --git a/xray/audit/java/gradle_test.go b/xray/audit/java/gradle_test.go index c14c3a559..591e7ef88 100644 --- a/xray/audit/java/gradle_test.go +++ b/xray/audit/java/gradle_test.go @@ -3,19 +3,42 @@ package java import ( "errors" "fmt" - "github.com/jfrog/jfrog-cli-core/v2/utils/config" - testsutils "github.com/jfrog/jfrog-cli-core/v2/utils/config/tests" - "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" - "github.com/jfrog/jfrog-client-go/utils/io/fileutils" "os" "path/filepath" "testing" + "github.com/jfrog/jfrog-cli-core/v2/utils/config" + testsutils "github.com/jfrog/jfrog-cli-core/v2/utils/config/tests" + 
"github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" + "github.com/jfrog/jfrog-cli-core/v2/utils/ioutils" + "github.com/jfrog/jfrog-cli-core/v2/xray/audit" "github.com/stretchr/testify/assert" ) +const expectedInitScriptWithRepos = `initscript { + repositories { + mavenCentral() + } + dependencies { + classpath files('%s') + } +} + +allprojects { + repositories { + maven { + url "https://myartifactory.com/artifactory/deps-repo" + credentials { + username = '' + password = 'my-access-token' + } + } + } + apply plugin: com.jfrog.GradleDepTree +}` + func TestGradleTreesWithoutConfig(t *testing.T) { // Create and change directory to test workspace tempDirPath, cleanUp := audit.CreateTestWorkspace(t, "gradle-example-ci-server") @@ -23,7 +46,7 @@ func TestGradleTreesWithoutConfig(t *testing.T) { assert.NoError(t, os.Chmod(filepath.Join(tempDirPath, "gradlew"), 0700)) // Run getModulesDependencyTrees - modulesDependencyTrees, err := buildGradleDependencyTree(false, nil, "", "") + modulesDependencyTrees, err := buildGradleDependencyTree(&DependencyTreeParams{}) if assert.NoError(t, err) && assert.NotNil(t, modulesDependencyTrees) { assert.Len(t, modulesDependencyTrees, 5) // Check module @@ -46,7 +69,7 @@ func TestGradleTreesWithConfig(t *testing.T) { assert.NoError(t, os.Chmod(filepath.Join(tempDirPath, "gradlew"), 0700)) // Run getModulesDependencyTrees - modulesDependencyTrees, err := buildGradleDependencyTree(true, nil, "", "") + modulesDependencyTrees, err := buildGradleDependencyTree(&DependencyTreeParams{UseWrapper: true}) if assert.NoError(t, err) && assert.NotNil(t, modulesDependencyTrees) { assert.Len(t, modulesDependencyTrees, 5) @@ -70,7 +93,7 @@ func TestGradleTreesExcludeTestDeps(t *testing.T) { assert.NoError(t, os.Chmod(filepath.Join(tempDirPath, "gradlew"), 0700)) // Run getModulesDependencyTrees - modulesDependencyTrees, err := buildGradleDependencyTree(true, nil, "", "") + modulesDependencyTrees, err := buildGradleDependencyTree(&DependencyTreeParams{UseWrapper: true}) if assert.NoError(t, err) && assert.NotNil(t, modulesDependencyTrees) { assert.Len(t, modulesDependencyTrees, 5) @@ -192,65 +215,36 @@ func TestGetGraphFromDepTree(t *testing.T) { } func TestCreateDepTreeScript(t *testing.T) { - tmpDir, err := fileutils.CreateTempDir() - assert.NoError(t, err) - defer func() { - assert.NoError(t, fileutils.RemoveTempDir(tmpDir)) - }() - currDir, err := os.Getwd() - assert.NoError(t, err) - assert.NoError(t, os.Chdir(tmpDir)) - defer func() { - assert.NoError(t, os.Chdir(currDir)) - }() manager := &depTreeManager{} - tmpDir, err = manager.createDepTreeScriptAndGetDir() + tmpDir, err := manager.createDepTreeScriptAndGetDir() assert.NoError(t, err) defer func() { assert.NoError(t, os.Remove(filepath.Join(tmpDir, depTreeInitFile))) }() content, err := os.ReadFile(filepath.Join(tmpDir, depTreeInitFile)) assert.NoError(t, err) - assert.Equal(t, fmt.Sprintf(depTreeInitScript, "", ""), string(content)) + gradleDepTreeJarPath := ioutils.DoubleWinPathSeparator(filepath.Join(tmpDir, gradleDepTreeJarFile)) + assert.Equal(t, fmt.Sprintf(depTreeInitScript, "", gradleDepTreeJarPath, ""), string(content)) +} + +func TestCreateDepTreeScriptWithRepositories(t *testing.T) { + manager := &depTreeManager{} manager.depsRepo = "deps-repo" - manager.releasesRepo = "release-repo" manager.server = &config.ServerDetails{ + Url: "https://myartifactory.com/", ArtifactoryUrl: "https://myartifactory.com/artifactory", AccessToken: "my-access-token", } - tmpDir, err = manager.createDepTreeScriptAndGetDir() + 
tmpDir, err := manager.createDepTreeScriptAndGetDir() assert.NoError(t, err) - expectedInitScript := `initscript { - repositories { - maven { - url "https://myartifactory.com/artifactory/release-repo/artifactory/oss-release-local" - credentials { - username = '' - password = 'my-access-token' - } - } - mavenCentral() - } - dependencies { - classpath 'com.jfrog:gradle-dep-tree:2.2.0' - } -} + defer func() { + assert.NoError(t, os.Remove(filepath.Join(tmpDir, depTreeInitFile))) + }() -allprojects { - repositories { - maven { - url "https://myartifactory.com/artifactory/deps-repo" - credentials { - username = '' - password = 'my-access-token' - } - } - } - apply plugin: com.jfrog.GradleDepTree -}` - content, err = os.ReadFile(filepath.Join(tmpDir, depTreeInitFile)) + content, err := os.ReadFile(filepath.Join(tmpDir, depTreeInitFile)) assert.NoError(t, err) - assert.Equal(t, expectedInitScript, string(content)) + gradleDepTreeJarPath := ioutils.DoubleWinPathSeparator(filepath.Join(tmpDir, gradleDepTreeJarFile)) + assert.Equal(t, fmt.Sprintf(expectedInitScriptWithRepos, gradleDepTreeJarPath), string(content)) } func TestConstructReleasesRemoteRepo(t *testing.T) { @@ -264,21 +258,14 @@ func TestConstructReleasesRemoteRepo(t *testing.T) { err := config.SaveServersConf([]*config.ServerDetails{serverDetails}) assert.NoError(t, err) defer cleanUp() - server := &config.ServerDetails{ - ArtifactoryUrl: "https://myartifactory.com/artifactory", - User: "myuser", - Password: "mypass", - } testCases := []struct { - releasesRepo string envVar string expectedRepo string expectedErr error }{ - {releasesRepo: "", envVar: "", expectedRepo: "", expectedErr: nil}, - {releasesRepo: "", envVar: "test/repo1", expectedRepo: "\n\t\tmaven {\n\t\t\turl \"https://domain.com/artifactory/repo1/artifactory/oss-release-local\"\n\t\t\tcredentials {\n\t\t\t\tusername = 'user'\n\t\t\t\tpassword = 'pass'\n\t\t\t}\n\t\t}", expectedErr: nil}, - {releasesRepo: "", envVar: "notexist/repo1", expectedRepo: "", expectedErr: errors.New("Server ID 'notexist' does not exist.")}, - {releasesRepo: "repo2", envVar: "", expectedRepo: "\n\t\tmaven {\n\t\t\turl \"https://myartifactory.com/artifactory/repo2/artifactory/oss-release-local\"\n\t\t\tcredentials {\n\t\t\t\tusername = 'myuser'\n\t\t\t\tpassword = 'mypass'\n\t\t\t}\n\t\t}", expectedErr: nil}, + {envVar: "", expectedRepo: "", expectedErr: nil}, + {envVar: "test/repo1", expectedRepo: "\n\t\tmaven {\n\t\t\turl \"https://domain.com/artifactory/repo1/artifactory/oss-release-local\"\n\t\t\tcredentials {\n\t\t\t\tusername = 'user'\n\t\t\t\tpassword = 'pass'\n\t\t\t}\n\t\t}", expectedErr: nil}, + {envVar: "notexist/repo1", expectedRepo: "", expectedErr: errors.New("Server ID 'notexist' does not exist.")}, } for _, tc := range testCases { @@ -289,7 +276,7 @@ func TestConstructReleasesRemoteRepo(t *testing.T) { // Reset the environment variable after each test case assert.NoError(t, os.Unsetenv(coreutils.ReleasesRemoteEnv)) }() - actualRepo, actualErr := constructReleasesRemoteRepo(tc.releasesRepo, server) + actualRepo, actualErr := constructReleasesRemoteRepo() assert.Equal(t, tc.expectedRepo, actualRepo) assert.Equal(t, tc.expectedErr, actualErr) }() diff --git a/xray/audit/java/javautils.go b/xray/audit/java/javautils.go index 0e5e0d36b..93421aaf1 100644 --- a/xray/audit/java/javautils.go +++ b/xray/audit/java/javautils.go @@ -23,10 +23,8 @@ type DependencyTreeParams struct { IgnoreConfigFile bool ExcludeTestDeps bool UseWrapper bool - JavaProps map[string]any Server *config.ServerDetails 
DepsRepo string - ReleasesRepo string } func createBuildConfiguration(buildName string) (*artifactoryUtils.BuildConfiguration, func() error) { @@ -133,17 +131,9 @@ func hasLoop(idsAdded []string, idToAdd string) bool { func BuildDependencyTree(params *DependencyTreeParams) (modules []*xrayUtils.GraphNode, err error) { if params.Tool == coreutils.Maven { - return buildMvnDependencyTree(params.InsecureTls, params.IgnoreConfigFile, params.UseWrapper, params.JavaProps) + return buildMvnDependencyTree(params) } - server := &config.ServerDetails{} - depsRepo := "" - releaseRepo := "" - if params.IgnoreConfigFile { - server = params.Server - depsRepo = params.DepsRepo - releaseRepo = params.ReleasesRepo - } - return buildGradleDependencyTree(params.UseWrapper, server, depsRepo, releaseRepo) + return buildGradleDependencyTree(params) } type dependencyMultimap struct { diff --git a/xray/audit/java/mvn.go b/xray/audit/java/mvn.go index de62bad70..f4039d585 100644 --- a/xray/audit/java/mvn.go +++ b/xray/audit/java/mvn.go @@ -1,25 +1,26 @@ package java import ( + "errors" "fmt" "github.com/jfrog/jfrog-cli-core/v2/artifactory/utils" + "github.com/jfrog/jfrog-cli-core/v2/utils/config" "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" mvnutils "github.com/jfrog/jfrog-cli-core/v2/utils/mvn" + "github.com/jfrog/jfrog-client-go/auth" "github.com/jfrog/jfrog-client-go/utils/io/fileutils" "github.com/jfrog/jfrog-client-go/utils/log" xrayUtils "github.com/jfrog/jfrog-client-go/xray/services/utils" ) -func buildMvnDependencyTree(insecureTls, ignoreConfigFile, useWrapper bool, mvnProps map[string]any) (modules []*xrayUtils.GraphNode, err error) { +func buildMvnDependencyTree(params *DependencyTreeParams) (modules []*xrayUtils.GraphNode, err error) { buildConfiguration, cleanBuild := createBuildConfiguration("audit-mvn") defer func() { - e := cleanBuild() - if err == nil { - err = e - } + err = errors.Join(err, cleanBuild()) }() - err = runMvn(buildConfiguration, insecureTls, ignoreConfigFile, useWrapper, mvnProps) + mvnProps := CreateMvnProps(params.DepsRepo, params.Server) + err = runMvn(buildConfiguration, params.InsecureTls, params.IgnoreConfigFile, params.UseWrapper, mvnProps) if err != nil { return } @@ -27,6 +28,29 @@ func buildMvnDependencyTree(insecureTls, ignoreConfigFile, useWrapper bool, mvnP return createGavDependencyTree(buildConfiguration) } +func CreateMvnProps(resolverRepo string, serverDetails *config.ServerDetails) map[string]any { + if serverDetails == nil || serverDetails.IsEmpty() { + return nil + } + authPass := serverDetails.Password + if serverDetails.AccessToken != "" { + authPass = serverDetails.AccessToken + } + authUser := serverDetails.User + if authUser == "" { + authUser = auth.ExtractUsernameFromAccessToken(serverDetails.AccessToken) + } + return map[string]any{ + "resolver.username": authUser, + "resolver.password": authPass, + "resolver.url": serverDetails.ArtifactoryUrl, + "resolver.releaseRepo": resolverRepo, + "resolver.repo": resolverRepo, + "resolver.snapshotRepo": resolverRepo, + "buildInfoConfig.artifactoryResolutionEnabled": true, + } +} + func runMvn(buildConfiguration *utils.BuildConfiguration, insecureTls, ignoreConfigFile, useWrapper bool, mvnProps map[string]any) (err error) { goals := []string{"-B", "compile", "test-compile", "-Dcheckstyle.skip", "-Denforcer.skip"} log.Debug(fmt.Sprintf("mvn command goals: %v", goals)) diff --git a/xray/audit/java/mvn_test.go b/xray/audit/java/mvn_test.go index 539691efb..8ff234572 100644 --- a/xray/audit/java/mvn_test.go +++ 
b/xray/audit/java/mvn_test.go @@ -15,7 +15,7 @@ func TestMavenTreesMultiModule(t *testing.T) { defer cleanUp() // Run getModulesDependencyTrees - modulesDependencyTrees, err := buildMvnDependencyTree(false, true, false, nil) + modulesDependencyTrees, err := buildMvnDependencyTree(&DependencyTreeParams{IgnoreConfigFile: true}) if assert.NoError(t, err) && assert.NotEmpty(t, modulesDependencyTrees) { // Check root module multi := audit.GetAndAssertNode(t, modulesDependencyTrees, "org.jfrog.test:multi:3.7-SNAPSHOT") @@ -42,7 +42,7 @@ func TestMavenWrapperTrees(t *testing.T) { err := os.Chmod("mvnw", 0700) defer cleanUp() assert.NoError(t, err) - modulesDependencyTrees, err := buildMvnDependencyTree(false, true, true, nil) + modulesDependencyTrees, err := buildMvnDependencyTree(&DependencyTreeParams{IgnoreConfigFile: true, UseWrapper: true}) if assert.NoError(t, err) && assert.NotEmpty(t, modulesDependencyTrees) { // Check root module multi := audit.GetAndAssertNode(t, modulesDependencyTrees, "org.jfrog.test:multi:3.7-SNAPSHOT") diff --git a/xray/audit/npm/npm.go b/xray/audit/npm/npm.go index 1590412dc..cc1041966 100644 --- a/xray/audit/npm/npm.go +++ b/xray/audit/npm/npm.go @@ -13,8 +13,6 @@ import ( const ( npmPackageTypeIdentifier = "npm://" ignoreScriptsFlag = "--ignore-scripts" - // When parsing the npm depth tree, ensure that the requested paths are checked up to a certain depth to prevent an infinite loop. - MaxNpmRequestedByDepth = 2 ) func BuildDependencyTree(npmArgs []string) (dependencyTree []*xrayUtils.GraphNode, err error) { @@ -33,11 +31,16 @@ func BuildDependencyTree(npmArgs []string) (dependencyTree []*xrayUtils.GraphNod npmArgs = addIgnoreScriptsFlag(npmArgs) // Calculate npm dependencies - dependenciesList, err := biutils.CalculateNpmDependenciesList(npmExecutablePath, currentDir, packageInfo.BuildInfoModuleId(), npmArgs, false, log.Logger) + dependenciesMap, err := biutils.CalculateDependenciesMap(npmExecutablePath, currentDir, packageInfo.BuildInfoModuleId(), npmArgs, log.Logger) if err != nil { log.Info("Used npm version:", npmVersion.GetVersion()) return } + var dependenciesList []buildinfo.Dependency + for _, dependency := range dependenciesMap { + dependenciesList = append(dependenciesList, dependency.Dependency) + } + // Parse the dependencies into Xray dependency tree format dependencyTree = []*xrayUtils.GraphNode{parseNpmDependenciesList(dependenciesList, packageInfo)} return @@ -56,13 +59,10 @@ func parseNpmDependenciesList(dependencies []buildinfo.Dependency, packageInfo * treeMap := make(map[string][]string) for _, dependency := range dependencies { dependencyId := npmPackageTypeIdentifier + dependency.Id - for depth, requestedByNode := range dependency.RequestedBy { - if depth > MaxNpmRequestedByDepth { - continue - } + for _, requestedByNode := range dependency.RequestedBy { parent := npmPackageTypeIdentifier + requestedByNode[0] if children, ok := treeMap[parent]; ok { - treeMap[parent] = append(children, dependencyId) + treeMap[parent] = appendUniqueChild(children, dependencyId) } else { treeMap[parent] = []string{dependencyId} } @@ -70,3 +70,12 @@ func parseNpmDependenciesList(dependencies []buildinfo.Dependency, packageInfo * } return audit.BuildXrayDependencyTree(treeMap, npmPackageTypeIdentifier+packageInfo.BuildInfoModuleId()) } + +func appendUniqueChild(children []string, candidateDependency string) []string { + for _, existingChild := range children { + if existingChild == candidateDependency { + return children + } + } + return append(children, 
candidateDependency) +} diff --git a/xray/audit/yarn/yarn.go b/xray/audit/yarn/yarn.go index ddd45c7df..12d987982 100644 --- a/xray/audit/yarn/yarn.go +++ b/xray/audit/yarn/yarn.go @@ -2,7 +2,6 @@ package yarn import ( biUtils "github.com/jfrog/build-info-go/build/utils" - "github.com/jfrog/gofrog/version" "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" "github.com/jfrog/jfrog-cli-core/v2/xray/audit" "github.com/jfrog/jfrog-client-go/utils/errorutils" @@ -12,8 +11,6 @@ import ( const ( npmPackageTypeIdentifier = "npm://" - yarnV2Version = "2.0.0" - YarnV1ErrorPrefix = "jf audit is only supported for yarn v2 and above." ) func BuildDependencyTree() (dependencyTree []*xrayUtils.GraphNode, err error) { @@ -25,9 +22,6 @@ func BuildDependencyTree() (dependencyTree []*xrayUtils.GraphNode, err error) { if errorutils.CheckError(err) != nil { return } - if err = logAndValidateYarnVersion(executablePath); err != nil { - return - } packageInfo, err := biUtils.ReadPackageInfoFromPackageJson(currentDir, nil) if errorutils.CheckError(err) != nil { @@ -43,19 +37,6 @@ func BuildDependencyTree() (dependencyTree []*xrayUtils.GraphNode, err error) { return } -// Yarn audit is only supported from yarn v2. -func logAndValidateYarnVersion(executablePath string) error { - versionStr, err := audit.GetExecutableVersion(executablePath) - if errorutils.CheckError(err) != nil { - return err - } - yarnVer := version.NewVersion(versionStr) - if yarnVer.Compare(yarnV2Version) > 0 { - return errorutils.CheckErrorf(YarnV1ErrorPrefix + "The current version is: " + versionStr) - } - return nil -} - // Parse the dependencies into a Xray dependency tree format func parseYarnDependenciesMap(dependencies map[string]*biUtils.YarnDependency, rootXrayId string) (xrDependencyTree *xrayUtils.GraphNode) { treeMap := make(map[string][]string) diff --git a/xray/commands/audit/generic/auditmanager.go b/xray/commands/audit/generic/auditmanager.go index 6534532fb..55015a9ce 100644 --- a/xray/commands/audit/generic/auditmanager.go +++ b/xray/commands/audit/generic/auditmanager.go @@ -3,31 +3,27 @@ package audit import ( "errors" "fmt" - "github.com/jfrog/gofrog/version" - rtutils "github.com/jfrog/jfrog-cli-core/v2/artifactory/utils" - "github.com/jfrog/jfrog-cli-core/v2/xray/audit/jas" - "golang.org/x/sync/errgroup" - "os" - "path/filepath" - "strings" - "github.com/jfrog/build-info-go/utils/pythonutils" - "github.com/jfrog/jfrog-cli-core/v2/utils/config" + "github.com/jfrog/gofrog/datastructures" + rtutils "github.com/jfrog/jfrog-cli-core/v2/artifactory/utils" "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" "github.com/jfrog/jfrog-cli-core/v2/xray/audit" _go "github.com/jfrog/jfrog-cli-core/v2/xray/audit/go" + "github.com/jfrog/jfrog-cli-core/v2/xray/audit/jas" "github.com/jfrog/jfrog-cli-core/v2/xray/audit/java" "github.com/jfrog/jfrog-cli-core/v2/xray/audit/npm" "github.com/jfrog/jfrog-cli-core/v2/xray/audit/nuget" "github.com/jfrog/jfrog-cli-core/v2/xray/audit/python" "github.com/jfrog/jfrog-cli-core/v2/xray/audit/yarn" commandsutils "github.com/jfrog/jfrog-cli-core/v2/xray/commands/utils" - clientUtils "github.com/jfrog/jfrog-cli-core/v2/xray/utils" - "github.com/jfrog/jfrog-client-go/auth" + xrayutils "github.com/jfrog/jfrog-cli-core/v2/xray/utils" "github.com/jfrog/jfrog-client-go/utils/errorutils" "github.com/jfrog/jfrog-client-go/utils/log" + "github.com/jfrog/jfrog-client-go/xray" "github.com/jfrog/jfrog-client-go/xray/services" xrayCmdUtils "github.com/jfrog/jfrog-client-go/xray/services/utils" + 
"golang.org/x/sync/errgroup" + "os" ) type Params struct { @@ -36,14 +32,14 @@ type Params struct { installFunc func(tech string) error fixableOnly bool minSeverityFilter string - *clientUtils.GraphBasicParams + *xrayutils.GraphBasicParams xrayVersion string } func NewAuditParams() *Params { return &Params{ xrayGraphScanParams: &services.XrayGraphScanParams{}, - GraphBasicParams: &clientUtils.GraphBasicParams{}, + GraphBasicParams: &xrayutils.GraphBasicParams{}, } } @@ -68,7 +64,7 @@ func (params *Params) SetXrayGraphScanParams(xrayGraphScanParams *services.XrayG return params } -func (params *Params) SetGraphBasicParams(gbp *clientUtils.GraphBasicParams) *Params { +func (params *Params) SetGraphBasicParams(gbp *xrayutils.GraphBasicParams) *Params { params.GraphBasicParams = gbp return params } @@ -101,146 +97,128 @@ func (params *Params) SetMinSeverityFilter(minSeverityFilter string) *Params { return params } -func (params *Params) SetXrayVersion(version string) *Params { - params.xrayVersion = version - return params +type Results struct { + IsMultipleRootProject bool + ScaError error + JasError error + ExtendedScanResults *xrayutils.ExtendedScanResults +} + +func NewAuditResults() *Results { + return &Results{ExtendedScanResults: &xrayutils.ExtendedScanResults{}} } // Runs an audit scan based on the provided auditParams. // Returns an audit Results object containing all the scan results. // If the current server is entitled for JAS, the advanced security results will be included in the scan results. func RunAudit(auditParams *Params) (results *Results, err error) { + // Initialize Results struct + results = NewAuditResults() + serverDetails, err := auditParams.ServerDetails() if err != nil { return } - isEntitled, xrayVersion, err := isEntitledForJas(serverDetails) + var xrayManager *xray.XrayServicesManager + xrayManager, auditParams.xrayVersion, err = commandsutils.CreateXrayServiceManagerAndGetVersion(serverDetails) + if err != nil { + return + } + if err = coreutils.ValidateMinimumVersion(coreutils.Xray, auditParams.xrayVersion, commandsutils.GraphScanMinXrayVersion); err != nil { + return + } + results.ExtendedScanResults.EntitledForJas, err = isEntitledForJas(xrayManager, auditParams.xrayVersion) if err != nil { return } - auditParams.SetXrayVersion(xrayVersion) errGroup := new(errgroup.Group) - if isEntitled { + if results.ExtendedScanResults.EntitledForJas { // Download (if needed) the analyzer manager in a background routine. errGroup.Go(rtutils.DownloadAnalyzerManagerIfNeeded) } - // The audit scan doesn't require the analyzer manager, so it can run separately from the analyzer manager download routine. - scanResults, isMultipleRootProject, auditError := genericAudit(auditParams) + // The sca scan doesn't require the analyzer manager, so it can run separately from the analyzer manager download routine. + results.ScaError = runScaScan(auditParams, results) // Wait for the Download of the AnalyzerManager to complete. 
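A minimal standalone sketch of the errgroup pattern this hunk relies on (the names downloadBinary and scanDependencies below are placeholders, not the command's real functions): the analyzer manager download is started on a background goroutine while the SCA scan proceeds on the calling goroutine, and Wait is consulted only once the downloaded binary is actually needed.

package main

import (
	"fmt"
	"time"

	"golang.org/x/sync/errgroup"
)

// downloadBinary stands in for the background download routine.
func downloadBinary() error {
	time.Sleep(100 * time.Millisecond)
	return nil
}

// scanDependencies stands in for the SCA scan, which does not need the downloaded binary.
func scanDependencies() error {
	time.Sleep(50 * time.Millisecond)
	return nil
}

func main() {
	errGroup := new(errgroup.Group)
	errGroup.Go(downloadBinary)  // download in the background
	scaErr := scanDependencies() // runs concurrently on this goroutine
	if err := errGroup.Wait(); err != nil { // block only when the binary is required
		fmt.Println("background download failed:", err)
		return
	}
	fmt.Println("sca scan error:", scaErr)
}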
if err = errGroup.Wait(); err != nil { return } - extendedScanResults := &clientUtils.ExtendedScanResults{XrayResults: scanResults} - // Try to run contextual analysis only if the user is entitled for advance security - if isEntitled { - extendedScanResults, err = jas.GetExtendedScanResults(scanResults, auditParams.FullDependenciesTree(), serverDetails) - if err != nil { - return - } - } - results = &Results{ - IsMultipleRootProject: isMultipleRootProject, - AuditError: auditError, - ExtendedScanResults: extendedScanResults, + // Run scanners only if the user is entitled for Advanced Security + if results.ExtendedScanResults.EntitledForJas { + results.JasError = jas.RunScannersAndSetResults(results.ExtendedScanResults, auditParams.DirectDependencies(), serverDetails, auditParams.workingDirs, auditParams.Progress()) } return } -func isEntitledForJas(serverDetails *config.ServerDetails) (entitled bool, xrayVersion string, err error) { - xrayManager, xrayVersion, err := commandsutils.CreateXrayServiceManagerAndGetVersion(serverDetails) - if err != nil { - return - } - if !version.NewVersion(xrayVersion).AtLeast(clientUtils.EntitlementsMinVersion) { - log.Debug("Entitlements check for ‘Advanced Security’ package failed:") - log.Debug(coreutils.MinimumVersionMsg, coreutils.Xray, xrayVersion, clientUtils.EntitlementsMinVersion) +func isEntitledForJas(xrayManager *xray.XrayServicesManager, xrayVersion string) (entitled bool, err error) { + if e := coreutils.ValidateMinimumVersion(coreutils.Xray, xrayVersion, xrayutils.EntitlementsMinVersion); e != nil { + log.Debug(e) return } - entitled, err = xrayManager.IsEntitled(clientUtils.ApplicabilityFeatureId) + entitled, err = xrayManager.IsEntitled(xrayutils.ApplicabilityFeatureId) return } -// genericAudit audits all the projects found in the given workingDirs -func genericAudit(params *Params) (results []services.ScanResponse, isMultipleRoot bool, err error) { - if err = coreutils.ValidateMinimumVersion(coreutils.Xray, params.xrayVersion, commandsutils.GraphScanMinXrayVersion); err != nil { - return - } - log.Info("JFrog Xray version is:", params.xrayVersion) - - if len(params.workingDirs) == 0 { - log.Info("Auditing project...") - return doAudit(params) - } - - return auditMultipleWorkingDirs(params) -} - -func auditMultipleWorkingDirs(params *Params) (results []services.ScanResponse, isMultipleRoot bool, err error) { - projectDir, err := os.Getwd() +func runScaScan(params *Params, results *Results) (err error) { + rootDir, err := os.Getwd() if errorutils.CheckError(err) != nil { return } - defer func() { - err = errors.Join(err, os.Chdir(projectDir)) - }() - var errorList strings.Builder for _, wd := range params.workingDirs { - absWd, e := filepath.Abs(wd) - if e != nil { - errorList.WriteString(fmt.Sprintf("the audit command couldn't find the following path: %s\n%s\n", wd, e.Error())) - continue + if len(params.workingDirs) > 1 { + log.Info("Running SCA scan for vulnerable dependencies in", wd, "directory...") + } else { + log.Info("Running SCA scan for vulnerable dependencies...") } - log.Info("Auditing project:", absWd, "...") - e = os.Chdir(absWd) - if e != nil { - errorList.WriteString(fmt.Sprintf("the audit command couldn't change the current working directory to the following path: %s\n%s\n", absWd, e.Error())) + wdScanErr := runScaScanOnWorkingDir(params, results, wd, rootDir) + if wdScanErr != nil { + err = errors.Join(err, fmt.Errorf("audit command in '%s' failed:\n%s\n", wd, wdScanErr.Error())) continue } - - techResults,
isMultipleRootProject, e := doAudit(params) - if e != nil { - errorList.WriteString(fmt.Sprintf("audit command in %s failed:\n%s\n", absWd, e.Error())) - continue - } - - results = append(results, techResults...) - isMultipleRoot = isMultipleRootProject } - - if errorList.Len() > 0 { - err = errorutils.CheckErrorf(errorList.String()) - } - return } // Audits the project found in the current directory using Xray. -func doAudit(params *Params) (results []services.ScanResponse, isMultipleRoot bool, err error) { - // If no technologies were given, try to detect all types of technologies used. - // Otherwise, run audit for requested technologies only. - technologies := params.Technologies() +func runScaScanOnWorkingDir(params *Params, results *Results, workingDir, rootDir string) (err error) { + err = os.Chdir(workingDir) + if err != nil { + return + } + defer func() { + err = errors.Join(err, os.Chdir(rootDir)) + }() + + var technologies []string + requestedTechnologies := params.Technologies() + if len(requestedTechnologies) != 0 { + technologies = requestedTechnologies + } else { + technologies = commandsutils.DetectedTechnologies() + } if len(technologies) == 0 { - technologies, err = commandsutils.DetectedTechnologies() - if err != nil { - return - } + log.Info("Couldn't determine a package manager or build tool used by this project. Skipping the SCA scan...") + return } - var errorList strings.Builder serverDetails, err := params.ServerDetails() if err != nil { return } + for _, tech := range coreutils.ToTechnologies(technologies) { if tech == coreutils.Dotnet { continue } - flattenTree, e := GetTechDependencyTree(params.GraphBasicParams, tech) - if e != nil { - errorList.WriteString(fmt.Sprintf("audit failed while building %s dependency tree:\n%s\n", tech, e.Error())) + flattenTree, fullDependencyTrees, techErr := GetTechDependencyTree(params.GraphBasicParams, tech) + if techErr != nil { + err = errors.Join(err, fmt.Errorf("failed while building '%s' dependency tree:\n%s\n", tech, techErr.Error())) + continue + } + if len(flattenTree) == 0 { + err = errors.Join(err, errors.New("no dependencies were found. Please try to build your project and re-run the audit command")) continue } @@ -250,22 +228,39 @@ func doAudit(params *Params) (results []services.ScanResponse, isMultipleRoot bo SetXrayVersion(params.xrayVersion). SetFixableOnly(params.fixableOnly). SetSeverityLevel(params.minSeverityFilter) - techResults, e := audit.Audit(flattenTree, params.Progress(), tech, scanGraphParams) - if e != nil { - errorList.WriteString(fmt.Sprintf("'%s' audit request failed:\n%s\n", tech, e.Error())) + techResults, techErr := audit.RunXrayDependenciesTreeScanGraph(flattenTree, params.Progress(), tech, scanGraphParams) + if techErr != nil { + err = errors.Join(err, fmt.Errorf("'%s' Xray dependency tree scan request failed:\n%s\n", tech, techErr.Error())) continue } - techResults = audit.BuildImpactPathsForScanResponse(techResults, params.FullDependenciesTree()) - results = append(results, techResults...) 
- isMultipleRoot = len(flattenTree) > 1 - } - if errorList.Len() > 0 { - err = errorutils.CheckErrorf(errorList.String()) + techResults = audit.BuildImpactPathsForScanResponse(techResults, fullDependencyTrees) + if tech == coreutils.Pip { + params.AppendDirectDependencies(getDirectDependenciesFromTree(flattenTree)) + + } else { + params.AppendDirectDependencies(getDirectDependenciesFromTree(fullDependencyTrees)) + } + results.ExtendedScanResults.XrayResults = append(results.ExtendedScanResults.XrayResults, techResults...) + if !results.IsMultipleRootProject { + results.IsMultipleRootProject = len(flattenTree) > 1 + } + results.ExtendedScanResults.ScannedTechnologies = append(results.ExtendedScanResults.ScannedTechnologies, tech) } return } -func GetTechDependencyTree(params *clientUtils.GraphBasicParams, tech coreutils.Technology) (flatTree []*xrayCmdUtils.GraphNode, err error) { +// This function retrieves the dependency trees of the scanned project and extracts a set that contains only the direct dependencies. +func getDirectDependenciesFromTree(dependencyTrees []*xrayCmdUtils.GraphNode) []string { + directDependencies := datastructures.MakeSet[string]() + for _, tree := range dependencyTrees { + for _, node := range tree.Nodes { + directDependencies.Add(node.Id) + } + } + return directDependencies.ToSlice() +} + +func GetTechDependencyTree(params *xrayutils.GraphBasicParams, tech coreutils.Technology) (flatTree []*xrayCmdUtils.GraphNode, fullDependencyTrees []*xrayCmdUtils.GraphNode, err error) { if params.Progress() != nil { params.Progress().SetHeadlineMsg(fmt.Sprintf("Calculating %v dependencies", tech.ToFormal())) } @@ -273,75 +268,46 @@ func GetTechDependencyTree(params *clientUtils.GraphBasicParams, tech coreutils. if err != nil { return } - var dependencyTrees []*xrayCmdUtils.GraphNode switch tech { case coreutils.Maven, coreutils.Gradle: - dependencyTrees, err = getJavaDependencyTree(params, tech) + fullDependencyTrees, err = getJavaDependencyTree(params, tech) case coreutils.Npm: - dependencyTrees, err = npm.BuildDependencyTree(params.Args()) + fullDependencyTrees, err = npm.BuildDependencyTree(params.Args()) case coreutils.Yarn: - dependencyTrees, err = yarn.BuildDependencyTree() + fullDependencyTrees, err = yarn.BuildDependencyTree() case coreutils.Go: - dependencyTrees, err = _go.BuildDependencyTree(serverDetails, params.DepsRepo()) + fullDependencyTrees, err = _go.BuildDependencyTree(serverDetails, params.DepsRepo()) case coreutils.Pipenv, coreutils.Pip, coreutils.Poetry: - dependencyTrees, err = python.BuildDependencyTree(&python.AuditPython{ + fullDependencyTrees, err = python.BuildDependencyTree(&python.AuditPython{ Server: serverDetails, Tool: pythonutils.PythonTool(tech), RemotePypiRepo: params.DepsRepo(), PipRequirementsFile: params.PipRequirementsFile()}) case coreutils.Nuget: - dependencyTrees, err = nuget.BuildDependencyTree() + fullDependencyTrees, err = nuget.BuildDependencyTree() default: err = errorutils.CheckErrorf("%s is currently not supported", string(tech)) } if err != nil { - return nil, err + return nil, nil, err } - // Save the full dependencyTree to build impact paths for vulnerable dependencies - params.SetFullDependenciesTree(dependencyTrees) - // Flatten the graph to speed up the ScanGraph request - return services.FlattenGraph(dependencyTrees) + flatTree, err = services.FlattenGraph(fullDependencyTrees) + return } -func getJavaDependencyTree(params *clientUtils.GraphBasicParams, tech coreutils.Technology) ([]*xrayCmdUtils.GraphNode, error) { - var 
javaProps map[string]any +func getJavaDependencyTree(params *xrayutils.GraphBasicParams, tech coreutils.Technology) ([]*xrayCmdUtils.GraphNode, error) { serverDetails, err := params.ServerDetails() if err != nil { return nil, err } - if params.DepsRepo() != "" && tech == coreutils.Maven { - javaProps = CreateJavaProps(params.DepsRepo(), serverDetails) - } return java.BuildDependencyTree(&java.DependencyTreeParams{ Tool: tech, InsecureTls: params.InsecureTls(), IgnoreConfigFile: params.IgnoreConfigFile(), ExcludeTestDeps: params.ExcludeTestDependencies(), UseWrapper: params.UseWrapper(), - JavaProps: javaProps, Server: serverDetails, DepsRepo: params.DepsRepo(), - ReleasesRepo: params.ReleasesRepo(), }) } - -func CreateJavaProps(depsRepo string, serverDetails *config.ServerDetails) map[string]any { - authPass := serverDetails.Password - if serverDetails.AccessToken != "" { - authPass = serverDetails.AccessToken - } - authUser := serverDetails.User - if authUser == "" { - authUser = auth.ExtractUsernameFromAccessToken(serverDetails.AccessToken) - } - return map[string]any{ - "resolver.username": authUser, - "resolver.password": authPass, - "resolver.url": serverDetails.ArtifactoryUrl, - "resolver.releaseRepo": depsRepo, - "resolver.repo": depsRepo, - "resolver.snapshotRepo": depsRepo, - "buildInfoConfig.artifactoryResolutionEnabled": true, - } -} diff --git a/xray/commands/audit/generic/auditmanager_test.go b/xray/commands/audit/generic/auditmanager_test.go new file mode 100644 index 000000000..3e3d198d7 --- /dev/null +++ b/xray/commands/audit/generic/auditmanager_test.go @@ -0,0 +1,44 @@ +package audit + +import ( + xrayUtils "github.com/jfrog/jfrog-client-go/xray/services/utils" + "github.com/stretchr/testify/assert" + "testing" +) + +func TestGetDirectDependenciesList(t *testing.T) { + tests := []struct { + dependenciesTrees []*xrayUtils.GraphNode + expectedResult []string + }{ + { + dependenciesTrees: nil, + expectedResult: []string{}, + }, + { + dependenciesTrees: []*xrayUtils.GraphNode{ + {Id: "parent_node_id", Nodes: []*xrayUtils.GraphNode{ + {Id: "issueId_1_direct_dependency", Nodes: []*xrayUtils.GraphNode{{Id: "issueId_1_non_direct_dependency"}}}, + {Id: "issueId_2_direct_dependency", Nodes: nil}, + }, + }, + }, + expectedResult: []string{"issueId_1_direct_dependency", "issueId_2_direct_dependency"}, + }, + { + dependenciesTrees: []*xrayUtils.GraphNode{ + {Id: "parent_node_id", Nodes: []*xrayUtils.GraphNode{ + {Id: "issueId_1_direct_dependency", Nodes: nil}, + {Id: "issueId_2_direct_dependency", Nodes: nil}, + }, + }, + }, + expectedResult: []string{"issueId_1_direct_dependency", "issueId_2_direct_dependency"}, + }, + } + + for _, test := range tests { + result := getDirectDependenciesFromTree(test.dependenciesTrees) + assert.ElementsMatch(t, test.expectedResult, result) + } +} diff --git a/xray/commands/audit/generic/generic.go b/xray/commands/audit/generic/generic.go index 16b7e4fa4..4e182632b 100644 --- a/xray/commands/audit/generic/generic.go +++ b/xray/commands/audit/generic/generic.go @@ -1,6 +1,7 @@ package audit import ( + "errors" "os" "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" @@ -19,12 +20,6 @@ type GenericAuditCommand struct { Params } -type Results struct { - IsMultipleRootProject bool - AuditError error - ExtendedScanResults *xrutils.ExtendedScanResults -} - func NewGenericAuditCommand() *GenericAuditCommand { return &GenericAuditCommand{Params: *NewAuditParams()} } @@ -81,15 +76,19 @@ func (auditCmd *GenericAuditCommand) CreateXrayGraphScanParams() 
*services.XrayG } func (auditCmd *GenericAuditCommand) Run() (err error) { + workingDirs, err := xrutils.GetFullPathsWorkingDirs(auditCmd.workingDirs) + if err != nil { + return + } auditParams := NewAuditParams(). SetXrayGraphScanParams(auditCmd.CreateXrayGraphScanParams()). - SetWorkingDirs(auditCmd.workingDirs). + SetWorkingDirs(workingDirs). SetMinSeverityFilter(auditCmd.minSeverityFilter). SetFixableOnly(auditCmd.fixableOnly). SetGraphBasicParams(auditCmd.GraphBasicParams) auditResults, err := RunAudit(auditParams) if err != nil { - return err + return } if auditCmd.Progress() != nil { if err = auditCmd.Progress().Quit(); err != nil { @@ -98,10 +97,10 @@ func (auditCmd *GenericAuditCommand) Run() (err error) { } var messages []string if !auditResults.ExtendedScanResults.EntitledForJas { - messages = []string{coreutils.PrintTitle("The ‘jf audit’ command also supports the ‘Contextual Analysis’ feature, which is included as part of the ‘Advanced Security’ package. This package isn't enabled on your system. Read more - ") + coreutils.PrintLink("https://jfrog.com/security-and-compliance")} + messages = []string{coreutils.PrintTitle("The ‘jf audit’ command also supports JFrog Advanced Security features, such as 'Contextual Analysis', 'Secret Detection', 'IaC Scan'.\nThis feature isn't enabled on your system. Read more - ") + coreutils.PrintLink("https://jfrog.com/xray/")} } - // Print Scan results on all cases except if errors accrued on Generic Audit command and no security/license issues found. - printScanResults := !(auditResults.AuditError != nil && xrutils.IsEmptyScanResponse(auditResults.ExtendedScanResults.XrayResults)) + // Print Scan results on all cases except if errors accrued on SCA scan and no security/license issues found. + printScanResults := !(auditResults.ScaError != nil && xrutils.IsEmptyScanResponse(auditResults.ExtendedScanResults.XrayResults)) if printScanResults { err = xrutils.PrintScanResults(auditResults.ExtendedScanResults, nil, @@ -115,8 +114,7 @@ func (auditCmd *GenericAuditCommand) Run() (err error) { return } } - if auditResults.AuditError != nil { - err = auditResults.AuditError + if err = errors.Join(auditResults.ScaError, auditResults.JasError); err != nil { return } diff --git a/xray/commands/curation/audit.go b/xray/commands/curation/audit.go index 970df2076..9529f59d9 100644 --- a/xray/commands/curation/audit.go +++ b/xray/commands/curation/audit.go @@ -4,6 +4,7 @@ import ( "encoding/json" "errors" "fmt" + "github.com/jfrog/gofrog/datastructures" "github.com/jfrog/gofrog/parallel" rtUtils "github.com/jfrog/jfrog-cli-core/v2/artifactory/utils" "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" @@ -32,9 +33,8 @@ const ( BlockingReasonPolicy = "Policy violations" BlockingReasonNotFound = "Package pending update" - totalConcurrentRequests = 10 - directRelation = "direct" - indirectRelation = "indirect" + directRelation = "direct" + indirectRelation = "indirect" BlockMessageKey = "jfrog packages curation" NotBeingFoundKey = "not being found" @@ -64,35 +64,33 @@ type PackageStatus struct { Action string `json:"action"` ParentName string `json:"direct_dependency_package_name"` ParentVersion string `json:"direct_dependency_package_version"` - BlockedPackageUrl string `json:"blocked_package_url"` + BlockedPackageUrl string `json:"blocked_package_url,omitempty"` PackageName string `json:"blocked_package_name"` PackageVersion string `json:"blocked_package_version"` BlockingReason string `json:"blocking_reason"` DepRelation string `json:"dependency_relation"` 
PkgType string `json:"type"` - Policy []Policy `json:"policies"` + Policy []Policy `json:"policies,omitempty"` } type Policy struct { - Policy string `json:"policy"` - Condition string `json:"condition"` + Policy string `json:"policy"` + Condition string `json:"condition"` + Explanation string `json:"explanation"` + Recommendation string `json:"recommendation"` } type PackageStatusTable struct { - Status string `col-name:"Action"` - ParentName string `col-name:"Direct Dependency\nPackage Name"` - ParentVersion string `col-name:"Direct Dependency\nPackage Version"` - BlockedPackageUrl string `col-name:"Blocked Package URL"` - PackageName string `col-name:"Blocked Package\nName"` - PackageVersion string `col-name:"Blocked Package\nVersion"` - BlockingReason string `col-name:"Blocking Reason"` - PkgType string `col-name:"Package Type"` - Policy []policyTable `embed-table:"true"` -} - -type policyTable struct { - Policy string `col-name:"Violated Policy\nName"` - Condition string `col-name:"Violated Condition\nName"` + ParentName string `col-name:"Direct\nDependency\nPackage\nName" auto-merge:"true"` + ParentVersion string `col-name:"Direct\nDependency\nPackage\nVersion" auto-merge:"true"` + PackageName string `col-name:"Blocked\nPackage\nName" auto-merge:"true"` + PackageVersion string `col-name:"Blocked\nPackage\nVersion" auto-merge:"true"` + BlockingReason string `col-name:"Blocking Reason" auto-merge:"true"` + PkgType string `col-name:"Package\nType" auto-merge:"true"` + Policy string `col-name:"Violated\nPolicy\nName"` + Condition string `col-name:"Violated Condition\nName"` + Explanation string `col-name:"Explanation"` + Recommendation string `col-name:"Recommendation"` } type treeAnalyzer struct { @@ -180,16 +178,13 @@ func (ca *CurationAuditCommand) Run() (err error) { } func (ca *CurationAuditCommand) doCurateAudit(results map[string][]*PackageStatus) error { - techs, err := cmdUtils.DetectedTechnologies() - if err != nil { - return err - } + techs := cmdUtils.DetectedTechnologies() for _, tech := range techs { if _, ok := supportedTech[coreutils.Technology(tech)]; !ok { log.Info(fmt.Sprintf(errorTemplateUnsupportedTech, tech)) continue } - if err = ca.auditTree(coreutils.Technology(tech), results); err != nil { + if err := ca.auditTree(coreutils.Technology(tech), results); err != nil { return err } } @@ -197,12 +192,12 @@ func (ca *CurationAuditCommand) doCurateAudit(results map[string][]*PackageStatu } func (ca *CurationAuditCommand) auditTree(tech coreutils.Technology, results map[string][]*PackageStatus) error { - flattenGraph, err := audit.GetTechDependencyTree(ca.GraphBasicParams, tech) + flattenGraph, fullDependenciesTree, err := audit.GetTechDependencyTree(ca.GraphBasicParams, tech) if err != nil { return err } // Validate the graph isn't empty. 
- if len(ca.FullDependenciesTree()) == 0 { + if len(fullDependenciesTree) == 0 { return errorutils.CheckErrorf("found no dependencies for the audited project using '%v' as the package manager", tech.ToString()) } if err = ca.SetRepo(tech); err != nil { @@ -221,10 +216,14 @@ func (ca *CurationAuditCommand) auditTree(tech coreutils.Technology, results map if err != nil { return err } - _, projectName, projectVersion := getUrlNameAndVersionByTech(tech, ca.FullDependenciesTree()[0].Id, "", "") + rootNode := fullDependenciesTree[0] + _, projectName, projectScope, projectVersion := getUrlNameAndVersionByTech(tech, rootNode.Id, "", "") if ca.Progress() != nil { ca.Progress().SetHeadlineMsg(fmt.Sprintf("Fetch curation status for %s graph with %v nodes project name: %s:%s", tech.ToFormal(), len(flattenGraph[0].Nodes)-1, projectName, projectVersion)) } + if projectScope != "" { + projectName = projectScope + "/" + projectName + } if ca.parallelRequests == 0 { ca.parallelRequests = cmdUtils.TotalConcurrentRequests } @@ -240,12 +239,10 @@ func (ca *CurationAuditCommand) auditTree(tech coreutils.Technology, results map parallelRequests: ca.parallelRequests, } packagesStatusMap := sync.Map{} - // Root node id represents the project name and shouldn't be validated with curation - rootNodeId := ca.FullDependenciesTree()[0].Id // Fetch status for each node from a flatten graph which, has no duplicate nodes. - err = analyzer.fetchNodesStatus(flattenGraph[0], &packagesStatusMap, rootNodeId) - analyzer.fillGraphRelations(ca.FullDependenciesTree()[0], &packagesStatusMap, - &packagesStatus, "", "", true) + err = analyzer.fetchNodesStatus(flattenGraph[0], &packagesStatusMap, rootNode.Id) + analyzer.fillGraphRelations(rootNode, &packagesStatusMap, + &packagesStatus, "", "", datastructures.MakeSet[string](), true) sort.Slice(packagesStatus, func(i, j int) bool { return packagesStatus[i].ParentName < packagesStatus[j].ParentName }) @@ -257,7 +254,7 @@ func printResult(format utils.OutputFormat, projectPath string, packagesStatus [ if format == "" { format = utils.Table } - log.Output(fmt.Sprintf("Found %v blocked packges for project %s", len(packagesStatus), projectPath)) + log.Output(fmt.Sprintf("Found %v blocked packages for project %s", len(packagesStatus), projectPath)) switch format { case utils.Json: if len(packagesStatus) > 0 { @@ -279,24 +276,34 @@ func printResult(format utils.OutputFormat, projectPath string, packagesStatus [ func convertToPackageStatusTable(packagesStatus []*PackageStatus) []PackageStatusTable { var pkgStatusTable []PackageStatusTable - for _, pkgStatus := range packagesStatus { + for index, pkgStatus := range packagesStatus { + // We use auto-merge supported by the 'go-pretty' library. It doesn't have an option to merge lines by a group of unique fields. + // In order to so, we make each group merge only with itself by adding or not adding space. This way, it won't be merged with the next group. 
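A minimal standalone sketch of the alternating-separator trick described in the comment above (illustrative values only, not the command's actual table code): rows belonging to the same direct-dependency group share identical cell text, while every other group carries a trailing space, so a renderer that auto-merges only identical adjacent cells never merges across group boundaries.

package main

import "fmt"

// groupCell returns the display text for a merged column. All rows of one group
// get identical text; alternating groups differ by a trailing space, so identical
// values in adjacent groups are not auto-merged together.
func groupCell(value string, groupIndex int) string {
	if groupIndex%2 == 0 {
		return value + " "
	}
	return value
}

func main() {
	// Two adjacent groups that happen to block the same package name and version.
	fmt.Printf("%q\n", groupCell("lodash 4.17.21", 0)) // "lodash 4.17.21 "
	fmt.Printf("%q\n", groupCell("lodash 4.17.21", 1)) // "lodash 4.17.21"
}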
+ uniqLineSep := "" + if index%2 == 0 { + uniqLineSep = " " + } pkgTable := PackageStatusTable{ - Status: pkgStatus.Action, - ParentName: pkgStatus.ParentName, - ParentVersion: pkgStatus.ParentVersion, - BlockedPackageUrl: pkgStatus.BlockedPackageUrl, - PackageName: pkgStatus.PackageName, - PackageVersion: pkgStatus.PackageVersion, - BlockingReason: pkgStatus.BlockingReason, - PkgType: pkgStatus.PkgType, + ParentName: pkgStatus.ParentName + uniqLineSep, + ParentVersion: pkgStatus.ParentVersion + uniqLineSep, + PackageName: pkgStatus.PackageName + uniqLineSep, + PackageVersion: pkgStatus.PackageVersion + uniqLineSep, + BlockingReason: pkgStatus.BlockingReason + uniqLineSep, + PkgType: pkgStatus.PkgType + uniqLineSep, + } + if len(pkgStatus.Policy) == 0 { + pkgStatusTable = append(pkgStatusTable, pkgTable) + continue } - var policiesCondTable []policyTable for _, policyCond := range pkgStatus.Policy { - policiesCondTable = append(policiesCondTable, policyTable(policyCond)) + pkgTable.Policy = policyCond.Policy + pkgTable.Explanation = policyCond.Explanation + pkgTable.Recommendation = policyCond.Recommendation + pkgTable.Condition = policyCond.Condition + pkgStatusTable = append(pkgStatusTable, pkgTable) } - pkgTable.Policy = policiesCondTable - pkgStatusTable = append(pkgStatusTable, pkgTable) } + return pkgStatusTable } @@ -312,8 +319,8 @@ func (ca *CurationAuditCommand) SetRepo(tech coreutils.Technology) error { return err } if !exists { - return errorutils.CheckError(errors.New("no config file was found! Before running the npm command on a " + - "project for the first time, the project should be configured using the 'jf npmc' command")) + return errorutils.CheckErrorf("no config file was found! Before running the npm command on a " + + "project for the first time, the project should be configured using the 'jf npmc' command") } vConfig, err := rtUtils.ReadConfigFile(configFilePath, rtUtils.YAML) if err != nil { @@ -331,13 +338,21 @@ func (ca *CurationAuditCommand) SetRepo(tech coreutils.Technology) error { } func (nc *treeAnalyzer) fillGraphRelations(node *xrayUtils.GraphNode, preProcessMap *sync.Map, - packagesStatus *[]*PackageStatus, parent, parentVersion string, isRoot bool) { + packagesStatus *[]*PackageStatus, parent, parentVersion string, visited *datastructures.Set[string], isRoot bool) { for _, child := range node.Nodes { - packageUrl, name, version := getUrlNameAndVersionByTech(nc.tech, child.Id, nc.url, nc.repo) + packageUrl, name, scope, version := getUrlNameAndVersionByTech(nc.tech, child.Id, nc.url, nc.repo) if isRoot { parent = name parentVersion = version + if scope != "" { + parent = scope + "/" + parent + } } + if visited.Exists(scope + name + version + "-" + parent + parentVersion) { + continue + } + + visited.Add(scope + name + version + "-" + parent + parentVersion) if pkgStatus, exist := preProcessMap.Load(packageUrl); exist { relation := indirectRelation if isRoot { @@ -352,7 +367,7 @@ func (nc *treeAnalyzer) fillGraphRelations(node *xrayUtils.GraphNode, preProcess *packagesStatus = append(*packagesStatus, &pkgStatusClone) } } - nc.fillGraphRelations(child, preProcessMap, packagesStatus, parent, parentVersion, false) + nc.fillGraphRelations(child, preProcessMap, packagesStatus, parent, parentVersion, visited, false) } } func (nc *treeAnalyzer) fetchNodesStatus(graph *xrayUtils.GraphNode, p *sync.Map, rootNodeId string) error { @@ -383,7 +398,10 @@ func (nc *treeAnalyzer) fetchNodesStatus(graph *xrayUtils.GraphNode, p *sync.Map } func (nc *treeAnalyzer) 
fetchNodeStatus(node xrayUtils.GraphNode, p *sync.Map) error { - packageUrl, name, version := getUrlNameAndVersionByTech(nc.tech, node.Id, nc.url, nc.repo) + packageUrl, name, scope, version := getUrlNameAndVersionByTech(nc.tech, node.Id, nc.url, nc.repo) + if scope != "" { + name = scope + "/" + name + } resp, _, err := nc.rtManager.Client().SendHead(packageUrl, &nc.httpClientDetails) if err != nil { if resp != nil && resp.StatusCode >= 400 { @@ -450,7 +468,7 @@ func (nc *treeAnalyzer) getBlockedPackageDetails(packageUrl string, name string, } // Return policies and conditions names from the FORBIDDEN HTTP error message. -// Message structure: Package %s:%s download was blocked by JFrog Packages Curation service due to the following policies violated {%s, %s},{%s, %s}. +// Message structure: Package %s:%s download was blocked by JFrog Packages Curation service due to the following policies violated {%s, %s, %s, %s},{%s, %s, %s, %s}. func (nc *treeAnalyzer) extractPoliciesFromMsg(respError *ErrorsResp) []Policy { var policies []Policy msg := respError.Errors[0].Message @@ -458,25 +476,39 @@ func (nc *treeAnalyzer) extractPoliciesFromMsg(respError *ErrorsResp) []Policy { for _, match := range allMatches { match = strings.TrimSuffix(strings.TrimPrefix(match, "{"), "}") polCond := strings.Split(match, ",") - if len(polCond) == 2 { + if len(polCond) >= 2 { pol := polCond[0] cond := polCond[1] + + if len(polCond) == 4 { + exp, rec := makeLegiblePolicyDetails(polCond[2], polCond[3]) + policies = append(policies, Policy{Policy: strings.TrimSpace(pol), + Condition: strings.TrimSpace(cond), Explanation: strings.TrimSpace(exp), Recommendation: strings.TrimSpace(rec)}) + continue + } policies = append(policies, Policy{Policy: strings.TrimSpace(pol), Condition: strings.TrimSpace(cond)}) } } return policies } -func getUrlNameAndVersionByTech(tech coreutils.Technology, nodeId, artiUrl, repo string) (downloadUrl string, name string, version string) { +// Adding a new line after the headline and replace every "|" with a new line. +func makeLegiblePolicyDetails(explanation, recommendation string) (string, string) { + explanation = strings.ReplaceAll(strings.Replace(explanation, ": ", ":\n", 1), " | ", "\n") + recommendation = strings.ReplaceAll(strings.Replace(recommendation, ": ", ":\n", 1), " | ", "\n") + return explanation, recommendation +} + +func getUrlNameAndVersionByTech(tech coreutils.Technology, nodeId, artiUrl, repo string) (downloadUrl string, name string, scope string, version string) { if tech == coreutils.Npm { - return getNameScopeAndVersion(nodeId, artiUrl, repo, coreutils.Npm.ToString()) + return getNpmNameScopeAndVersion(nodeId, artiUrl, repo, coreutils.Npm.ToString()) } return } // The graph holds, for each node, the component ID (xray representation) // from which we extract the package name, version, and construct the Artifactory download URL. 
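A standalone sketch of that component-ID convention (example values and simplified parsing; the real logic is getNpmNameScopeAndVersion below): an Xray npm node ID such as npm://@dev/test:1.0.0 splits into scope @dev, name test, and version 1.0.0, which are then used to build the Artifactory tarball URL.

package main

import (
	"fmt"
	"strings"
)

// splitNpmComponentId breaks an Xray npm component ID ("npm://[@scope/]name:version")
// into scope, name and version. Simplified illustration only.
func splitNpmComponentId(id string) (scope, name, version string) {
	id = strings.TrimPrefix(id, "npm://")
	if nameVersion := strings.SplitN(id, ":", 2); len(nameVersion) == 2 {
		name, version = nameVersion[0], nameVersion[1]
	} else {
		name = id
	}
	if parts := strings.SplitN(name, "/", 2); len(parts) == 2 {
		scope, name = parts[0], parts[1]
	}
	return
}

func main() {
	scope, name, version := splitNpmComponentId("npm://@dev/test:1.0.0")
	fmt.Println(scope, name, version) // @dev test 1.0.0
}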
-func getNameScopeAndVersion(id, artiUrl, repo, tech string) (downloadUrl, name, version string) { +func getNpmNameScopeAndVersion(id, artiUrl, repo, tech string) (downloadUrl, name, scope, version string) { id = strings.TrimPrefix(id, tech+"://") nameVersion := strings.Split(id, ":") @@ -485,12 +517,11 @@ func getNameScopeAndVersion(id, artiUrl, repo, tech string) (downloadUrl, name, version = nameVersion[1] } scopeSplit := strings.Split(name, "/") - var scope string if len(scopeSplit) > 1 { scope = scopeSplit[0] name = scopeSplit[1] } - return buildNpmDownloadUrl(artiUrl, repo, name, scope, version), name, version + return buildNpmDownloadUrl(artiUrl, repo, name, scope, version), name, scope, version } func buildNpmDownloadUrl(url, repo, name, scope, version string) string { diff --git a/xray/commands/curation/audit_test.go b/xray/commands/curation/audit_test.go index f4323d2ff..9a6a81377 100644 --- a/xray/commands/curation/audit_test.go +++ b/xray/commands/curation/audit_test.go @@ -3,6 +3,7 @@ package curation import ( "encoding/json" "fmt" + "github.com/jfrog/gofrog/datastructures" tests2 "github.com/jfrog/jfrog-cli-core/v2/common/tests" "github.com/jfrog/jfrog-cli-core/v2/utils/config" "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" @@ -14,6 +15,7 @@ import ( "os" "path/filepath" "regexp" + "sort" "strconv" "strings" "sync" @@ -67,14 +69,16 @@ func getTestCasesForExtractPoliciesFromMsg() []struct { Errors: []ErrorResp{ { Status: 403, - Message: "Package test:1.0.0 download was blocked by JFrog Packages Curation service due to the following policies violated {policy1, condition1}.", + Message: "Package test:1.0.0 download was blocked by JFrog Packages Curation service due to the following policies violated {policy1, condition1, Package is 3339 days old, Upgrade to version 0.2.4 (latest)}.", }, }, }, expect: []Policy{ { - Policy: "policy1", - Condition: "condition1", + Policy: "policy1", + Condition: "condition1", + Explanation: "Package is 3339 days old", + Recommendation: "Upgrade to version 0.2.4 (latest)", }, }, }, @@ -126,6 +130,7 @@ func TestGetNameScopeAndVersion(t *testing.T) { wantDownloadUrl string wantName string wantVersion string + wantScope string }{ { name: "npm component", @@ -146,20 +151,16 @@ func TestGetNameScopeAndVersion(t *testing.T) { wantDownloadUrl: "http://localhost:8000/artifactory/api/npm/npm/dev/test/-/test-1.0.0.tgz", wantName: "test", wantVersion: "1.0.0", + wantScope: "dev", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - gotDownloadUrl, gotName, gotVersion := getNameScopeAndVersion(tt.componentId, tt.artiUrl, tt.repo, tt.repo) - if gotDownloadUrl != tt.wantDownloadUrl { - t.Errorf("getNameScopeAndVersion() gotDownloadUrl = %v, want %v", gotDownloadUrl, tt.wantDownloadUrl) - } - if gotName != tt.wantName { - t.Errorf("getNameScopeAndVersion() gotName = %v, want %v", gotName, tt.wantName) - } - if gotVersion != tt.wantVersion { - t.Errorf("getNameScopeAndVersion() gotVersion = %v, want %v", gotVersion, tt.wantVersion) - } + gotDownloadUrl, gotName, gotScope, gotVersion := getNpmNameScopeAndVersion(tt.componentId, tt.artiUrl, tt.repo, tt.repo) + assert.Equal(t, tt.wantDownloadUrl, gotDownloadUrl, "getNameScopeAndVersion() gotDownloadUrl = %v, want %v", gotDownloadUrl, tt.wantDownloadUrl) + assert.Equal(t, tt.wantName, gotName, "getNpmNameScopeAndVersion() gotName = %v, want %v", gotName, tt.wantName) + assert.Equal(t, tt.wantScope, gotScope, "getNpmNameScopeAndVersion() gotScope = %v, want %v", gotScope, tt.wantScope) + 
assert.Equal(t, tt.wantVersion, gotVersion, "getNpmNameScopeAndVersion() gotVersion = %v, want %v", gotVersion, tt.wantVersion) }) } } @@ -173,10 +174,22 @@ func TestTreeAnalyzerFillGraphRelations(t *testing.T) { repo: "npm-repo", tech: "npm", } - packageStatus := &[]*PackageStatus{} + var packageStatus []*PackageStatus preProcessedMap := fillSyncedMap(tt.givenMap) - nc.fillGraphRelations(tt.givenGraph, preProcessedMap, packageStatus, "", "", true) - assert.Equal(t, *tt.expectedPackagesStatus, *packageStatus) + nc.fillGraphRelations(tt.givenGraph, preProcessedMap, &packageStatus, "", "", datastructures.MakeSet[string](), true) + sort.Slice(packageStatus, func(i, j int) bool { + if packageStatus[i].BlockedPackageUrl == packageStatus[j].BlockedPackageUrl { + return packageStatus[i].ParentName < packageStatus[j].ParentName + } + return packageStatus[i].BlockedPackageUrl < packageStatus[j].BlockedPackageUrl + }) + sort.Slice(tt.expectedPackagesStatus, func(i, j int) bool { + if tt.expectedPackagesStatus[i].BlockedPackageUrl == tt.expectedPackagesStatus[j].BlockedPackageUrl { + return tt.expectedPackagesStatus[i].ParentName < tt.expectedPackagesStatus[j].ParentName + } + return tt.expectedPackagesStatus[i].BlockedPackageUrl < tt.expectedPackagesStatus[j].BlockedPackageUrl + }) + assert.Equal(t, tt.expectedPackagesStatus, packageStatus) }) } } @@ -185,24 +198,18 @@ func getTestCasesForFillGraphRelations() []struct { name string givenGraph *xrayUtils.GraphNode givenMap []*PackageStatus - expectedPackagesStatus *[]*PackageStatus + expectedPackagesStatus []*PackageStatus } { tests := []struct { name string givenGraph *xrayUtils.GraphNode givenMap []*PackageStatus - expectedPackagesStatus *[]*PackageStatus + expectedPackagesStatus []*PackageStatus }{ { name: "block indirect", givenGraph: &xrayUtils.GraphNode{ - Id: "npm://root-test", - Sha256: "", - Sha1: "", - Path: "", - DownloadUrl: "", - Licenses: nil, - Properties: nil, + Id: "npm://root-test", Nodes: []*xrayUtils.GraphNode{ { Id: "npm://test-parent:1.0.0", @@ -211,8 +218,6 @@ func getTestCasesForFillGraphRelations() []struct { }, }, }, - OtherComponentIds: nil, - Parent: nil, }, givenMap: []*PackageStatus{ { @@ -230,7 +235,7 @@ func getTestCasesForFillGraphRelations() []struct { }, }, }, - expectedPackagesStatus: &[]*PackageStatus{ + expectedPackagesStatus: []*PackageStatus{ { Action: "blocked", BlockedPackageUrl: "http://localhost:8046/artifactory/api/npm/npm-repo/test-child/-/test-child-2.0.0.tgz", @@ -250,6 +255,134 @@ func getTestCasesForFillGraphRelations() []struct { }, }, }, + { + name: "no duplications", + givenGraph: &xrayUtils.GraphNode{ + Id: "npm://root-test", + Nodes: []*xrayUtils.GraphNode{ + { + Id: "npm://test-parent:1.0.0", + Nodes: []*xrayUtils.GraphNode{ + { + Id: "npm://test-child:2.0.0", + Nodes: []*xrayUtils.GraphNode{ + { + Id: "npm://@dev/test-child:4.0.0", + }, + }, + }, + { + Id: "npm://test-child:3.0.0", + Nodes: []*xrayUtils.GraphNode{ + { + Id: "npm://@dev/test-child:4.0.0", + }, + }, + }, + { + Id: "npm://@dev/test-child:5.0.0", + Nodes: []*xrayUtils.GraphNode{ + { + Id: "npm://test-child:4.0.0", + }, + }, + }, + }, + }, + { + Id: "npm://@dev/test-parent:1.0.0", + Nodes: []*xrayUtils.GraphNode{ + { + Id: "npm://test-child:4.0.0", + }, + }, + }, + }, + }, + givenMap: []*PackageStatus{ + { + Action: "blocked", + BlockedPackageUrl: "http://localhost:8046/artifactory/api/npm/npm-repo/@dev/test-child/-/test-child-4.0.0.tgz", + PackageName: "@dev/test-child", + PackageVersion: "4.0.0", + BlockingReason: "Policy 
violations", + PkgType: "npm", + Policy: []Policy{ + { + Policy: "policy1", + Condition: "condition1", + }, + }, + }, + { + Action: "blocked", + BlockedPackageUrl: "http://localhost:8046/artifactory/api/npm/npm-repo/test-child/-/test-child-4.0.0.tgz", + PackageName: "test-child", + PackageVersion: "4.0.0", + BlockingReason: "Policy violations", + PkgType: "npm", + Policy: []Policy{ + { + Policy: "policy1", + Condition: "condition1", + }, + }, + }, + }, + expectedPackagesStatus: []*PackageStatus{ + { + Action: "blocked", + BlockedPackageUrl: "http://localhost:8046/artifactory/api/npm/npm-repo/test-child/-/test-child-4.0.0.tgz", + PackageName: "test-child", + PackageVersion: "4.0.0", + BlockingReason: "Policy violations", + PkgType: "npm", + Policy: []Policy{ + { + Policy: "policy1", + Condition: "condition1", + }, + }, + ParentName: "test-parent", + ParentVersion: "1.0.0", + DepRelation: "indirect", + }, + { + Action: "blocked", + BlockedPackageUrl: "http://localhost:8046/artifactory/api/npm/npm-repo/test-child/-/test-child-4.0.0.tgz", + PackageName: "test-child", + PackageVersion: "4.0.0", + BlockingReason: "Policy violations", + PkgType: "npm", + Policy: []Policy{ + { + Policy: "policy1", + Condition: "condition1", + }, + }, + ParentName: "@dev/test-parent", + ParentVersion: "1.0.0", + DepRelation: "indirect", + }, + { + Action: "blocked", + BlockedPackageUrl: "http://localhost:8046/artifactory/api/npm/npm-repo/@dev/test-child/-/test-child-4.0.0.tgz", + PackageName: "@dev/test-child", + PackageVersion: "4.0.0", + BlockingReason: "Policy violations", + PkgType: "npm", + Policy: []Policy{ + { + Policy: "policy1", + Condition: "condition1", + }, + }, + ParentName: "test-parent", + ParentVersion: "1.0.0", + DepRelation: "indirect", + }, + }, + }, } return tests } diff --git a/xray/commands/scan/scan.go b/xray/commands/scan/scan.go index 8a228fde4..0bd67b67f 100644 --- a/xray/commands/scan/scan.go +++ b/xray/commands/scan/scan.go @@ -393,7 +393,7 @@ func collectPatternMatchingFiles(fileData spec.File, rootPath string, dataHandle if err != nil { return err } - excludePathPattern := fspatterns.PrepareExcludePathPattern(fileParams) + excludePathPattern := fspatterns.PrepareExcludePathPattern(fileParams.Exclusions, fileParams.GetPatternType(), fileParams.IsRecursive()) patternRegex, err := regexp.Compile(fileData.Pattern) if errorutils.CheckError(err) != nil { return err diff --git a/xray/commands/testdata/iac-scan/contains-iac-violations-working-dir.sarif b/xray/commands/testdata/iac-scan/contains-iac-violations-working-dir.sarif new file mode 100644 index 000000000..cb7d1aa51 --- /dev/null +++ b/xray/commands/testdata/iac-scan/contains-iac-violations-working-dir.sarif @@ -0,0 +1,669 @@ +{ + "runs": [ + { + "tool": { + "driver": { + "name": "JFrog Terraform scanner", + "rules": [ + { + "id": "aws_alb_https_only", + "fullDescription": { + "text": "Resources `aws_lb_listener` and `aws_alb_listener` should set `protocol = \"HTTPS\"` (default is `\"HTTP\"`)\n\nVulnerable example -\n```\nresource \"aws_lb_listener\" \"vulnerable_example\" {\n protocol = \"HTTP\"\n}\n```\n\nSecure example -\n```\nresource \"aws_lb_listener\" \"secure_example\" {\n protocol = \"HTTPS\"\n}\n```", + "markdown": "Resources `aws_lb_listener` and `aws_alb_listener` should set `protocol = \"HTTPS\"` (default is `\"HTTP\"`)\n\nVulnerable example -\n```\nresource \"aws_lb_listener\" \"vulnerable_example\" {\n protocol = \"HTTP\"\n}\n```\n\nSecure example -\n```\nresource \"aws_lb_listener\" \"secure_example\" {\n protocol = 
\"HTTPS\"\n}\n```" + }, + "shortDescription": { + "text": "Scanner for aws_alb_https_only" + } + }, + { + "id": "aws_cloudwatch_log_encrypt", + "fullDescription": { + "text": "Resource `aws_cloudwatch_log_group` should have `kms_key_id`\n\nVulnerable example - \n```\nresource \"aws_cloudwatch_log_group\" \"vulnerable_example\" {\n # kms_key_id is not set\n}\n```\n\nSecure example -\n```\nresource \"aws_cloudwatch_log_group\" \"secure_example\" {\n kms_key_id = aws_kms_key.example.arn\n}\n```", + "markdown": "Resource `aws_cloudwatch_log_group` should have `kms_key_id`\n\nVulnerable example - \n```\nresource \"aws_cloudwatch_log_group\" \"vulnerable_example\" {\n # kms_key_id is not set\n}\n```\n\nSecure example -\n```\nresource \"aws_cloudwatch_log_group\" \"secure_example\" {\n kms_key_id = aws_kms_key.example.arn\n}\n```" + }, + "shortDescription": { + "text": "Scanner for aws_cloudwatch_log_encrypt" + } + }, + { + "id": "aws_docdb_encrypt_cluster", + "fullDescription": { + "text": "Resource `aws_docdb_cluster` should have `storage_encrypted=true`\n\nVulnerable example - \n```\nresource \"aws_docdb_cluster\" \"vulnerable_example\" {\n # storage_encrypted is unset\n}\n```\n\nSecure example -\n```\nresource \"aws_docdb_cluster\" \"secure_example\" {\n storage_encrypted = true\n}\n```", + "markdown": "Resource `aws_docdb_cluster` should have `storage_encrypted=true`\n\nVulnerable example - \n```\nresource \"aws_docdb_cluster\" \"vulnerable_example\" {\n # storage_encrypted is unset\n}\n```\n\nSecure example -\n```\nresource \"aws_docdb_cluster\" \"secure_example\" {\n storage_encrypted = true\n}\n```" + }, + "shortDescription": { + "text": "Scanner for aws_docdb_encrypt_cluster" + } + }, + { + "id": "aws_eks_encrypt_cluster", + "fullDescription": { + "text": "Resource `aws_eks_cluster` should have key `encryption_config`\n\nVulnerable example -\n```\nresource \"aws_eks_cluster\" \"vulnerable_example\" {\n # encryption_config is not set\n}\n```\n\nSecure example -\n```\nresource \"aws_eks_cluster\" \"secure_example\" {\n encryption_config {\n resources = [ \"secrets\" ]\n provider {\n key_arn = aws_kms_key.example.arn\n }\n }\n}\n```", + "markdown": "Resource `aws_eks_cluster` should have key `encryption_config`\n\nVulnerable example -\n```\nresource \"aws_eks_cluster\" \"vulnerable_example\" {\n # encryption_config is not set\n}\n```\n\nSecure example -\n```\nresource \"aws_eks_cluster\" \"secure_example\" {\n encryption_config {\n resources = [ \"secrets\" ]\n provider {\n key_arn = aws_kms_key.example.arn\n }\n }\n}\n```" + }, + "shortDescription": { + "text": "Scanner for aws_eks_encrypt_cluster" + } + }, + { + "id": "aws_eks_no_cidr", + "fullDescription": { + "text": "Resource `aws_eks_cluster` should have key `public_access_cidrs` (default is `0.0.0.0/0` which is overly permissive). 
Note that this endpoint is only enabled when `endpoint_public_access = true` (default is `true`)\n\nVulnerable example -\n```\nresource \"aws_eks_cluster\" \"vulnerable_example\" {\n vpc_config {\n endpoint_public_access = true # or unset\n public_access_cidrs = [\"0.0.0.0/0\"] # or unset \n }\n }\n```\n\nSecure example #1 -\n```\nresource \"aws_eks_cluster\" \"secure_example_1\" {\n vpc_config {\n endpoint_public_access = false\n }\n }\n```\n\nSecure example #2 -\n```\nresource \"aws_eks_cluster\" \"secure_example_2\" {\n vpc_config {\n endpoint_public_access = true\n public_access_cidrs = [\"192.168.0.0/24\"]\n }\n }\n```", + "markdown": "Resource `aws_eks_cluster` should have key `public_access_cidrs` (default is `0.0.0.0/0` which is overly permissive). Note that this endpoint is only enabled when `endpoint_public_access = true` (default is `true`)\n\nVulnerable example -\n```\nresource \"aws_eks_cluster\" \"vulnerable_example\" {\n vpc_config {\n endpoint_public_access = true # or unset\n public_access_cidrs = [\"0.0.0.0/0\"] # or unset \n }\n }\n```\n\nSecure example #1 -\n```\nresource \"aws_eks_cluster\" \"secure_example_1\" {\n vpc_config {\n endpoint_public_access = false\n }\n }\n```\n\nSecure example #2 -\n```\nresource \"aws_eks_cluster\" \"secure_example_2\" {\n vpc_config {\n endpoint_public_access = true\n public_access_cidrs = [\"192.168.0.0/24\"]\n }\n }\n```" + }, + "shortDescription": { + "text": "Scanner for aws_eks_no_cidr" + } + }, + { + "id": "aws_rds_encrypt_instance", + "fullDescription": { + "text": "Resource `aws_db_instance` should have `storage_encrypted=true`\r\n\r\nVulnerable example -\r\n```\r\nresource \"aws_db_instance\" \"vulnerable_example\" {\r\n # storage_encrypted is not set\r\n}\r\n```\r\n\r\nSecure example -\r\n```\r\nresource \"aws_db_instance\" \"secure_example\" {\r\n kms_key_id = aws_kms_key.example.arn\r\n storage_encrypted = true\r\n}\r\n```", + "markdown": "Resource `aws_db_instance` should have `storage_encrypted=true`\r\n\r\nVulnerable example -\r\n```\r\nresource \"aws_db_instance\" \"vulnerable_example\" {\r\n # storage_encrypted is not set\r\n}\r\n```\r\n\r\nSecure example -\r\n```\r\nresource \"aws_db_instance\" \"secure_example\" {\r\n kms_key_id = aws_kms_key.example.arn\r\n storage_encrypted = true\r\n}\r\n```" + }, + "shortDescription": { + "text": "Scanner for aws_rds_encrypt_instance" + } + }, + { + "id": "aws_rds_iam_auth", + "fullDescription": { + "text": "Resource `aws_db_instance` should have `iam_database_authentication_enabled=true`\r\n\r\nVulnerable example -\r\n```\r\nresource \"aws_db_instance\" \"vulnerable_example\" {\r\n # iam_database_authentication_enabled is unset (or false)\r\n}\r\n```\r\n\r\nSecure example -\r\n```\r\nresource \"aws_db_instance\" \"secure_example\" {\r\n iam_database_authentication_enabled = true\r\n}\r\n```", + "markdown": "Resource `aws_db_instance` should have `iam_database_authentication_enabled=true`\r\n\r\nVulnerable example -\r\n```\r\nresource \"aws_db_instance\" \"vulnerable_example\" {\r\n # iam_database_authentication_enabled is unset (or false)\r\n}\r\n```\r\n\r\nSecure example -\r\n```\r\nresource \"aws_db_instance\" \"secure_example\" {\r\n iam_database_authentication_enabled = true\r\n}\r\n```" + }, + "shortDescription": { + "text": "Scanner for aws_rds_iam_auth" + } + }, + { + "id": "aws_s3_block_public_acl", + "fullDescription": { + "text": "If resource `aws_s3_bucket` exists, then resource `aws_s3_bucket_public_access_block` must also exist and have 
`block_public_acls=true`\r\n\r\nVulnerable example -\r\n```\r\nresource \"aws_s3_bucket\" \"example_bucket\" {\r\n bucket = \"mybucket\"\r\n}\r\n\r\nresource \"aws_s3_bucket_public_access_block\" \"vulnerable_example\" {\r\n bucket = aws_s3_bucket.example_bucket.id\r\n # block_public_acls is not set\r\n}\r\n```\r\n\r\nSecure example -\r\n```\r\nresource \"aws_s3_bucket\" \"example_bucket\" {\r\n bucket = \"mybucket\"\r\n}\r\n\r\nresource \"aws_s3_bucket_public_access_block\" \"secure_example\" {\r\n bucket = aws_s3_bucket.example_bucket.id\r\n block_public_acls = true\r\n}\r\n```", + "markdown": "If resource `aws_s3_bucket` exists, then resource `aws_s3_bucket_public_access_block` must also exist and have `block_public_acls=true`\r\n\r\nVulnerable example -\r\n```\r\nresource \"aws_s3_bucket\" \"example_bucket\" {\r\n bucket = \"mybucket\"\r\n}\r\n\r\nresource \"aws_s3_bucket_public_access_block\" \"vulnerable_example\" {\r\n bucket = aws_s3_bucket.example_bucket.id\r\n # block_public_acls is not set\r\n}\r\n```\r\n\r\nSecure example -\r\n```\r\nresource \"aws_s3_bucket\" \"example_bucket\" {\r\n bucket = \"mybucket\"\r\n}\r\n\r\nresource \"aws_s3_bucket_public_access_block\" \"secure_example\" {\r\n bucket = aws_s3_bucket.example_bucket.id\r\n block_public_acls = true\r\n}\r\n```" + }, + "shortDescription": { + "text": "Scanner for aws_s3_block_public_acl" + } + }, + { + "id": "aws_s3_block_public_policy", + "fullDescription": { + "text": "If resource `aws_s3_bucket` exists, then resource `aws_s3_bucket_public_access_block` must also exist and have `block_public_acls=true`\r\n\r\nVulnerable example -\r\n```\r\nresource \"aws_s3_bucket\" \"example_bucket\" {\r\n bucket = \"mybucket\"\r\n}\r\n\r\nresource \"aws_s3_bucket_public_access_block\" \"vulnerable_example\" {\r\n bucket = aws_s3_bucket.example_bucket.id\r\n # block_public_acls is not set\r\n}\r\n```\r\n\r\nSecure example -\r\n```\r\nresource \"aws_s3_bucket\" \"example_bucket\" {\r\n bucket = \"mybucket\"\r\n}\r\n\r\nresource \"aws_s3_bucket_public_access_block\" \"secure_example\" {\r\n bucket = aws_s3_bucket.example_bucket.id\r\n block_public_acls = true\r\n}\r\n```", + "markdown": "If resource `aws_s3_bucket` exists, then resource `aws_s3_bucket_public_access_block` must also exist and have `block_public_acls=true`\r\n\r\nVulnerable example -\r\n```\r\nresource \"aws_s3_bucket\" \"example_bucket\" {\r\n bucket = \"mybucket\"\r\n}\r\n\r\nresource \"aws_s3_bucket_public_access_block\" \"vulnerable_example\" {\r\n bucket = aws_s3_bucket.example_bucket.id\r\n # block_public_acls is not set\r\n}\r\n```\r\n\r\nSecure example -\r\n```\r\nresource \"aws_s3_bucket\" \"example_bucket\" {\r\n bucket = \"mybucket\"\r\n}\r\n\r\nresource \"aws_s3_bucket_public_access_block\" \"secure_example\" {\r\n bucket = aws_s3_bucket.example_bucket.id\r\n block_public_acls = true\r\n}\r\n```" + }, + "shortDescription": { + "text": "Scanner for aws_s3_block_public_policy" + } + }, + { + "id": "aws_s3_encrypt", + "fullDescription": { + "text": "If resource `aws_s3_bucket` exists, then resource `aws_s3_bucket_server_side_encryption_configuration` must also exist with the key `apply_server_side_encryption_by_default`. 
Alternatively, the `aws_s3_bucket` resource should have the (deprecated) `server_side_encryption_configuration` key.\n\nVulnerable example #1 -\n```\nresource \"aws_s3_bucket\" \"mybucket\" {\n bucket = \"mybucket\"\n}\n\n# resource \"aws_s3_bucket_server_side_encryption_configuration\" is not defined\n```\n\nSecure example #1 -\n```\nresource \"aws_s3_bucket\" \"mybucket\" {\n bucket = \"mybucket\"\n}\n\nresource \"aws_s3_bucket_server_side_encryption_configuration\" \"secure_example_1\" {\n bucket = aws_s3_bucket.mybucket.bucket\n\n rule {\n apply_server_side_encryption_by_default {\n kms_master_key_id = aws_kms_key.mykey.arn\n sse_algorithm = \"aws:kms\"\n }\n }\n}\n```\n\nVulnerable example #2 -\n```\nresource \"aws_s3_bucket\" \"vulnerable_example_2\" {\n # server_side_encryption_configuration is not set\n}\n```\n\nSecure example #2 -\n```\nresource \"aws_s3_bucket\" \"secure_example_2\" {\n bucket = \"mybucket\"\n\n server_side_encryption_configuration {\n rule {\n apply_server_side_encryption_by_default {\n kms_master_key_id = aws_kms_key.mykey.arn\n sse_algorithm = \"aws:kms\"\n }\n }\n }\n}\n```", + "markdown": "If resource `aws_s3_bucket` exists, then resource `aws_s3_bucket_server_side_encryption_configuration` must also exist with the key `apply_server_side_encryption_by_default`. Alternatively, the `aws_s3_bucket` resource should have the (deprecated) `server_side_encryption_configuration` key.\n\nVulnerable example #1 -\n```\nresource \"aws_s3_bucket\" \"mybucket\" {\n bucket = \"mybucket\"\n}\n\n# resource \"aws_s3_bucket_server_side_encryption_configuration\" is not defined\n```\n\nSecure example #1 -\n```\nresource \"aws_s3_bucket\" \"mybucket\" {\n bucket = \"mybucket\"\n}\n\nresource \"aws_s3_bucket_server_side_encryption_configuration\" \"secure_example_1\" {\n bucket = aws_s3_bucket.mybucket.bucket\n\n rule {\n apply_server_side_encryption_by_default {\n kms_master_key_id = aws_kms_key.mykey.arn\n sse_algorithm = \"aws:kms\"\n }\n }\n}\n```\n\nVulnerable example #2 -\n```\nresource \"aws_s3_bucket\" \"vulnerable_example_2\" {\n # server_side_encryption_configuration is not set\n}\n```\n\nSecure example #2 -\n```\nresource \"aws_s3_bucket\" \"secure_example_2\" {\n bucket = \"mybucket\"\n\n server_side_encryption_configuration {\n rule {\n apply_server_side_encryption_by_default {\n kms_master_key_id = aws_kms_key.mykey.arn\n sse_algorithm = \"aws:kms\"\n }\n }\n }\n}\n```" + }, + "shortDescription": { + "text": "Scanner for aws_s3_encrypt" + } + } + ], + "version": "" + } + }, + "invocations": [ + { + "executionSuccessful": true, + "arguments": [ + "iac_scanner/tf_scanner", + "scan", + "/var/folders/mj/sk15wcdx5kl7p5shk662bjt80000gn/T/jfrog.cli.temp.-1690974158-62790465/config.yaml" + ], + "workingDirectory": { + "uri": "file:///Users/omerz/.jfrog/dependencies/analyzerManager" + } + } + ], + "results": [ + { + "message": { + "text": "storage_encrypted=false was detected" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/omerz/Documents/analyzers_test/iac/aws/byok/module.tf" + }, + "region": { + "endColumn": 2, + "endLine": 121, + "snippet": { + "text": "byok_database" + }, + "startColumn": 1, + "startLine": 69 + } + } + } + ], + "ruleId": "aws_rds_encrypt_instance" + }, + { + "message": { + "text": "iam_database_authentication_enabled=False was detected" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/omerz/Documents/analyzers_test/iac/aws/byok/module.tf" + }, + "region": 
{ + "endColumn": 2, + "endLine": 121, + "snippet": { + "text": "byok_database" + }, + "startColumn": 1, + "startLine": 69 + } + } + } + ], + "ruleId": "aws_rds_iam_auth" + }, + { + "message": { + "text": "storage_encrypted=False was detected" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/omerz/Documents/analyzers_test/iac/aws/documentdb/module.tf" + }, + "region": { + "endColumn": 2, + "endLine": 31, + "snippet": { + "text": "default" + }, + "startColumn": 1, + "startLine": 15 + } + } + } + ], + "ruleId": "aws_docdb_encrypt_cluster" + }, + { + "message": { + "text": "AWS EKS public API server is publicly accessible" + }, + "level": "error", + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/omerz/Documents/analyzers_test/iac/aws/eks_mng_ng/module.tf" + }, + "region": { + "endColumn": 2, + "endLine": 65, + "snippet": { + "text": "aws_eks" + }, + "startColumn": 1, + "startLine": 36 + } + } + } + ], + "ruleId": "aws_eks_no_cidr" + }, + { + "message": { + "text": "AWS EKS public API server is publicly accessible" + }, + "level": "error", + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/omerz/Documents/analyzers_test/iac/aws/eks_mng_ng_coralogix/module.tf" + }, + "region": { + "endColumn": 2, + "endLine": 65, + "snippet": { + "text": "aws_eks" + }, + "startColumn": 1, + "startLine": 36 + } + } + } + ], + "ruleId": "aws_eks_no_cidr" + }, + { + "message": { + "text": "AWS EKS public API server is publicly accessible" + }, + "level": "error", + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/omerz/Documents/analyzers_test/iac/aws/k8s/module/cluster.tf" + }, + "region": { + "endColumn": 2, + "endLine": 24, + "snippet": { + "text": "this" + }, + "startColumn": 1, + "startLine": 1 + } + } + } + ], + "ruleId": "aws_eks_no_cidr" + }, + { + "message": { + "text": "AWS EKS secrets do not usedata-at-rest encryption" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/omerz/Documents/analyzers_test/iac/aws/k8s/module/cluster.tf" + }, + "region": { + "endColumn": 2, + "endLine": 24, + "snippet": { + "text": "this" + }, + "startColumn": 1, + "startLine": 1 + } + } + } + ], + "ruleId": "aws_eks_encrypt_cluster" + }, + { + "message": { + "text": "AWS EKS public API server is publicly accessible" + }, + "level": "error", + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/omerz/Documents/analyzers_test/iac/aws/k8s/module2/cluster.tf" + }, + "region": { + "endColumn": 2, + "endLine": 49, + "snippet": { + "text": "this" + }, + "startColumn": 1, + "startLine": 9 + } + } + } + ], + "ruleId": "aws_eks_no_cidr" + }, + { + "message": { + "text": "kms_key_id='' was detected" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/omerz/Documents/analyzers_test/iac/aws/msk/module.tf" + }, + "region": { + "endColumn": 2, + "endLine": 33, + "snippet": { + "text": "log" + }, + "startColumn": 1, + "startLine": 30 + } + } + } + ], + "ruleId": "aws_cloudwatch_log_encrypt" + }, + { + "message": { + "text": "block_public_acls=false was detected" + }, + "level": "error", + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/omerz/Documents/analyzers_test/iac/aws/msk/module.tf" + }, + "region": { + "endColumn": 2, + "endLine": 39, + "snippet": { + "text": "bucket" + }, + "startColumn": 
1, + "startLine": 35 + } + } + } + ], + "ruleId": "aws_s3_block_public_acl" + }, + { + "message": { + "text": "block_public_acls=false was detected" + }, + "level": "error", + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/omerz/Documents/analyzers_test/iac/aws/msk/module.tf" + }, + "region": { + "endColumn": 2, + "endLine": 39, + "snippet": { + "text": "bucket" + }, + "startColumn": 1, + "startLine": 35 + } + } + } + ], + "ruleId": "aws_s3_block_public_policy" + }, + { + "message": { + "text": "Missing server_side_encryption_configuration was detected, Missing aws_s3_bucket_server_side_encryption_configuration was detected" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/omerz/Documents/analyzers_test/iac/aws/msk/module.tf" + }, + "region": { + "endColumn": 2, + "endLine": 39, + "snippet": { + "text": "bucket" + }, + "startColumn": 1, + "startLine": 35 + } + } + } + ], + "ruleId": "aws_s3_encrypt" + }, + { + "message": { + "text": "Missing server_side_encryption_configuration was detected, Missing aws_s3_bucket_server_side_encryption_configuration was detected" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/omerz/Documents/analyzers_test/iac/aws/msk/module.tf" + }, + "region": { + "endColumn": 2, + "endLine": 39, + "snippet": { + "text": "bucket" + }, + "startColumn": 1, + "startLine": 35 + } + } + } + ], + "ruleId": "aws_s3_encrypt" + }, + { + "message": { + "text": "storage_encrypted=false was detected" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/omerz/Documents/analyzers_test/iac/aws/mysql_coralogix/module.tf" + }, + "region": { + "endColumn": 2, + "endLine": 147, + "snippet": { + "text": "k8s_database" + }, + "startColumn": 1, + "startLine": 102 + } + } + } + ], + "ruleId": "aws_rds_encrypt_instance" + }, + { + "message": { + "text": "iam_database_authentication_enabled=False was detected" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/omerz/Documents/analyzers_test/iac/aws/mysql_coralogix/module.tf" + }, + "region": { + "endColumn": 2, + "endLine": 147, + "snippet": { + "text": "k8s_database" + }, + "startColumn": 1, + "startLine": 102 + } + } + } + ], + "ruleId": "aws_rds_iam_auth" + }, + { + "message": { + "text": "AWS Load balancer using insecure communications" + }, + "level": "error", + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/omerz/Documents/analyzers_test/iac/aws/private_link/module.tf" + }, + "region": { + "endColumn": 2, + "endLine": 68, + "snippet": { + "text": "pl_lb_listener" + }, + "startColumn": 1, + "startLine": 53 + } + } + } + ], + "ruleId": "aws_alb_https_only" + }, + { + "message": { + "text": "AWS Load balancer using insecure communications" + }, + "level": "error", + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/omerz/Documents/analyzers_test/iac/aws/private_link/module.tf" + }, + "region": { + "endColumn": 2, + "endLine": 115, + "snippet": { + "text": "pl_lb_listener_plain" + }, + "startColumn": 1, + "startLine": 100 + } + } + } + ], + "ruleId": "aws_alb_https_only" + }, + { + "message": { + "text": "storage_encrypted=false was detected" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/omerz/Documents/analyzers_test/iac/aws/rds/module.tf" + }, + "region": { + "endColumn": 2, 
+ "endLine": 152, + "snippet": { + "text": "k8s_database" + }, + "startColumn": 1, + "startLine": 103 + } + } + } + ], + "ruleId": "aws_rds_encrypt_instance" + }, + { + "message": { + "text": "iam_database_authentication_enabled=False was detected" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/omerz/Documents/analyzers_test/iac/aws/rds/module.tf" + }, + "region": { + "endColumn": 2, + "endLine": 152, + "snippet": { + "text": "k8s_database" + }, + "startColumn": 1, + "startLine": 103 + } + } + } + ], + "ruleId": "aws_rds_iam_auth" + }, + { + "message": { + "text": "block_public_acls=false was detected" + }, + "level": "error", + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/omerz/Documents/analyzers_test/iac/aws/s3/module.tf" + }, + "region": { + "endColumn": 2, + "endLine": 28, + "snippet": { + "text": "default" + }, + "startColumn": 1, + "startLine": 8 + } + } + } + ], + "ruleId": "aws_s3_block_public_acl" + }, + { + "message": { + "text": "block_public_acls=false was detected" + }, + "level": "error", + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/omerz/Documents/analyzers_test/iac/aws/s3/module.tf" + }, + "region": { + "endColumn": 2, + "endLine": 28, + "snippet": { + "text": "default" + }, + "startColumn": 1, + "startLine": 8 + } + } + } + ], + "ruleId": "aws_s3_block_public_policy" + }, + { + "message": { + "text": "kms_key_id='' was detected" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///Users/omerz/Documents/analyzers_test/iac/aws/vpc/module2/examples/vpc-flow-logs/cloud-watch-logs.tf" + }, + "region": { + "endColumn": 2, + "endLine": 53, + "snippet": { + "text": "flow_log" + }, + "startColumn": 1, + "startLine": 51 + } + } + } + ], + "ruleId": "aws_cloudwatch_log_encrypt" + } + ] + } + ], + "version": "2.1.0", + "$schema": "https://docs.oasis-open.org/sarif/sarif/v2.1.0/cos02/schemas/sarif-schema-2.1.0.json" +} \ No newline at end of file diff --git a/xray/commands/testdata/secrets-scan/contain-secrets.sarif b/xray/commands/testdata/secrets-scan/contain-secrets.sarif index a678bab91..af3f378eb 100644 --- a/xray/commands/testdata/secrets-scan/contain-secrets.sarif +++ b/xray/commands/testdata/secrets-scan/contain-secrets.sarif @@ -24,7 +24,7 @@ "sec_config_example.yaml" ], "workingDirectory": { - "uri": "file:///Users/ort/Desktop/secrets_scanner" + "uri": "secrets_scanner" } } ], @@ -37,7 +37,7 @@ { "physicalLocation": { "artifactLocation": { - "uri": "file:///Users/ort/Desktop/secrets_scanner/tests/req.nodejs.hardcoded-secrets/applicable_base64.js" + "uri": "file://secrets_scanner/tests/req.nodejs.hardcoded-secrets/applicable_base64.js" }, "region": { "endColumn": 118, @@ -61,7 +61,7 @@ { "physicalLocation": { "artifactLocation": { - "uri": "file:///Users/ort/Desktop/secrets_scanner/tests/req.nodejs.hardcoded-secrets/applicable_base64.js.approval.json" + "uri": "file://secrets_scanner/tests/req.nodejs.hardcoded-secrets/applicable_base64.js.approval.json" }, "region": { "endColumn": 195, @@ -85,7 +85,7 @@ { "physicalLocation": { "artifactLocation": { - "uri": "file:///Users/ort/Desktop/secrets_scanner/tests/req.nodejs.hardcoded-secrets/applicable_hex.js" + "uri": "file://secrets_scanner/tests/req.nodejs.hardcoded-secrets/applicable_hex.js" }, "region": { "endColumn": 138, @@ -109,7 +109,7 @@ { "physicalLocation": { "artifactLocation": { - "uri": 
"file:///Users/ort/Desktop/secrets_scanner/tests/req.nodejs.hardcoded-secrets/applicable_hex.js.approval.json" + "uri": "file://secrets_scanner/tests/req.nodejs.hardcoded-secrets/applicable_hex.js.approval.json" }, "region": { "endColumn": 215, @@ -133,7 +133,7 @@ { "physicalLocation": { "artifactLocation": { - "uri": "file:///Users/ort/Desktop/secrets_scanner/tests/req.python.hardcoded-secrets/applicable_base64.py" + "uri": "file://secrets_scanner/tests/req.python.hardcoded-secrets/applicable_base64.py" }, "region": { "endColumn": 112, @@ -157,7 +157,7 @@ { "physicalLocation": { "artifactLocation": { - "uri": "file:///Users/ort/Desktop/secrets_scanner/tests/req.python.hardcoded-secrets/applicable_base64.py.approval.json" + "uri": "file://secrets_scanner/tests/req.python.hardcoded-secrets/applicable_base64.py.approval.json" }, "region": { "endColumn": 191, @@ -181,7 +181,7 @@ { "physicalLocation": { "artifactLocation": { - "uri": "file:///Users/ort/Desktop/secrets_scanner/tests/req.python.hardcoded-secrets/applicable_hex.py" + "uri": "file://secrets_scanner/tests/req.python.hardcoded-secrets/applicable_hex.py" }, "region": { "endColumn": 132, @@ -205,7 +205,7 @@ { "physicalLocation": { "artifactLocation": { - "uri": "file:///Users/ort/Desktop/secrets_scanner/tests/req.python.hardcoded-secrets/applicable_hex.py.approval.json" + "uri": "file://secrets_scanner/tests/req.python.hardcoded-secrets/applicable_hex.py.approval.json" }, "region": { "endColumn": 211, @@ -219,7 +219,12 @@ } } ], - "ruleId": "entropy" + "ruleId": "entropy", + "suppressions": [ + { + "kind": "inSource" + } + ] } ] } diff --git a/xray/commands/testdata/secrets-scan/no-secrets.sarif b/xray/commands/testdata/secrets-scan/no-secrets.sarif index 4b3186cd1..8b0ae50d4 100644 --- a/xray/commands/testdata/secrets-scan/no-secrets.sarif +++ b/xray/commands/testdata/secrets-scan/no-secrets.sarif @@ -17,7 +17,7 @@ "sec_config_example.yaml" ], "workingDirectory": { - "uri": "file:///Users/ort/Desktop/am_versions_for_leap" + "uri": "file:///am_versions_for_leap" } } ], diff --git a/xray/commands/utils/utils.go b/xray/commands/utils/utils.go index fdaed4626..2d5f5e92d 100644 --- a/xray/commands/utils/utils.go +++ b/xray/commands/utils/utils.go @@ -1,6 +1,7 @@ package utils import ( + "fmt" "github.com/jfrog/jfrog-cli-core/v2/utils/config" "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" "github.com/jfrog/jfrog-cli-core/v2/xray/utils" @@ -12,6 +13,7 @@ import ( "golang.org/x/text/cases" "golang.org/x/text/language" "os" + "strings" ) const ( @@ -185,7 +187,7 @@ func CreateXrayServiceManagerAndGetVersion(serviceDetails *config.ServerDetails) return xrayManager, xrayVersion, nil } -func DetectedTechnologies() (technologies []string, err error) { +func DetectedTechnologies() (technologies []string) { wd, err := os.Getwd() if errorutils.CheckError(err) != nil { return @@ -194,12 +196,12 @@ func DetectedTechnologies() (technologies []string, err error) { if err != nil { return } - detectedTechnologiesString := coreutils.DetectedTechnologiesToString(detectedTechnologies) - if detectedTechnologiesString == "" { - return nil, errorutils.CheckErrorf("could not determine the package manager / build tool used by this project.") + if len(detectedTechnologies) == 0 { + return } - log.Info("Detected: " + detectedTechnologiesString) - return coreutils.DetectedTechnologiesToSlice(detectedTechnologies), nil + techStringsList := coreutils.DetectedTechnologiesToSlice(detectedTechnologies) + log.Info(fmt.Sprintf("Detected: %s.", 
strings.Join(techStringsList, ","))) + return techStringsList } func DetectNumOfThreads(threadsCount int) (int, error) { diff --git a/xray/commands/utils/utils_test.go b/xray/commands/utils/utils_test.go index de26cb7ba..56d5bf0cc 100644 --- a/xray/commands/utils/utils_test.go +++ b/xray/commands/utils/utils_test.go @@ -88,12 +88,12 @@ func TestFilterResultIfNeeded(t *testing.T) { } // Run test cases - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { + for i := range tests { + t.Run(tests[i].name, func(t *testing.T) { // Call the function with the input parameters - actual := filterResultIfNeeded(&tt.scanResult, &tt.params) + actual := filterResultIfNeeded(&tests[i].scanResult, &tests[i].params) // Check that the function returned the expected result - assert.True(t, reflect.DeepEqual(*actual, tt.expected)) + assert.True(t, reflect.DeepEqual(*actual, tests[i].expected)) }) } } diff --git a/xray/formats/conversion.go b/xray/formats/conversion.go index 570b1dd3b..88e2c2612 100644 --- a/xray/formats/conversion.go +++ b/xray/formats/conversion.go @@ -147,7 +147,6 @@ func ConvertToSecretsTableRow(rows []IacSecretsRow) (tableRows []secretsTableRow file: rows[i].File, lineColumn: rows[i].LineColumn, text: rows[i].Text, - secretType: rows[i].Type, }) } return @@ -160,7 +159,6 @@ func ConvertToIacTableRow(rows []IacSecretsRow) (tableRows []iacTableRow) { file: rows[i].File, lineColumn: rows[i].LineColumn, text: rows[i].Text, - iacType: rows[i].Type, }) } return diff --git a/xray/formats/table.go b/xray/formats/table.go index 2fc3c389c..44fe79f20 100644 --- a/xray/formats/table.go +++ b/xray/formats/table.go @@ -128,7 +128,6 @@ type secretsTableRow struct { file string `col-name:"File"` lineColumn string `col-name:"Line:Column"` text string `col-name:"Secret"` - secretType string `col-name:"Type"` } type iacTableRow struct { @@ -136,5 +135,4 @@ type iacTableRow struct { file string `col-name:"File"` lineColumn string `col-name:"Line:Column"` text string `col-name:"Finding"` - iacType string `col-name:"Scanner"` } diff --git a/xray/utils/analyzermanager.go b/xray/utils/analyzermanager.go index e9181084e..e88acb735 100644 --- a/xray/utils/analyzermanager.go +++ b/xray/utils/analyzermanager.go @@ -3,6 +3,13 @@ package utils import ( "errors" "fmt" + "os" + "os/exec" + "path" + "path/filepath" + "strconv" + "strings" + "github.com/jfrog/jfrog-cli-core/v2/utils/config" "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" "github.com/jfrog/jfrog-client-go/utils/errorutils" @@ -10,23 +17,18 @@ import ( "github.com/jfrog/jfrog-client-go/utils/log" "github.com/jfrog/jfrog-client-go/xray/services" "github.com/owenrumney/go-sarif/v2/sarif" - "os" - "os/exec" - "path/filepath" - "strconv" - "strings" ) var ( - analyzerManagerLogFolder = "" - levelToSeverity = map[string]string{"error": "High", "warning": "Medium", "info": "Low"} + levelToSeverity = map[string]string{"error": "High", "warning": "Medium", "info": "Low"} ) const ( EntitlementsMinVersion = "3.66.5" ApplicabilityFeatureId = "contextual_analysis" AnalyzerManagerZipName = "analyzerManager.zip" - analyzerManagerDownloadPath = "xsc-gen-exe-analyzer-manager-local/v1/[RELEASE]" + analyzerManagerVersion = "1.2.4.1953469" + analyzerManagerDownloadPath = "xsc-gen-exe-analyzer-manager-local/v1" analyzerManagerDirName = "analyzerManager" analyzerManagerExecutableName = "analyzerManager" analyzerManagerLogDirName = "analyzerManagerLogs" @@ -38,6 +40,8 @@ const ( SeverityDefaultValue = "Medium" notEntitledExitCode = 31 unsupportedCommandExitCode 
= 13 + unsupportedOsExitCode = 55 + ErrFailedScannerRun = "failed to run %s scan. Exit code received: %s" ) const ( @@ -46,6 +50,27 @@ const ( ApplicabilityUndeterminedStringValue = "Undetermined" ) +type ScanType string + +const ( + Applicability ScanType = "Applicability" + Secrets ScanType = "Secrets" + IaC ScanType = "IaC" +) + +func (st ScanType) FormattedError(err error) error { + if err != nil { + return fmt.Errorf(ErrFailedScannerRun, st, err.Error()) + } + return nil +} + +var exitCodeErrorsMap = map[int]string{ + notEntitledExitCode: "got not entitled error from analyzer manager", + unsupportedCommandExitCode: "got unsupported scan command error from analyzer manager", + unsupportedOsExitCode: "got unsupported operating system error from analyzer manager", +} + type IacOrSecretResult struct { Severity string File string @@ -55,48 +80,27 @@ type IacOrSecretResult struct { } type ExtendedScanResults struct { - XrayResults []services.ScanResponse - ApplicabilityScanResults map[string]string - SecretsScanResults []IacOrSecretResult - IacScanResults []IacOrSecretResult - EntitledForJas bool - EligibleForApplicabilityScan bool - EligibleForSecretScan bool - EligibleForIacScan bool + XrayResults []services.ScanResponse + ScannedTechnologies []coreutils.Technology + ApplicabilityScanResults map[string]string + SecretsScanResults []IacOrSecretResult + IacScanResults []IacOrSecretResult + EntitledForJas bool } func (e *ExtendedScanResults) getXrayScanResults() []services.ScanResponse { return e.XrayResults } -// AnalyzerManagerInterface represents the analyzer manager executable file that exists locally as a Jfrog dependency. -// It triggers JAS capabilities by verifying user's entitlements and running the JAS scanners. -// Analyzer manager input: -// - scan command: ca (contextual analysis) / sec (secrets) / iac -// - path to configuration file -// -// Analyzer manager output: -// - sarif file containing the scan results -type AnalyzerManagerInterface interface { - ExistLocally() (bool, error) - Exec(string, string) error -} - type AnalyzerManager struct { - analyzerManagerFullPath string + AnalyzerManagerFullPath string } -func (am *AnalyzerManager) ExistLocally() (bool, error) { - analyzerManagerPath, err := getAnalyzerManagerExecutable() - if err != nil { - return false, err +func (am *AnalyzerManager) Exec(configFile, scanCommand string, serverDetails *config.ServerDetails) (err error) { + if err = SetAnalyzerManagerEnvVariables(serverDetails); err != nil { + return err } - am.analyzerManagerFullPath = analyzerManagerPath - return fileutils.IsFileExists(analyzerManagerPath, false) -} - -func (am *AnalyzerManager) Exec(configFile string, scanCommand string) (err error) { - cmd := exec.Command(am.analyzerManagerFullPath, scanCommand, configFile) + cmd := exec.Command(am.AnalyzerManagerFullPath, scanCommand, configFile) defer func() { if !cmd.ProcessState.Exited() { if killProcessError := cmd.Process.Kill(); errorutils.CheckError(killProcessError) != nil { @@ -104,26 +108,17 @@ func (am *AnalyzerManager) Exec(configFile string, scanCommand string) (err erro } } }() - cmd.Dir = filepath.Dir(am.analyzerManagerFullPath) + cmd.Dir = filepath.Dir(am.AnalyzerManagerFullPath) err = cmd.Run() return errorutils.CheckError(err) } -func CreateAnalyzerManagerLogDir() error { - logDir, err := coreutils.CreateDirInJfrogHome(filepath.Join(coreutils.JfrogLogsDirName, analyzerManagerLogDirName)) - if err != nil { - return err - } - analyzerManagerLogFolder = logDir - return nil -} - func 
GetAnalyzerManagerDownloadPath() (string, error) { osAndArc, err := coreutils.GetOSAndArc() if err != nil { return "", err } - return fmt.Sprintf("%s/%s/%s", analyzerManagerDownloadPath, osAndArc, AnalyzerManagerZipName), nil + return path.Join(analyzerManagerDownloadPath, analyzerManagerVersion, osAndArc, AnalyzerManagerZipName), nil } func GetAnalyzerManagerDirAbsolutePath() (string, error) { @@ -134,12 +129,20 @@ func GetAnalyzerManagerDirAbsolutePath() (string, error) { return filepath.Join(jfrogDir, analyzerManagerDirName), nil } -func getAnalyzerManagerExecutable() (string, error) { +func GetAnalyzerManagerExecutable() (analyzerManagerPath string, err error) { analyzerManagerDir, err := GetAnalyzerManagerDirAbsolutePath() if err != nil { return "", err } - return filepath.Join(analyzerManagerDir, GetAnalyzerManagerExecutableName()), nil + analyzerManagerPath = filepath.Join(analyzerManagerDir, GetAnalyzerManagerExecutableName()) + var exists bool + if exists, err = fileutils.IsFileExists(analyzerManagerPath, false); err != nil { + return + } + if !exists { + err = errors.New("unable to locate the analyzer manager package. Advanced security scans cannot be performed without this package") + } + return analyzerManagerPath, err } func GetAnalyzerManagerExecutableName() string { @@ -166,34 +169,26 @@ func SetAnalyzerManagerEnvVariables(serverDetails *config.ServerDetails) error { if err := os.Setenv(jfTokenEnvVariable, serverDetails.AccessToken); errorutils.CheckError(err) != nil { return err } - if err := os.Setenv(logDirEnvVariable, analyzerManagerLogFolder); errorutils.CheckError(err) != nil { + analyzerManagerLogFolder, err := coreutils.CreateDirInJfrogHome(filepath.Join(coreutils.JfrogLogsDirName, analyzerManagerLogDirName)) + if err != nil { return err } - return nil -} - -func IsNotEntitledError(err error) bool { - if exitError, ok := err.(*exec.ExitError); ok { - exitCode := exitError.ExitCode() - // User not entitled error - if exitCode == notEntitledExitCode { - log.Debug("got not entitled error from analyzer manager") - return true - } + if err = os.Setenv(logDirEnvVariable, analyzerManagerLogFolder); errorutils.CheckError(err) != nil { + return err } - return false + return nil } -func IsUnsupportedCommandError(err error) bool { - if exitError, ok := err.(*exec.ExitError); ok { +func ParseAnalyzerManagerError(scanner ScanType, err error) error { + var exitError *exec.ExitError + if errors.As(err, &exitError) { exitCode := exitError.ExitCode() - // Analyzer manager doesn't support the requested scan command - if exitCode == unsupportedCommandExitCode { - log.Debug("got unsupported scan command error from analyzer manager") - return true + if exitCodeDescription, exitCodeExists := exitCodeErrorsMap[exitCode]; exitCodeExists { + log.Warn(exitCodeDescription) + return nil } } - return false + return scanner.FormattedError(err) } func RemoveDuplicateValues(stringSlice []string) []string { @@ -243,3 +238,24 @@ func GetResultSeverity(result *sarif.Result) string { } return SeverityDefaultValue } + +// Receives a list of relative path working dirs, returns a list of full paths working dirs +func GetFullPathsWorkingDirs(workingDirs []string) ([]string, error) { + if len(workingDirs) == 0 { + currentDir, err := coreutils.GetWorkingDirectory() + if err != nil { + return nil, err + } + return []string{currentDir}, nil + } + + var fullPathsWorkingDirs []string + for _, wd := range workingDirs { + fullPathWd, err := filepath.Abs(wd) + if err != nil { + return nil, err + } + 
fullPathsWorkingDirs = append(fullPathsWorkingDirs, fullPathWd) + } + return fullPathsWorkingDirs, nil +} diff --git a/xray/utils/analyzermanager_test.go b/xray/utils/analyzermanager_test.go index ce90b5210..a0fedcca1 100644 --- a/xray/utils/analyzermanager_test.go +++ b/xray/utils/analyzermanager_test.go @@ -1,8 +1,12 @@ package utils import ( + "errors" + "fmt" + "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" "github.com/owenrumney/go-sarif/v2/sarif" "github.com/stretchr/testify/assert" + "path/filepath" "testing" ) @@ -133,3 +137,86 @@ func TestGetResultSeverity(t *testing.T) { assert.Equal(t, test.expectedSeverity, GetResultSeverity(test.result)) } } + +func TestScanTypeErrorMsg(t *testing.T) { + tests := []struct { + scanner ScanType + err error + wantMsg string + }{ + { + scanner: Applicability, + err: errors.New("an error occurred"), + wantMsg: fmt.Sprintf(ErrFailedScannerRun, Applicability, "an error occurred"), + }, + { + scanner: Applicability, + err: nil, + wantMsg: "", + }, + { + scanner: Secrets, + err: nil, + wantMsg: "", + }, + { + scanner: Secrets, + err: errors.New("an error occurred"), + wantMsg: fmt.Sprintf(ErrFailedScannerRun, Secrets, "an error occurred"), + }, + { + scanner: IaC, + err: nil, + wantMsg: "", + }, + { + scanner: IaC, + err: errors.New("an error occurred"), + wantMsg: fmt.Sprintf(ErrFailedScannerRun, IaC, "an error occurred"), + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf("Scanner: %s", test.scanner), func(t *testing.T) { + gotMsg := test.scanner.FormattedError(test.err) + if gotMsg == nil { + assert.Nil(t, test.err) + return + } + assert.Equal(t, test.wantMsg, gotMsg.Error()) + }) + } +} + +func TestGetFullPathsWorkingDirs(t *testing.T) { + currentDir, err := coreutils.GetWorkingDirectory() + assert.NoError(t, err) + dir1, err := filepath.Abs("dir1") + assert.NoError(t, err) + dir2, err := filepath.Abs("dir2") + assert.NoError(t, err) + tests := []struct { + name string + workingDirs []string + expectedDirs []string + }{ + { + name: "EmptyWorkingDirs", + workingDirs: []string{}, + expectedDirs: []string{currentDir}, + }, + { + name: "ValidWorkingDirs", + workingDirs: []string{"dir1", "dir2"}, + expectedDirs: []string{dir1, dir2}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + actualDirs, err := GetFullPathsWorkingDirs(test.workingDirs) + assert.NoError(t, err) + assert.Equal(t, test.expectedDirs, actualDirs, "Incorrect full paths of working directories") + }) + } +} diff --git a/xray/utils/models.go b/xray/utils/models.go index fc83f0439..553758082 100644 --- a/xray/utils/models.go +++ b/xray/utils/models.go @@ -3,15 +3,13 @@ package utils import ( "github.com/jfrog/jfrog-cli-core/v2/utils/config" ioUtils "github.com/jfrog/jfrog-client-go/utils/io" - xrayUtils "github.com/jfrog/jfrog-client-go/xray/services/utils" ) type GraphBasicParams struct { serverDetails *config.ServerDetails outputFormat OutputFormat progress ioUtils.ProgressMgr - fullDependenciesTree []*xrayUtils.GraphNode - releasesRepo string + directDependencies []string excludeTestDependencies bool useWrapper bool insecureTls bool @@ -22,21 +20,12 @@ type GraphBasicParams struct { ignoreConfigFile bool } -func (gbp *GraphBasicParams) ReleasesRepo() string { - return gbp.releasesRepo +func (gbp *GraphBasicParams) DirectDependencies() []string { + return gbp.directDependencies } -func (gbp *GraphBasicParams) SetReleasesRepo(releasesRepo string) *GraphBasicParams { - gbp.releasesRepo = releasesRepo - return gbp -} - -func (gbp 
*GraphBasicParams) FullDependenciesTree() []*xrayUtils.GraphNode { - return gbp.fullDependenciesTree -} - -func (gbp *GraphBasicParams) SetFullDependenciesTree(fullDependenciesTree []*xrayUtils.GraphNode) *GraphBasicParams { - gbp.fullDependenciesTree = fullDependenciesTree +func (gbp *GraphBasicParams) AppendDirectDependencies(directDependencies []string) *GraphBasicParams { + gbp.directDependencies = append(gbp.directDependencies, directDependencies...) return gbp } diff --git a/xray/utils/resultstable.go b/xray/utils/resultstable.go index 39af61f15..745b61b46 100644 --- a/xray/utils/resultstable.go +++ b/xray/utils/resultstable.go @@ -32,13 +32,13 @@ const ( // In case one (or more) of the violations contains the field FailBuild set to true, CliError with exit code 3 will be returned. // Set printExtended to true to print fields with 'extended' tag. // If the scan argument is set to true, print the scan tables. -func PrintViolationsTable(violations []services.Violation, extendedResults *ExtendedScanResults, multipleRoots, printExtended, scan bool) error { +func PrintViolationsTable(violations []services.Violation, extendedResults *ExtendedScanResults, multipleRoots, printExtended, isBinaryScan bool) error { securityViolationsRows, licenseViolationsRows, operationalRiskViolationsRows, err := prepareViolations(violations, extendedResults, multipleRoots, true, true) if err != nil { return err } // Print tables, if scan is true; print the scan tables. - if scan { + if isBinaryScan { err = coreutils.PrintTable(formats.ConvertToVulnerabilityScanTableRow(securityViolationsRows), "Security Violations", "No security violations were found", printExtended) if err != nil { return err @@ -169,17 +169,22 @@ func prepareViolations(violations []services.Violation, extendedResults *Extende // In case multipleRoots is true, the field Component will show the root of each impact path, otherwise it will show the root's child. // Set printExtended to true to print fields with 'extended' tag. // If the scan argument is set to true, print the scan tables. 
-func PrintVulnerabilitiesTable(vulnerabilities []services.Vulnerability, extendedResults *ExtendedScanResults, multipleRoots, printExtended, scan bool) error { +func PrintVulnerabilitiesTable(vulnerabilities []services.Vulnerability, extendedResults *ExtendedScanResults, multipleRoots, printExtended, isBinaryScan bool) error { vulnerabilitiesRows, err := prepareVulnerabilities(vulnerabilities, extendedResults, multipleRoots, true, true) if err != nil { return err } - if scan { - return coreutils.PrintTable(formats.ConvertToVulnerabilityScanTableRow(vulnerabilitiesRows), "Vulnerabilities", "✨ No vulnerabilities were found ✨", printExtended) + if isBinaryScan { + return coreutils.PrintTable(formats.ConvertToVulnerabilityScanTableRow(vulnerabilitiesRows), "Vulnerable Components", "✨ No vulnerable components were found ✨", printExtended) } - - return coreutils.PrintTable(formats.ConvertToVulnerabilityTableRow(vulnerabilitiesRows), "Vulnerabilities", "✨ No vulnerabilities were found ✨", printExtended) + var emptyTableMessage string + if len(extendedResults.ScannedTechnologies) > 0 { + emptyTableMessage = "✨ No vulnerable dependencies were found ✨" + } else { + emptyTableMessage = coreutils.PrintYellow("🔧 Couldn't determine a package manager or build tool used by this project 🔧") + } + return coreutils.PrintTable(formats.ConvertToVulnerabilityTableRow(vulnerabilitiesRows), "Vulnerable Dependencies", emptyTableMessage, printExtended) } // Prepare vulnerabilities for all non-table formats (without style or emoji) @@ -242,12 +247,12 @@ func sortVulnerabilityOrViolationRows(rows []formats.VulnerabilityOrViolationRow // In case multipleRoots is true, the field Component will show the root of each impact path, otherwise it will show the root's child. // Set printExtended to true to print fields with 'extended' tag. // If the scan argument is set to true, print the scan tables. 
-func PrintLicensesTable(licenses []services.License, printExtended, scan bool) error { +func PrintLicensesTable(licenses []services.License, printExtended, isBinaryScan bool) error { licensesRows, err := PrepareLicenses(licenses) if err != nil { return err } - if scan { + if isBinaryScan { return coreutils.PrintTable(formats.ConvertToLicenseScanTableRow(licensesRows), "Licenses", "No licenses were found", printExtended) } return coreutils.PrintTable(formats.ConvertToLicenseTableRow(licensesRows), "Licenses", "No licenses were found", printExtended) @@ -308,7 +313,8 @@ func prepareSecrets(secrets []IacOrSecretResult, isTable bool) []formats.IacSecr func PrintSecretsTable(secrets []IacOrSecretResult, entitledForSecretsScan bool) error { if entitledForSecretsScan { secretsRows := prepareSecrets(secrets, true) - return coreutils.PrintTable(formats.ConvertToSecretsTableRow(secretsRows), "Secrets", + log.Output() + return coreutils.PrintTable(formats.ConvertToSecretsTableRow(secretsRows), "Secret Detection", "✨ No secrets were found ✨", false) } return nil @@ -345,6 +351,7 @@ func prepareIacs(iacs []IacOrSecretResult, isTable bool) []formats.IacSecretsRow func PrintIacTable(iacs []IacOrSecretResult, entitledForIacScan bool) error { if entitledForIacScan { iacRows := prepareIacs(iacs, true) + log.Output() return coreutils.PrintTable(formats.ConvertToIacTableRow(iacRows), "Infrastructure as Code Vulnerabilities", "✨ No Infrastructure as Code vulnerabilities were found ✨", false) } @@ -531,22 +538,22 @@ var Severities = map[string]map[string]*Severity{ "Critical": { ApplicableStringValue: {emoji: "💀", title: "Critical", numValue: 12, style: color.New(color.BgLightRed, color.LightWhite)}, ApplicabilityUndeterminedStringValue: {emoji: "💀", title: "Critical", numValue: 11, style: color.New(color.BgLightRed, color.LightWhite)}, - NotApplicableStringValue: {emoji: "👌", title: "Critical", numValue: 10}, + NotApplicableStringValue: {emoji: "💀", title: "Critical", numValue: 4, style: color.New(color.Gray)}, }, "High": { - ApplicableStringValue: {emoji: "🔥", title: "High", numValue: 9, style: color.New(color.Red)}, - ApplicabilityUndeterminedStringValue: {emoji: "🔥", title: "High", numValue: 8, style: color.New(color.Red)}, - NotApplicableStringValue: {emoji: "👌", title: "High", numValue: 7}, + ApplicableStringValue: {emoji: "🔥", title: "High", numValue: 10, style: color.New(color.Red)}, + ApplicabilityUndeterminedStringValue: {emoji: "🔥", title: "High", numValue: 9, style: color.New(color.Red)}, + NotApplicableStringValue: {emoji: "🔥", title: "High", numValue: 3, style: color.New(color.Gray)}, }, "Medium": { - ApplicableStringValue: {emoji: "🎃", title: "Medium", numValue: 6, style: color.New(color.Yellow)}, - ApplicabilityUndeterminedStringValue: {emoji: "🎃", title: "Medium", numValue: 5, style: color.New(color.Yellow)}, - NotApplicableStringValue: {emoji: "👌", title: "Medium", numValue: 4}, + ApplicableStringValue: {emoji: "🎃", title: "Medium", numValue: 8, style: color.New(color.Yellow)}, + ApplicabilityUndeterminedStringValue: {emoji: "🎃", title: "Medium", numValue: 7, style: color.New(color.Yellow)}, + NotApplicableStringValue: {emoji: "🎃", title: "Medium", numValue: 2, style: color.New(color.Gray)}, }, "Low": { - ApplicableStringValue: {emoji: "👻", title: "Low", numValue: 3}, - ApplicabilityUndeterminedStringValue: {emoji: "👻", title: "Low", numValue: 2}, - NotApplicableStringValue: {emoji: "👌", title: "Low", numValue: 1}, + ApplicableStringValue: {emoji: "👻", title: "Low", numValue: 6}, + 
ApplicabilityUndeterminedStringValue: {emoji: "👻", title: "Low", numValue: 5}, + NotApplicableStringValue: {emoji: "👻", title: "Low", numValue: 1, style: color.New(color.Gray)}, }, } @@ -626,13 +633,13 @@ func getOperationalRiskViolationReadableData(violation services.Violation) *oper } // simplifyVulnerabilities returns a new slice of services.Vulnerability that contains only the unique vulnerabilities from the input slice -// The uniqueness of the vulnerabilities is determined by the getUniqueKey function +// The uniqueness of the vulnerabilities is determined by the GetUniqueKey function func simplifyVulnerabilities(scanVulnerabilities []services.Vulnerability, multipleRoots bool) []services.Vulnerability { var uniqueVulnerabilities = make(map[string]*services.Vulnerability) for _, vulnerability := range scanVulnerabilities { for vulnerableComponentId := range vulnerability.Components { vulnerableDependency, vulnerableVersion, _ := SplitComponentId(vulnerableComponentId) - packageKey := getUniqueKey(vulnerableDependency, vulnerableVersion, vulnerability.Cves, len(vulnerability.Components[vulnerableComponentId].FixedVersions) > 0) + packageKey := GetUniqueKey(vulnerableDependency, vulnerableVersion, vulnerability.IssueId, len(vulnerability.Components[vulnerableComponentId].FixedVersions) > 0) if uniqueVulnerability, exist := uniqueVulnerabilities[packageKey]; exist { fixedVersions := appendUniqueFixVersions(uniqueVulnerability.Components[vulnerableComponentId].FixedVersions, vulnerability.Components[vulnerableComponentId].FixedVersions...) impactPaths := appendUniqueImpactPaths(uniqueVulnerability.Components[vulnerableComponentId].ImpactPaths, vulnerability.Components[vulnerableComponentId].ImpactPaths, multipleRoots) @@ -662,13 +669,13 @@ func simplifyVulnerabilities(scanVulnerabilities []services.Vulnerability, multi } // simplifyViolations returns a new slice of services.Violations that contains only the unique violations from the input slice -// The uniqueness of the violations is determined by the getUniqueKey function +// The uniqueness of the violations is determined by the GetUniqueKey function func simplifyViolations(scanViolations []services.Violation, multipleRoots bool) []services.Violation { var uniqueViolations = make(map[string]*services.Violation) for _, violation := range scanViolations { for vulnerableComponentId := range violation.Components { vulnerableDependency, vulnerableVersion, _ := SplitComponentId(vulnerableComponentId) - packageKey := getUniqueKey(vulnerableDependency, vulnerableVersion, violation.Cves, len(violation.Components[vulnerableComponentId].FixedVersions) > 0) + packageKey := GetUniqueKey(vulnerableDependency, vulnerableVersion, violation.IssueId, len(violation.Components[vulnerableComponentId].FixedVersions) > 0) if uniqueVulnerability, exist := uniqueViolations[packageKey]; exist { fixedVersions := appendUniqueFixVersions(uniqueVulnerability.Components[vulnerableComponentId].FixedVersions, violation.Components[vulnerableComponentId].FixedVersions...) 
impactPaths := appendUniqueImpactPaths(uniqueVulnerability.Components[vulnerableComponentId].ImpactPaths, violation.Components[vulnerableComponentId].ImpactPaths, multipleRoots) @@ -792,20 +799,16 @@ func appendUniqueFixVersions(targetFixVersions []string, sourceFixVersions ...st return result } -// getUniqueKey returns a unique string key of format "vulnerableDependency:vulnerableVersion:cveId:fixVersionExist" -func getUniqueKey(vulnerableDependency, vulnerableVersion string, cves []services.Cve, fixVersionExist bool) string { - var cveId string - if len(cves) != 0 { - cveId = cves[0].Id - } - return fmt.Sprintf("%s:%s:%s:%t", vulnerableDependency, vulnerableVersion, cveId, fixVersionExist) +// GetUniqueKey returns a unique string key of format "vulnerableDependency:vulnerableVersion:xrayID:fixVersionExist" +func GetUniqueKey(vulnerableDependency, vulnerableVersion, xrayID string, fixVersionExist bool) string { + return strings.Join([]string{vulnerableDependency, vulnerableVersion, xrayID, strconv.FormatBool(fixVersionExist)}, ":") } // If at least one cve is applicable - final value is applicable // Else if at least one cve is undetermined - final value is undetermined // Else (case when all cves aren't applicable) -> final value is not applicable func getApplicableCveValue(extendedResults *ExtendedScanResults, xrayCves []formats.CveRow) string { - if !extendedResults.EntitledForJas { + if !extendedResults.EntitledForJas || len(extendedResults.ApplicabilityScanResults) == 0 { return "" } if len(xrayCves) == 0 { @@ -830,9 +833,12 @@ func getApplicableCveValue(extendedResults *ExtendedScanResults, xrayCves []form } func printApplicableCveValue(applicableValue string, isTable bool) string { - if applicableValue == ApplicableStringValue && isTable && (log.IsStdOutTerminal() && log.IsColorsSupported() || - os.Getenv("GITLAB_CI") != "") { - return color.New(color.Red).Render(ApplicableStringValue) + if isTable && (log.IsStdOutTerminal() && log.IsColorsSupported() || os.Getenv("GITLAB_CI") != "") { + if applicableValue == ApplicableStringValue { + return color.New(color.Red).Render(applicableValue) + } else if applicableValue == NotApplicableStringValue { + return color.New(color.Green).Render(applicableValue) + } } return applicableValue } diff --git a/xray/utils/resultstable_test.go b/xray/utils/resultstable_test.go index ec260c1d6..0f102862d 100644 --- a/xray/utils/resultstable_test.go +++ b/xray/utils/resultstable_test.go @@ -172,17 +172,12 @@ func TestAppendUniqueFixVersions(t *testing.T) { func TestGetUniqueKey(t *testing.T) { vulnerableDependency := "test-dependency" vulnerableVersion := "1.0" - cves := []services.Cve{} - expectedKey := "test-dependency:1.0::true" - key := getUniqueKey(vulnerableDependency, vulnerableVersion, cves, true) + expectedKey := "test-dependency:1.0:XRAY-12234:true" + key := GetUniqueKey(vulnerableDependency, vulnerableVersion, "XRAY-12234", true) assert.Equal(t, expectedKey, key) - cves = []services.Cve{ - {Id: "CVE-1"}, - {Id: "CVE-2"}, - } - expectedKey = "test-dependency:1.0:CVE-1:false" - key = getUniqueKey(vulnerableDependency, vulnerableVersion, cves, false) + expectedKey = "test-dependency:1.0:XRAY-12143:false" + key = GetUniqueKey(vulnerableDependency, vulnerableVersion, "XRAY-12143", false) assert.Equal(t, expectedKey, key) } diff --git a/xray/utils/resultwriter.go b/xray/utils/resultwriter.go index fb775bd33..33f5ed3b2 100644 --- a/xray/utils/resultwriter.go +++ b/xray/utils/resultwriter.go @@ -60,10 +60,10 @@ type sarifProperties struct { // 
printExtended -If true, show extended results. // scan - If true, use an output layout suitable for `jf scan` or `jf docker scan` results. Otherwise, use a layout compatible for `jf audit` . // messages - Option array of messages, to be displayed if the format is Table -func PrintScanResults(results *ExtendedScanResults, simpleJsonError []formats.SimpleJsonError, format OutputFormat, includeVulnerabilities, includeLicenses, isMultipleRoots, printExtended, scan bool, messages []string) error { +func PrintScanResults(results *ExtendedScanResults, simpleJsonError []formats.SimpleJsonError, format OutputFormat, includeVulnerabilities, includeLicenses, isMultipleRoots, printExtended, isBinaryScan bool, messages []string) error { switch format { case Table: - return printScanResultsTables(results, scan, includeVulnerabilities, includeLicenses, isMultipleRoots, printExtended, messages) + return printScanResultsTables(results, isBinaryScan, includeVulnerabilities, includeLicenses, isMultipleRoots, printExtended, messages) case SimpleJson: jsonTable, err := convertScanToSimpleJson(results, simpleJsonError, isMultipleRoots, includeLicenses, false) if err != nil { @@ -82,8 +82,7 @@ func PrintScanResults(results *ExtendedScanResults, simpleJsonError []formats.Si return nil } -func printScanResultsTables(results *ExtendedScanResults, scan, includeVulnerabilities, includeLicenses, isMultipleRoots, printExtended bool, messages []string) (err error) { - log.Output() +func printScanResultsTables(results *ExtendedScanResults, isBinaryScan, includeVulnerabilities, includeLicenses, isMultipleRoots, printExtended bool, messages []string) (err error) { printMessages(messages) violations, vulnerabilities, licenses := SplitScanResults(results.getXrayScanResults()) if len(results.getXrayScanResults()) > 0 { @@ -93,35 +92,37 @@ func printScanResultsTables(results *ExtendedScanResults, scan, includeVulnerabi } printMessage(coreutils.PrintTitle("The full scan results are available here: ") + coreutils.PrintLink(resultsPath)) } - log.Output() if includeVulnerabilities { - err = PrintVulnerabilitiesTable(vulnerabilities, results, isMultipleRoots, printExtended, scan) + err = PrintVulnerabilitiesTable(vulnerabilities, results, isMultipleRoots, printExtended, isBinaryScan) } else { - err = PrintViolationsTable(violations, results, isMultipleRoots, printExtended, scan) + err = PrintViolationsTable(violations, results, isMultipleRoots, printExtended, isBinaryScan) } if err != nil { return } if includeLicenses { - if err = PrintLicensesTable(licenses, printExtended, scan); err != nil { + if err = PrintLicensesTable(licenses, printExtended, isBinaryScan); err != nil { return } } - if err = PrintSecretsTable(results.SecretsScanResults, results.EligibleForSecretScan); err != nil { + if err = PrintSecretsTable(results.SecretsScanResults, results.EntitledForJas); err != nil { return } - return PrintIacTable(results.IacScanResults, results.EligibleForIacScan) + return PrintIacTable(results.IacScanResults, results.EntitledForJas) } func printMessages(messages []string) { + if len(messages) > 0 { + log.Output() + } for _, m := range messages { printMessage(m) } } func printMessage(message string) { - log.Output("💬", message) + log.Output("💬" + message) } func GenerateSarifFileFromScan(extendedResults *ExtendedScanResults, isMultipleRoots, markdownOutput bool, scanningTool, toolURI string) (string, error) { @@ -233,17 +234,15 @@ func getIacOrSecretsProperties(secretOrIac formats.IacSecretsRow, markdownOutput markdownDescription := 
"" headline := "Infrastructure as Code Vulnerability" secretOrFinding := "Finding" - typeOrScanner := "Scanner" if isSecret { secretOrFinding = "Secret" - typeOrScanner = "Type" headline = "Potential Secret Exposed" } if markdownOutput { - headerRow := fmt.Sprintf("| Severity | File | Line:Column | %s | %s |\n", secretOrFinding, typeOrScanner) - separatorRow := "| :---: | :---: | :---: | :---: | :---: |\n" + headerRow := fmt.Sprintf("| Severity | File | Line:Column | %s |\n", secretOrFinding) + separatorRow := "| :---: | :---: | :---: | :---: |\n" tableHeader := headerRow + separatorRow - markdownDescription = tableHeader + fmt.Sprintf("| %s | %s | %s | %s | %s |", secretOrIac.Severity, file, secretOrIac.LineColumn, secretOrIac.Text, secretOrIac.Type) + markdownDescription = tableHeader + fmt.Sprintf("| %s | %s | %s | %s |", secretOrIac.Severity, file, secretOrIac.LineColumn, secretOrIac.Text) } return sarifProperties{ Headline: headline, diff --git a/xray/utils/resultwriter_test.go b/xray/utils/resultwriter_test.go index 252112826..44deddaba 100644 --- a/xray/utils/resultwriter_test.go +++ b/xray/utils/resultwriter_test.go @@ -24,15 +24,6 @@ func TestGenerateSarifFileFromScan(t *testing.T) { }, Technology: coreutils.Go.ToString(), }, - { - Summary: "A test vulnerability the harms nothing", - Severity: "High", - Components: map[string]services.Component{ - "vulnerability2": {}, - }, - IssueId: "XRAY-1234", - Technology: coreutils.Go.ToString(), - }, }, }, }, @@ -65,20 +56,20 @@ func TestGenerateSarifFileFromScan(t *testing.T) { { name: "Scan results with vulnerabilities, secrets and IaC", extendedResults: extendedResults, - expectedSarifOutput: "{\n \"version\": \"2.1.0\",\n \"$schema\": \"https://json.schemastore.org/sarif-2.1.0-rtm.5.json\",\n \"runs\": [\n {\n \"tool\": {\n \"driver\": {\n \"informationUri\": \"https://example.com/\",\n \"name\": \"JFrog Security\",\n \"rules\": [\n {\n \"id\": \"CVE-2022-1234, CVE-2023-1234\",\n \"shortDescription\": {\n \"text\": \"A test vulnerability the harms nothing\"\n },\n \"help\": {\n \"markdown\": \"\"\n },\n \"properties\": {\n \"security-severity\": \"8.0\"\n }\n },\n {\n \"id\": \"XRAY-1234\",\n \"shortDescription\": {\n \"text\": \"A test vulnerability the harms nothing\"\n },\n \"help\": {\n \"markdown\": \"\"\n },\n \"properties\": {\n \"security-severity\": \"0.0\"\n }\n },\n {\n \"id\": \"found_secrets.js\",\n \"shortDescription\": {\n \"text\": \"AAA************\"\n },\n \"help\": {\n \"markdown\": \"\"\n },\n \"properties\": {\n \"security-severity\": \"6.9\"\n }\n },\n {\n \"id\": \"plan/nonapplicable/req_sw_terraform_azure_compute_no_pass_auth.json\",\n \"shortDescription\": {\n \"text\": \"BBB************\"\n },\n \"help\": {\n \"markdown\": \"\"\n },\n \"properties\": {\n \"security-severity\": \"6.9\"\n }\n }\n ]\n }\n },\n \"results\": [\n {\n \"ruleId\": \"CVE-2022-1234, CVE-2023-1234\",\n \"ruleIndex\": 0,\n \"message\": {\n \"text\": \"[CVE-2022-1234, CVE-2023-1234] vulnerability1 \"\n },\n \"locations\": [\n {\n \"physicalLocation\": {\n \"artifactLocation\": {\n \"uri\": \"go.mod\"\n },\n \"region\": {\n \"startLine\": 0,\n \"startColumn\": 0,\n \"endLine\": 0\n }\n }\n }\n ]\n },\n {\n \"ruleId\": \"XRAY-1234\",\n \"ruleIndex\": 1,\n \"message\": {\n \"text\": \"[XRAY-1234] vulnerability2 \"\n },\n \"locations\": [\n {\n \"physicalLocation\": {\n \"artifactLocation\": {\n \"uri\": \"go.mod\"\n },\n \"region\": {\n \"startLine\": 0,\n \"startColumn\": 0,\n \"endLine\": 0\n }\n }\n }\n ]\n },\n {\n \"ruleId\": 
\"found_secrets.js\",\n \"ruleIndex\": 2,\n \"message\": {\n \"text\": \"Potential Secret Exposed\"\n },\n \"locations\": [\n {\n \"physicalLocation\": {\n \"artifactLocation\": {\n \"uri\": \"found_secrets.js\"\n },\n \"region\": {\n \"startLine\": 1,\n \"startColumn\": 18,\n \"endLine\": 1\n }\n }\n }\n ]\n },\n {\n \"ruleId\": \"plan/nonapplicable/req_sw_terraform_azure_compute_no_pass_auth.json\",\n \"ruleIndex\": 3,\n \"message\": {\n \"text\": \"Infrastructure as Code Vulnerability\"\n },\n \"locations\": [\n {\n \"physicalLocation\": {\n \"artifactLocation\": {\n \"uri\": \"plan/nonapplicable/req_sw_terraform_azure_compute_no_pass_auth.json\"\n },\n \"region\": {\n \"startLine\": 229,\n \"startColumn\": 38,\n \"endLine\": 229\n }\n }\n }\n ]\n }\n ]\n }\n ]\n}", + expectedSarifOutput: "{\n \"version\": \"2.1.0\",\n \"$schema\": \"https://json.schemastore.org/sarif-2.1.0.json\",\n \"runs\": [\n {\n \"tool\": {\n \"driver\": {\n \"informationUri\": \"https://example.com/\",\n \"name\": \"JFrog Security\",\n \"rules\": [\n {\n \"id\": \"CVE-2022-1234, CVE-2023-1234\",\n \"shortDescription\": {\n \"text\": \"A test vulnerability the harms nothing\"\n },\n \"help\": {\n \"markdown\": \"\"\n },\n \"properties\": {\n \"security-severity\": \"8.0\"\n }\n },\n {\n \"id\": \"found_secrets.js\",\n \"shortDescription\": {\n \"text\": \"AAA************\"\n },\n \"help\": {\n \"markdown\": \"\"\n },\n \"properties\": {\n \"security-severity\": \"6.9\"\n }\n },\n {\n \"id\": \"plan/nonapplicable/req_sw_terraform_azure_compute_no_pass_auth.json\",\n \"shortDescription\": {\n \"text\": \"BBB************\"\n },\n \"help\": {\n \"markdown\": \"\"\n },\n \"properties\": {\n \"security-severity\": \"6.9\"\n }\n }\n ]\n }\n },\n \"results\": [\n {\n \"ruleId\": \"CVE-2022-1234, CVE-2023-1234\",\n \"ruleIndex\": 0,\n \"message\": {\n \"text\": \"[CVE-2022-1234, CVE-2023-1234] vulnerability1 \"\n },\n \"locations\": [\n {\n \"physicalLocation\": {\n \"artifactLocation\": {\n \"uri\": \"go.mod\"\n },\n \"region\": {\n \"startLine\": 0,\n \"startColumn\": 0,\n \"endLine\": 0\n }\n }\n }\n ]\n },\n {\n \"ruleId\": \"found_secrets.js\",\n \"ruleIndex\": 1,\n \"message\": {\n \"text\": \"Potential Secret Exposed\"\n },\n \"locations\": [\n {\n \"physicalLocation\": {\n \"artifactLocation\": {\n \"uri\": \"found_secrets.js\"\n },\n \"region\": {\n \"startLine\": 1,\n \"startColumn\": 18,\n \"endLine\": 1\n }\n }\n }\n ]\n },\n {\n \"ruleId\": \"plan/nonapplicable/req_sw_terraform_azure_compute_no_pass_auth.json\",\n \"ruleIndex\": 2,\n \"message\": {\n \"text\": \"Infrastructure as Code Vulnerability\"\n },\n \"locations\": [\n {\n \"physicalLocation\": {\n \"artifactLocation\": {\n \"uri\": \"plan/nonapplicable/req_sw_terraform_azure_compute_no_pass_auth.json\"\n },\n \"region\": {\n \"startLine\": 229,\n \"startColumn\": 38,\n \"endLine\": 229\n }\n }\n }\n ]\n }\n ]\n }\n ]\n}", }, { name: "Scan results with vulnerabilities, secrets and IaC as Markdown", extendedResults: extendedResults, markdownOutput: true, - expectedSarifOutput: "{\n \"version\": \"2.1.0\",\n \"$schema\": \"https://json.schemastore.org/sarif-2.1.0-rtm.5.json\",\n \"runs\": [\n {\n \"tool\": {\n \"driver\": {\n \"informationUri\": \"https://example.com/\",\n \"name\": \"JFrog Security\",\n \"rules\": [\n {\n \"id\": \"CVE-2022-1234, CVE-2023-1234\",\n \"shortDescription\": {\n \"text\": \"\"\n },\n \"help\": {\n \"markdown\": \"| Severity Score | Direct Dependencies | Fixed Versions |\\n| :---: | :----: | :---: |\\n| 8.0 | | 1.2.3 |\\n\"\n 
},\n \"properties\": {\n \"security-severity\": \"8.0\"\n }\n },\n {\n \"id\": \"XRAY-1234\",\n \"shortDescription\": {\n \"text\": \"\"\n },\n \"help\": {\n \"markdown\": \"| Severity Score | Direct Dependencies | Fixed Versions |\\n| :---: | :----: | :---: |\\n| 0.0 | | No fix available |\\n\"\n },\n \"properties\": {\n \"security-severity\": \"0.0\"\n }\n },\n {\n \"id\": \"found_secrets.js\",\n \"shortDescription\": {\n \"text\": \"\"\n },\n \"help\": {\n \"markdown\": \"| Severity | File | Line:Column | Secret | Type |\\n| :---: | :---: | :---: | :---: | :---: |\\n| Medium | found_secrets.js | 1:18 | AAA************ | entropy |\"\n },\n \"properties\": {\n \"security-severity\": \"6.9\"\n }\n },\n {\n \"id\": \"plan/nonapplicable/req_sw_terraform_azure_compute_no_pass_auth.json\",\n \"shortDescription\": {\n \"text\": \"\"\n },\n \"help\": {\n \"markdown\": \"| Severity | File | Line:Column | Finding | Scanner |\\n| :---: | :---: | :---: | :---: | :---: |\\n| Medium | plan/nonapplicable/req_sw_terraform_azure_compute_no_pass_auth.json | 229:38 | BBB************ | entropy |\"\n },\n \"properties\": {\n \"security-severity\": \"6.9\"\n }\n }\n ]\n }\n },\n \"results\": [\n {\n \"ruleId\": \"CVE-2022-1234, CVE-2023-1234\",\n \"ruleIndex\": 0,\n \"message\": {\n \"text\": \"[CVE-2022-1234, CVE-2023-1234] vulnerability1 \"\n },\n \"locations\": [\n {\n \"physicalLocation\": {\n \"artifactLocation\": {\n \"uri\": \"go.mod\"\n },\n \"region\": {\n \"startLine\": 0,\n \"startColumn\": 0,\n \"endLine\": 0\n }\n }\n }\n ]\n },\n {\n \"ruleId\": \"XRAY-1234\",\n \"ruleIndex\": 1,\n \"message\": {\n \"text\": \"[XRAY-1234] vulnerability2 \"\n },\n \"locations\": [\n {\n \"physicalLocation\": {\n \"artifactLocation\": {\n \"uri\": \"go.mod\"\n },\n \"region\": {\n \"startLine\": 0,\n \"startColumn\": 0,\n \"endLine\": 0\n }\n }\n }\n ]\n },\n {\n \"ruleId\": \"found_secrets.js\",\n \"ruleIndex\": 2,\n \"message\": {\n \"text\": \"Potential Secret Exposed\"\n },\n \"locations\": [\n {\n \"physicalLocation\": {\n \"artifactLocation\": {\n \"uri\": \"found_secrets.js\"\n },\n \"region\": {\n \"startLine\": 1,\n \"startColumn\": 18,\n \"endLine\": 1\n }\n }\n }\n ]\n },\n {\n \"ruleId\": \"plan/nonapplicable/req_sw_terraform_azure_compute_no_pass_auth.json\",\n \"ruleIndex\": 3,\n \"message\": {\n \"text\": \"Infrastructure as Code Vulnerability\"\n },\n \"locations\": [\n {\n \"physicalLocation\": {\n \"artifactLocation\": {\n \"uri\": \"plan/nonapplicable/req_sw_terraform_azure_compute_no_pass_auth.json\"\n },\n \"region\": {\n \"startLine\": 229,\n \"startColumn\": 38,\n \"endLine\": 229\n }\n }\n }\n ]\n }\n ]\n }\n ]\n}", + expectedSarifOutput: "{\n \"version\": \"2.1.0\",\n \"$schema\": \"https://json.schemastore.org/sarif-2.1.0.json\",\n \"runs\": [\n {\n \"tool\": {\n \"driver\": {\n \"informationUri\": \"https://example.com/\",\n \"name\": \"JFrog Security\",\n \"rules\": [\n {\n \"id\": \"CVE-2022-1234, CVE-2023-1234\",\n \"shortDescription\": {\n \"text\": \"\"\n },\n \"help\": {\n \"markdown\": \"| Severity Score | Direct Dependencies | Fixed Versions |\\n| :---: | :----: | :---: |\\n| 8.0 | | 1.2.3 |\\n\"\n },\n \"properties\": {\n \"security-severity\": \"8.0\"\n }\n },\n {\n \"id\": \"found_secrets.js\",\n \"shortDescription\": {\n \"text\": \"\"\n },\n \"help\": {\n \"markdown\": \"| Severity | File | Line:Column | Secret |\\n| :---: | :---: | :---: | :---: |\\n| Medium | found_secrets.js | 1:18 | AAA************ |\"\n },\n \"properties\": {\n \"security-severity\": \"6.9\"\n }\n },\n 
{\n \"id\": \"plan/nonapplicable/req_sw_terraform_azure_compute_no_pass_auth.json\",\n \"shortDescription\": {\n \"text\": \"\"\n },\n \"help\": {\n \"markdown\": \"| Severity | File | Line:Column | Finding |\\n| :---: | :---: | :---: | :---: |\\n| Medium | plan/nonapplicable/req_sw_terraform_azure_compute_no_pass_auth.json | 229:38 | BBB************ |\"\n },\n \"properties\": {\n \"security-severity\": \"6.9\"\n }\n }\n ]\n }\n },\n \"results\": [\n {\n \"ruleId\": \"CVE-2022-1234, CVE-2023-1234\",\n \"ruleIndex\": 0,\n \"message\": {\n \"text\": \"[CVE-2022-1234, CVE-2023-1234] vulnerability1 \"\n },\n \"locations\": [\n {\n \"physicalLocation\": {\n \"artifactLocation\": {\n \"uri\": \"go.mod\"\n },\n \"region\": {\n \"startLine\": 0,\n \"startColumn\": 0,\n \"endLine\": 0\n }\n }\n }\n ]\n },\n {\n \"ruleId\": \"found_secrets.js\",\n \"ruleIndex\": 1,\n \"message\": {\n \"text\": \"Potential Secret Exposed\"\n },\n \"locations\": [\n {\n \"physicalLocation\": {\n \"artifactLocation\": {\n \"uri\": \"found_secrets.js\"\n },\n \"region\": {\n \"startLine\": 1,\n \"startColumn\": 18,\n \"endLine\": 1\n }\n }\n }\n ]\n },\n {\n \"ruleId\": \"plan/nonapplicable/req_sw_terraform_azure_compute_no_pass_auth.json\",\n \"ruleIndex\": 2,\n \"message\": {\n \"text\": \"Infrastructure as Code Vulnerability\"\n },\n \"locations\": [\n {\n \"physicalLocation\": {\n \"artifactLocation\": {\n \"uri\": \"plan/nonapplicable/req_sw_terraform_azure_compute_no_pass_auth.json\"\n },\n \"region\": {\n \"startLine\": 229,\n \"startColumn\": 38,\n \"endLine\": 229\n }\n }\n }\n ]\n }\n ]\n }\n ]\n}", }, { name: "Scan results without vulnerabilities", extendedResults: &ExtendedScanResults{}, isMultipleRoots: true, markdownOutput: true, - expectedSarifOutput: "{\n \"version\": \"2.1.0\",\n \"$schema\": \"https://json.schemastore.org/sarif-2.1.0-rtm.5.json\",\n \"runs\": [\n {\n \"tool\": {\n \"driver\": {\n \"informationUri\": \"https://example.com/\",\n \"name\": \"JFrog Security\",\n \"rules\": []\n }\n },\n \"results\": []\n }\n ]\n}", + expectedSarifOutput: "{\n \"version\": \"2.1.0\",\n \"$schema\": \"https://json.schemastore.org/sarif-2.1.0.json\",\n \"runs\": [\n {\n \"tool\": {\n \"driver\": {\n \"informationUri\": \"https://example.com/\",\n \"name\": \"JFrog Security\",\n \"rules\": []\n }\n },\n \"results\": []\n }\n ]\n}", }, } @@ -154,7 +145,7 @@ func TestGetIacOrSecretsProperties(t *testing.T) { Headline: "Potential Secret Exposed", Severity: "6.9", Description: "Potential secret", - MarkdownDescription: fmt.Sprintf("| Severity | File | Line:Column | Secret | Type |\n| :---: | :---: | :---: | :---: | :---: |\n| medium | %s | 5:3 | Potential secret | AWS Secret Manager |", path.Join("path", "to", "file")), + MarkdownDescription: fmt.Sprintf("| Severity | File | Line:Column | Secret |\n| :---: | :---: | :---: | :---: |\n| medium | %s | 5:3 | Potential secret |", path.Join("path", "to", "file")), XrayID: "", File: path.Join("path", "to", "file"), LineColumn: "5:3",