From 40c88ccba211ba6cf4ce5afc2b95bf654ddf3d17 Mon Sep 17 00:00:00 2001 From: Tian Feng Date: Thu, 7 Nov 2024 13:44:47 -0800 Subject: [PATCH 01/19] feat: Tag node_modules archive --- internal/hashio/files.go | 26 +++++++ internal/saucecloud/cloud.go | 141 +++++++++++++++++++++++++++-------- 2 files changed, 135 insertions(+), 32 deletions(-) diff --git a/internal/hashio/files.go b/internal/hashio/files.go index 3ae36ee43..d0e25e684 100644 --- a/internal/hashio/files.go +++ b/internal/hashio/files.go @@ -5,6 +5,7 @@ import ( "fmt" "io" "os" + "strings" ) // SHA256 hashes the given file with crypto.SHA256 and returns the checksum as a @@ -20,3 +21,28 @@ func SHA256(filename string) (string, error) { } return fmt.Sprintf("%x", h.Sum(nil)), nil } + +// HashContent computes a SHA-256 hash of the file content combined with extra content, +// and returns the first 16 characters of the hex-encoded hash. +func HashContent(filePath string, extraContent ...string) (string, error) { + file, err := os.Open(filePath) + if err != nil { + return "", fmt.Errorf("failed to open file: %w", err) + } + defer file.Close() + + fileInfo, err := file.Stat() + if err != nil { + return "", fmt.Errorf("failed to get file info: %w", err) + } + + buffer := make([]byte, fileInfo.Size()) + if _, err := file.Read(buffer); err != nil { + return "", fmt.Errorf("failed to read file: %w", err) + } + + combinedContent := string(buffer) + strings.Join(extraContent, "") + + hash := sha256.Sum256([]byte(combinedContent)) + return fmt.Sprintf("%x", hash)[:15], nil +} diff --git a/internal/saucecloud/cloud.go b/internal/saucecloud/cloud.go index 8bcfd271c..7ba65db24 100644 --- a/internal/saucecloud/cloud.go +++ b/internal/saucecloud/cloud.go @@ -409,8 +409,9 @@ func (r *CloudRunner) runJobs(jobOpts chan job.StartOptions, results chan<- resu } // remoteArchiveProject archives the contents of the folder and uploads to remote storage. -// It returns app uri as the uploaded project, otherApps as the collection of runner config and node_modules bundle. -func (r *CloudRunner) remoteArchiveProject(project interface{}, folder string, sauceignoreFile string, dryRun bool) (app string, otherApps []string, err error) { +// Returns the app URI for the uploaded project and additional URIs for the +// runner config, node_modules, and other resources. 
+func (r *CloudRunner) remoteArchiveProject(project interface{}, projectDir string, sauceignoreFile string, dryRun bool) (app string, otherApps []string, err error) { tempDir, err := os.MkdirTemp(os.TempDir(), "saucectl-app-payload-") if err != nil { return @@ -419,65 +420,140 @@ func (r *CloudRunner) remoteArchiveProject(project interface{}, folder string, s defer os.RemoveAll(tempDir) } - var files []string + files, err := collectFiles(projectDir) + if err != nil { + return "", nil, fmt.Errorf("failed to retrieve project files: %w", err) + } - contents, err := os.ReadDir(folder) + matcher, err := sauceignore.NewMatcherFromFile(sauceignoreFile) if err != nil { return } - for _, file := range contents { - // we never want mode_modules as part of the app payload - if file.Name() == "node_modules" { - continue - } - files = append(files, filepath.Join(folder, file.Name())) + archives, err := r.createArchives(tempDir, projectDir, project, files, matcher) + if err != nil { + return } - archives := make(map[uploadType]string) - - matcher, err := sauceignore.NewMatcherFromFile(sauceignoreFile) + uris, err := r.uploadFiles(archives, dryRun) if err != nil { return } - appZip, err := zip.ArchiveFiles("app", tempDir, folder, files, matcher) + nodeModulesURI, err := r.handleNodeModules(tempDir, projectDir, matcher) if err != nil { return } - archives[projectUpload] = appZip + uris[nodeModulesUpload] = nodeModulesURI + + appURI := uris[projectUpload] + extraURIs := r.sortExtraURIs(uris) + return appURI, extraURIs, nil +} - modZip, err := zip.ArchiveNodeModules(tempDir, folder, matcher, r.NPMDependencies) +// collectFiles retrieves all relevant files in the project directory, excluding "node_modules". +func collectFiles(dir string) ([]string, error) { + var files []string + contents, err := os.ReadDir(dir) if err != nil { - return + return nil, fmt.Errorf("failed to read project directory: %w", err) } - if modZip != "" { - archives[nodeModulesUpload] = modZip + + for _, file := range contents { + if file.Name() != "node_modules" { + files = append(files, filepath.Join(dir, file.Name())) + } } + return files, nil +} - configZip, err := zip.ArchiveRunnerConfig(project, tempDir) +// createArchives creates archives for the project's main files and runner configuration. +func (r *CloudRunner) createArchives(tempDir, projectDir string, project interface{}, files []string, matcher sauceignore.Matcher) (map[uploadType]string, error) { + archives := make(map[uploadType]string) + + projectArchive, err := zip.ArchiveFiles("app", tempDir, projectDir, files, matcher) if err != nil { - return + return nil, fmt.Errorf("failed to archive project files: %w", err) } - archives[runnerConfigUpload] = configZip + archives[projectUpload] = projectArchive - var uris = map[uploadType]string{} - for k, v := range archives { - uri, err := r.uploadArchive(storage.FileInfo{Name: v}, k, dryRun) + configArchive, err := zip.ArchiveRunnerConfig(project, tempDir) + if err != nil { + return nil, fmt.Errorf("failed to archive runner configuration: %w", err) + } + archives[runnerConfigUpload] = configArchive + + return archives, nil +} + +// handleNodeModules archives the node_modules directory and uploads it to remote storage. +// If tagging is enabled and a tagged version of node_modules already exists in storage, +// it returns the URI of the existing archive. +// Otherwise, it creates a new archive and uploads it. 
+func (r *CloudRunner) handleNodeModules(tempDir, projectDir string, matcher sauceignore.Matcher) (string, error) { + var tag string + var err error + if taggableModules(projectDir, r.NPMDependencies) { + tag, err = hashio.HashContent(filepath.Join(projectDir, "package-lock.json"), r.NPMDependencies...) + if err != nil { + return "", err + } + existingURI := r.findTaggedArchives(tag) + if existingURI != "" { + return existingURI, nil + } + } + + archive, err := zip.ArchiveNodeModules(tempDir, projectDir, matcher, r.NPMDependencies) + if err != nil { + return "", fmt.Errorf("failed to archive node_modules: %w", err) + } + + return r.uploadArchive(storage.FileInfo{Name: archive, Tags: []string{tag}}, nodeModulesUpload, false) +} + +// taggableModules checks if tagging should be applied based on the presence of package-lock.json and dependencies. +func taggableModules(dir string, npmDependencies []string) bool { + return len(npmDependencies) > 0 && fileExists(filepath.Join(dir, "package-lock.json")) +} + +// findTaggedArchives searches storage for a tagged archive with a matching hash. +func (r *CloudRunner) findTaggedArchives(tag string) string { + list, err := r.ProjectUploader.List(storage.ListOptions{Tags: []string{tag}}) + if err != nil || len(list.Items) == 0 { + return "" + } + + return fmt.Sprintf("storage:%s", list.Items[0].ID) +} + +// uploadFiles uploads each archive and returns a map of URIs. +func (r *CloudRunner) uploadFiles(archives map[uploadType]string, dryRun bool) (map[uploadType]string, error) { + uris := make(map[uploadType]string) + for uploadType, path := range archives { + uri, err := r.uploadArchive(storage.FileInfo{Name: path}, uploadType, dryRun) if err != nil { - return "", []string{}, err + return nil, fmt.Errorf("failed to upload %s archive: %w", uploadType, err) } - uris[k] = uri + uris[uploadType] = uri } + return uris, nil +} - app = uris[projectUpload] - for _, item := range []uploadType{runnerConfigUpload, nodeModulesUpload, otherAppsUpload} { - if val, ok := uris[item]; ok { - otherApps = append(otherApps, val) +func (r *CloudRunner) sortExtraURIs(uris map[uploadType]string) []string { + var extraURIs []string + for _, t := range []uploadType{runnerConfigUpload, nodeModulesUpload, otherAppsUpload} { + if uri, exists := uris[t]; exists { + extraURIs = append(extraURIs, uri) } } + return extraURIs +} - return +// fileExists verifies if a file exists at the specified path. +func fileExists(path string) bool { + _, err := os.Stat(path) + return !os.IsNotExist(err) } // remoteArchiveFiles archives the files to a remote storage. @@ -614,6 +690,7 @@ func (r *CloudRunner) uploadArchive(fileInfo storage.FileInfo, pType uploadType, if err != nil { return "", fmt.Errorf("unable to download app from %s: %w", filename, err) } + defer os.RemoveAll(dest) filename = dest From 5208cca1f316c9c357765325774f1d1e8b941341 Mon Sep 17 00:00:00 2001 From: Tian Feng Date: Thu, 7 Nov 2024 14:39:22 -0800 Subject: [PATCH 02/19] revise vars and functions names --- internal/saucecloud/cloud.go | 41 ++++++++++++++++++------------------ internal/storage/storage.go | 1 - 2 files changed, 20 insertions(+), 22 deletions(-) diff --git a/internal/saucecloud/cloud.go b/internal/saucecloud/cloud.go index 7ba65db24..d3b13be59 100644 --- a/internal/saucecloud/cloud.go +++ b/internal/saucecloud/cloud.go @@ -430,6 +430,7 @@ func (r *CloudRunner) remoteArchiveProject(project interface{}, projectDir strin return } + // Create archives for the project's main files and runner configuration. 
archives, err := r.createArchives(tempDir, projectDir, project, files, matcher) if err != nil { return @@ -440,15 +441,13 @@ func (r *CloudRunner) remoteArchiveProject(project interface{}, projectDir strin return } - nodeModulesURI, err := r.handleNodeModules(tempDir, projectDir, matcher) + nodeModulesURI, err := r.handleNodeModules(tempDir, projectDir, matcher, dryRun) if err != nil { return } uris[nodeModulesUpload] = nodeModulesURI - appURI := uris[projectUpload] - extraURIs := r.sortExtraURIs(uris) - return appURI, extraURIs, nil + return uris[projectUpload], r.refineURIs(uris), nil } // collectFiles retrieves all relevant files in the project directory, excluding "node_modules". @@ -467,7 +466,6 @@ func collectFiles(dir string) ([]string, error) { return files, nil } -// createArchives creates archives for the project's main files and runner configuration. func (r *CloudRunner) createArchives(tempDir, projectDir string, project interface{}, files []string, matcher sauceignore.Matcher) (map[uploadType]string, error) { archives := make(map[uploadType]string) @@ -490,7 +488,7 @@ func (r *CloudRunner) createArchives(tempDir, projectDir string, project interfa // If tagging is enabled and a tagged version of node_modules already exists in storage, // it returns the URI of the existing archive. // Otherwise, it creates a new archive and uploads it. -func (r *CloudRunner) handleNodeModules(tempDir, projectDir string, matcher sauceignore.Matcher) (string, error) { +func (r *CloudRunner) handleNodeModules(tempDir, projectDir string, matcher sauceignore.Matcher, dryRun bool) (string, error) { var tag string var err error if taggableModules(projectDir, r.NPMDependencies) { @@ -498,8 +496,10 @@ func (r *CloudRunner) handleNodeModules(tempDir, projectDir string, matcher sauc if err != nil { return "", err } + log.Info().Msgf("Searching remote node_modules archive by tag %s", tag) existingURI := r.findTaggedArchives(tag) if existingURI != "" { + log.Info().Msgf("Skipping archive and upload node_modules, use %s", existingURI) return existingURI, nil } } @@ -509,17 +509,21 @@ func (r *CloudRunner) handleNodeModules(tempDir, projectDir string, matcher sauc return "", fmt.Errorf("failed to archive node_modules: %w", err) } - return r.uploadArchive(storage.FileInfo{Name: archive, Tags: []string{tag}}, nodeModulesUpload, false) + return r.uploadArchive(storage.FileInfo{Name: archive, Tags: []string{tag}}, nodeModulesUpload, dryRun) } // taggableModules checks if tagging should be applied based on the presence of package-lock.json and dependencies. func taggableModules(dir string, npmDependencies []string) bool { - return len(npmDependencies) > 0 && fileExists(filepath.Join(dir, "package-lock.json")) + if len(npmDependencies) == 0 { + return false + } + _, err := os.Stat(filepath.Join(dir, "package-lock.json")) + return err == nil } -// findTaggedArchives searches storage for a tagged archive with a matching hash. +// findTaggedArchives searches storage for a tagged archive with a matching tag. 
func (r *CloudRunner) findTaggedArchives(tag string) string { - list, err := r.ProjectUploader.List(storage.ListOptions{Tags: []string{tag}}) + list, err := r.ProjectUploader.List(storage.ListOptions{Tags: []string{tag}, MaxResults: 1}) if err != nil || len(list.Items) == 0 { return "" } @@ -540,20 +544,15 @@ func (r *CloudRunner) uploadFiles(archives map[uploadType]string, dryRun bool) ( return uris, nil } -func (r *CloudRunner) sortExtraURIs(uris map[uploadType]string) []string { - var extraURIs []string +// refineURIs filters out extra URIs and sorts the remaining ones. +func (r *CloudRunner) refineURIs(uriMap map[uploadType]string) []string { + var uris []string for _, t := range []uploadType{runnerConfigUpload, nodeModulesUpload, otherAppsUpload} { - if uri, exists := uris[t]; exists { - extraURIs = append(extraURIs, uri) + if uri, ok := uriMap[t]; ok { + uris = append(uris, uri) } } - return extraURIs -} - -// fileExists verifies if a file exists at the specified path. -func fileExists(path string) bool { - _, err := os.Stat(path) - return !os.IsNotExist(err) + return uris } // remoteArchiveFiles archives the files to a remote storage. diff --git a/internal/storage/storage.go b/internal/storage/storage.go index 62998d406..177f2b545 100644 --- a/internal/storage/storage.go +++ b/internal/storage/storage.go @@ -36,7 +36,6 @@ type Item struct { Name string `json:"name"` Size int `json:"size"` Uploaded time.Time `json:"uploaded"` - Tags []string } // ErrFileNotFound is returned when the requested file does not exist. From 1d781920f391bb26f5da640193bb93fc52c7edbc Mon Sep 17 00:00:00 2001 From: Tian Feng Date: Thu, 7 Nov 2024 14:47:02 -0800 Subject: [PATCH 03/19] tag can be empty --- internal/saucecloud/cloud.go | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/internal/saucecloud/cloud.go b/internal/saucecloud/cloud.go index d3b13be59..a21c4f11d 100644 --- a/internal/saucecloud/cloud.go +++ b/internal/saucecloud/cloud.go @@ -487,15 +487,15 @@ func (r *CloudRunner) createArchives(tempDir, projectDir string, project interfa // handleNodeModules archives the node_modules directory and uploads it to remote storage. // If tagging is enabled and a tagged version of node_modules already exists in storage, // it returns the URI of the existing archive. -// Otherwise, it creates a new archive and uploads it. +// Otherwise, it creates a new archive, uploads it and returns the storage ID. func (r *CloudRunner) handleNodeModules(tempDir, projectDir string, matcher sauceignore.Matcher, dryRun bool) (string, error) { - var tag string - var err error + var tags []string if taggableModules(projectDir, r.NPMDependencies) { - tag, err = hashio.HashContent(filepath.Join(projectDir, "package-lock.json"), r.NPMDependencies...) + tag, err := hashio.HashContent(filepath.Join(projectDir, "package-lock.json"), r.NPMDependencies...) 
if err != nil { return "", err } + tags = append(tags, tag) log.Info().Msgf("Searching remote node_modules archive by tag %s", tag) existingURI := r.findTaggedArchives(tag) if existingURI != "" { @@ -509,7 +509,7 @@ func (r *CloudRunner) handleNodeModules(tempDir, projectDir string, matcher sauc return "", fmt.Errorf("failed to archive node_modules: %w", err) } - return r.uploadArchive(storage.FileInfo{Name: archive, Tags: []string{tag}}, nodeModulesUpload, dryRun) + return r.uploadArchive(storage.FileInfo{Name: archive, Tags: tags}, nodeModulesUpload, dryRun) } // taggableModules checks if tagging should be applied based on the presence of package-lock.json and dependencies. From bb364298be89a0d1ef24fd6166c06334610172dc Mon Sep 17 00:00:00 2001 From: Tian Feng Date: Thu, 7 Nov 2024 14:53:14 -0800 Subject: [PATCH 04/19] revise comments --- internal/saucecloud/cloud.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/saucecloud/cloud.go b/internal/saucecloud/cloud.go index a21c4f11d..c1e22d335 100644 --- a/internal/saucecloud/cloud.go +++ b/internal/saucecloud/cloud.go @@ -544,7 +544,7 @@ func (r *CloudRunner) uploadFiles(archives map[uploadType]string, dryRun bool) ( return uris, nil } -// refineURIs filters out extra URIs and sorts the remaining ones. +// refineURIs picks extra URIs and sorts them. func (r *CloudRunner) refineURIs(uriMap map[uploadType]string) []string { var uris []string for _, t := range []uploadType{runnerConfigUpload, nodeModulesUpload, otherAppsUpload} { From 276d6ff75ff76eca6e65e1889c7a3608e7bd65c1 Mon Sep 17 00:00:00 2001 From: Tian Feng Date: Thu, 7 Nov 2024 15:21:59 -0800 Subject: [PATCH 05/19] empty node_modules path caused the failure --- internal/saucecloud/cloud.go | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/internal/saucecloud/cloud.go b/internal/saucecloud/cloud.go index c1e22d335..c9d893daa 100644 --- a/internal/saucecloud/cloud.go +++ b/internal/saucecloud/cloud.go @@ -441,11 +441,15 @@ func (r *CloudRunner) remoteArchiveProject(project interface{}, projectDir strin return } - nodeModulesURI, err := r.handleNodeModules(tempDir, projectDir, matcher, dryRun) - if err != nil { - return + if len(r.NPMDependencies) > 0 { + nodeModulesURI, err := r.handleNodeModules(tempDir, projectDir, matcher, dryRun) + if err != nil { + return "", nil, err + } + if nodeModulesURI != "" { + uris[nodeModulesUpload] = nodeModulesURI + } } - uris[nodeModulesUpload] = nodeModulesURI return uris[projectUpload], r.refineURIs(uris), nil } @@ -490,12 +494,14 @@ func (r *CloudRunner) createArchives(tempDir, projectDir string, project interfa // Otherwise, it creates a new archive, uploads it and returns the storage ID. func (r *CloudRunner) handleNodeModules(tempDir, projectDir string, matcher sauceignore.Matcher, dryRun bool) (string, error) { var tags []string + if taggableModules(projectDir, r.NPMDependencies) { tag, err := hashio.HashContent(filepath.Join(projectDir, "package-lock.json"), r.NPMDependencies...) 
if err != nil { return "", err } tags = append(tags, tag) + log.Info().Msgf("Searching remote node_modules archive by tag %s", tag) existingURI := r.findTaggedArchives(tag) if existingURI != "" { @@ -508,6 +514,9 @@ func (r *CloudRunner) handleNodeModules(tempDir, projectDir string, matcher sauc if err != nil { return "", fmt.Errorf("failed to archive node_modules: %w", err) } + if archive == "" { + return "", nil + } return r.uploadArchive(storage.FileInfo{Name: archive, Tags: tags}, nodeModulesUpload, dryRun) } From a88bbe0b3737d295063c7484ef25dcf6bf736a36 Mon Sep 17 00:00:00 2001 From: Tian Feng Date: Thu, 7 Nov 2024 15:27:41 -0800 Subject: [PATCH 06/19] let me try windows --- .github/workflows/test.yml | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 79415982c..86cd50a87 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -150,6 +150,25 @@ jobs: - name: Saucectl RUN - CLI Driven run: ./saucectl run playwright ".*.js" -c "" --name "CLI Driven" --browser chromium --rootDir tests/e2e/playwright/ --playwright.version 1.43.1 --timeout 10m -r us-west-1 + playwright-windows: + needs: build + runs-on: windows-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Download saucectl Binary + uses: actions/download-artifact@v4 + with: + name: saucectlbin + + - name: Saucectl RUN - Config Driven + run: .\saucectl.exe run -c .sauce/playwright.yml --timeout 10m + + - name: Saucectl RUN - CLI Driven + run: .\saucectl.exe run playwright ".*.js" -c "" --name "CLI Driven" --browser chromium --rootDir tests/e2e/playwright/ --playwright.version 1.43.1 --timeout 10m -r us-west-1 + testcafe: needs: build runs-on: ubuntu-latest From 8be022973e1b89a26dabb373cbac7a5aa1ebcb70 Mon Sep 17 00:00:00 2001 From: Tian Feng Date: Thu, 7 Nov 2024 15:33:03 -0800 Subject: [PATCH 07/19] how about cucumber test on windows --- .github/workflows/test.yml | 26 ++------------------------ 1 file changed, 2 insertions(+), 24 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 86cd50a87..80ec1a524 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -150,25 +150,6 @@ jobs: - name: Saucectl RUN - CLI Driven run: ./saucectl run playwright ".*.js" -c "" --name "CLI Driven" --browser chromium --rootDir tests/e2e/playwright/ --playwright.version 1.43.1 --timeout 10m -r us-west-1 - playwright-windows: - needs: build - runs-on: windows-latest - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Download saucectl Binary - uses: actions/download-artifact@v4 - with: - name: saucectlbin - - - name: Saucectl RUN - Config Driven - run: .\saucectl.exe run -c .sauce/playwright.yml --timeout 10m - - - name: Saucectl RUN - CLI Driven - run: .\saucectl.exe run playwright ".*.js" -c "" --name "CLI Driven" --browser chromium --rootDir tests/e2e/playwright/ --playwright.version 1.43.1 --timeout 10m -r us-west-1 - testcafe: needs: build runs-on: ubuntu-latest @@ -293,7 +274,7 @@ jobs: playwright-cucumberjs: needs: build - runs-on: ubuntu-latest + runs-on: windows-latest steps: - name: Checkout @@ -304,14 +285,11 @@ jobs: with: name: saucectlbin - - name: Set Permissions - run: chmod +x ./saucectl - - name: Saucectl RUN - Config Driven working-directory: ./tests/e2e/playwright-cucumberjs run: | npm ci --production - ../../../saucectl run + ..\..\..\saucectl.exe run imagerunner: needs: build From fe9b189798eaf6f3eb456f1df1139ef2d9183130 Mon Sep 17 
00:00:00 2001 From: Tian Feng Date: Thu, 7 Nov 2024 15:51:20 -0800 Subject: [PATCH 08/19] verification done --- .github/workflows/test.yml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 80ec1a524..79415982c 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -274,7 +274,7 @@ jobs: playwright-cucumberjs: needs: build - runs-on: windows-latest + runs-on: ubuntu-latest steps: - name: Checkout @@ -285,11 +285,14 @@ jobs: with: name: saucectlbin + - name: Set Permissions + run: chmod +x ./saucectl + - name: Saucectl RUN - Config Driven working-directory: ./tests/e2e/playwright-cucumberjs run: | npm ci --production - ..\..\..\saucectl.exe run + ../../../saucectl run imagerunner: needs: build From d4ea20d2aa560f74dd959832f1dd125c5c86bef0 Mon Sep 17 00:00:00 2001 From: Tian Feng Date: Thu, 7 Nov 2024 16:10:45 -0800 Subject: [PATCH 09/19] adapt to context change --- internal/saucecloud/cloud.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/saucecloud/cloud.go b/internal/saucecloud/cloud.go index c9d893daa..8e360e9f5 100644 --- a/internal/saucecloud/cloud.go +++ b/internal/saucecloud/cloud.go @@ -532,7 +532,7 @@ func taggableModules(dir string, npmDependencies []string) bool { // findTaggedArchives searches storage for a tagged archive with a matching tag. func (r *CloudRunner) findTaggedArchives(tag string) string { - list, err := r.ProjectUploader.List(storage.ListOptions{Tags: []string{tag}, MaxResults: 1}) + list, err := r.ProjectUploader.List(context.TODO(), storage.ListOptions{Tags: []string{tag}, MaxResults: 1}) if err != nil || len(list.Items) == 0 { return "" } From 9a1c2b91f3b3cbd462c58bafd0a6a4788c7b77a9 Mon Sep 17 00:00:00 2001 From: Tian Feng Date: Thu, 7 Nov 2024 16:15:17 -0800 Subject: [PATCH 10/19] revise comment --- internal/saucecloud/cloud.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/internal/saucecloud/cloud.go b/internal/saucecloud/cloud.go index 8e360e9f5..8ec48e333 100644 --- a/internal/saucecloud/cloud.go +++ b/internal/saucecloud/cloud.go @@ -422,7 +422,7 @@ func (r *CloudRunner) remoteArchiveProject(project interface{}, projectDir strin files, err := collectFiles(projectDir) if err != nil { - return "", nil, fmt.Errorf("failed to retrieve project files: %w", err) + return "", nil, fmt.Errorf("failed to collect project files: %w", err) } matcher, err := sauceignore.NewMatcherFromFile(sauceignoreFile) @@ -698,7 +698,6 @@ func (r *CloudRunner) uploadArchive(fileInfo storage.FileInfo, pType uploadType, if err != nil { return "", fmt.Errorf("unable to download app from %s: %w", filename, err) } - defer os.RemoveAll(dest) filename = dest From c84ed2141558b130367e64e6ad96e0ff37618da6 Mon Sep 17 00:00:00 2001 From: Tian Feng Date: Thu, 7 Nov 2024 16:43:20 -0800 Subject: [PATCH 11/19] update comments --- internal/saucecloud/cloud.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/saucecloud/cloud.go b/internal/saucecloud/cloud.go index 8ec48e333..aaa4ccf3a 100644 --- a/internal/saucecloud/cloud.go +++ b/internal/saucecloud/cloud.go @@ -553,7 +553,7 @@ func (r *CloudRunner) uploadFiles(archives map[uploadType]string, dryRun bool) ( return uris, nil } -// refineURIs picks extra URIs and sorts them. +// refineURIs selects non-main URIs and sorts them. 
func (r *CloudRunner) refineURIs(uriMap map[uploadType]string) []string { var uris []string for _, t := range []uploadType{runnerConfigUpload, nodeModulesUpload, otherAppsUpload} { From 80b32f777322ffe93ac65a5ab60d7b5e6c8f8ca1 Mon Sep 17 00:00:00 2001 From: Tian Feng Date: Thu, 7 Nov 2024 17:19:08 -0800 Subject: [PATCH 12/19] Update internal/hashio/files.go Co-authored-by: Alex Plischke --- internal/hashio/files.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/hashio/files.go b/internal/hashio/files.go index d0e25e684..4d1a32199 100644 --- a/internal/hashio/files.go +++ b/internal/hashio/files.go @@ -24,7 +24,7 @@ func SHA256(filename string) (string, error) { // HashContent computes a SHA-256 hash of the file content combined with extra content, // and returns the first 16 characters of the hex-encoded hash. -func HashContent(filePath string, extraContent ...string) (string, error) { +func HashContent(filename string, extra ...string) (string, error) { file, err := os.Open(filePath) if err != nil { return "", fmt.Errorf("failed to open file: %w", err) From 828e853f420ccd0dc3bfcdfacf13c41959a3d874 Mon Sep 17 00:00:00 2001 From: Tian Feng Date: Thu, 7 Nov 2024 17:19:30 -0800 Subject: [PATCH 13/19] Update internal/hashio/files.go Co-authored-by: Alex Plischke --- internal/hashio/files.go | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/internal/hashio/files.go b/internal/hashio/files.go index 4d1a32199..4dcdacc2f 100644 --- a/internal/hashio/files.go +++ b/internal/hashio/files.go @@ -25,24 +25,19 @@ func SHA256(filename string) (string, error) { // HashContent computes a SHA-256 hash of the file content combined with extra content, // and returns the first 16 characters of the hex-encoded hash. 
func HashContent(filename string, extra ...string) (string, error) { + h := sha256.New() + file, err := os.Open(filePath) if err != nil { return "", fmt.Errorf("failed to open file: %w", err) } defer file.Close() - fileInfo, err := file.Stat() - if err != nil { - return "", fmt.Errorf("failed to get file info: %w", err) - } - - buffer := make([]byte, fileInfo.Size()) - if _, err := file.Read(buffer); err != nil { + if _, err := io.Copy(h, file); err != nil { return "", fmt.Errorf("failed to read file: %w", err) } - combinedContent := string(buffer) + strings.Join(extraContent, "") + h.Write([]byte(strings.Join(extraContent, ""))) - hash := sha256.Sum256([]byte(combinedContent)) - return fmt.Sprintf("%x", hash)[:15], nil + return fmt.Sprintf("%x", h.Sum(nil))[:16], nil } From 1caa1f7caccd5ab11240f857fb7cfbe5de518889 Mon Sep 17 00:00:00 2001 From: Tian Feng Date: Thu, 7 Nov 2024 17:18:53 -0800 Subject: [PATCH 14/19] fix hash length --- internal/hashio/files.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/internal/hashio/files.go b/internal/hashio/files.go index 4dcdacc2f..f5afaa5c9 100644 --- a/internal/hashio/files.go +++ b/internal/hashio/files.go @@ -27,7 +27,7 @@ func SHA256(filename string) (string, error) { func HashContent(filename string, extra ...string) (string, error) { h := sha256.New() - file, err := os.Open(filePath) + file, err := os.Open(filename) if err != nil { return "", fmt.Errorf("failed to open file: %w", err) } @@ -37,7 +37,7 @@ func HashContent(filename string, extra ...string) (string, error) { return "", fmt.Errorf("failed to read file: %w", err) } - h.Write([]byte(strings.Join(extraContent, ""))) + h.Write([]byte(strings.Join(extra, ""))) return fmt.Sprintf("%x", h.Sum(nil))[:16], nil } From 14b3edc1a3ec37f9fec2377cd6b5c8234ba511ef Mon Sep 17 00:00:00 2001 From: Tian Feng Date: Thu, 7 Nov 2024 17:37:52 -0800 Subject: [PATCH 15/19] revise extra uris --- internal/saucecloud/cloud.go | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/internal/saucecloud/cloud.go b/internal/saucecloud/cloud.go index aaa4ccf3a..c8e3cd022 100644 --- a/internal/saucecloud/cloud.go +++ b/internal/saucecloud/cloud.go @@ -451,7 +451,14 @@ func (r *CloudRunner) remoteArchiveProject(project interface{}, projectDir strin } } - return uris[projectUpload], r.refineURIs(uris), nil + var extraURIs []string + for _, t := range []uploadType{runnerConfigUpload, nodeModulesUpload, otherAppsUpload} { + if uri, ok := uris[t]; ok { + extraURIs = append(extraURIs, uri) + } + } + + return uris[projectUpload], extraURIs, nil } // collectFiles retrieves all relevant files in the project directory, excluding "node_modules". @@ -533,7 +540,11 @@ func taggableModules(dir string, npmDependencies []string) bool { // findTaggedArchives searches storage for a tagged archive with a matching tag. func (r *CloudRunner) findTaggedArchives(tag string) string { list, err := r.ProjectUploader.List(context.TODO(), storage.ListOptions{Tags: []string{tag}, MaxResults: 1}) - if err != nil || len(list.Items) == 0 { + if err != nil { + log.Err(err).Msgf("Failed to retrieve file with tag %q from storage", tag) + return "" + } + if len(list.Items) == 0 { return "" } @@ -553,17 +564,6 @@ func (r *CloudRunner) uploadFiles(archives map[uploadType]string, dryRun bool) ( return uris, nil } -// refineURIs selects non-main URIs and sorts them. 
-func (r *CloudRunner) refineURIs(uriMap map[uploadType]string) []string { - var uris []string - for _, t := range []uploadType{runnerConfigUpload, nodeModulesUpload, otherAppsUpload} { - if uri, ok := uriMap[t]; ok { - uris = append(uris, uri) - } - } - return uris -} - // remoteArchiveFiles archives the files to a remote storage. func (r *CloudRunner) remoteArchiveFiles(project interface{}, files []string, sauceignoreFile string, dryRun bool) (string, error) { tempDir, err := os.MkdirTemp(os.TempDir(), "saucectl-app-payload-") From 3641f9f5ad9acb637ce6be6fccd2f7f06c388315 Mon Sep 17 00:00:00 2001 From: Tian Feng Date: Thu, 7 Nov 2024 20:19:26 -0800 Subject: [PATCH 16/19] refactor archive node modules --- internal/saucecloud/cloud.go | 47 ++++-- internal/saucecloud/cloud_test.go | 255 ++++++++++++----------------- internal/saucecloud/zip/archive.go | 25 +-- 3 files changed, 139 insertions(+), 188 deletions(-) diff --git a/internal/saucecloud/cloud.go b/internal/saucecloud/cloud.go index c8e3cd022..653d0cfe9 100644 --- a/internal/saucecloud/cloud.go +++ b/internal/saucecloud/cloud.go @@ -441,7 +441,11 @@ func (r *CloudRunner) remoteArchiveProject(project interface{}, projectDir strin return } - if len(r.NPMDependencies) > 0 { + need, err := needsNodeModules(projectDir, matcher, r.NPMDependencies) + if err != nil { + return + } + if need { nodeModulesURI, err := r.handleNodeModules(tempDir, projectDir, matcher, dryRun) if err != nil { return "", nil, err @@ -451,14 +455,14 @@ func (r *CloudRunner) remoteArchiveProject(project interface{}, projectDir strin } } - var extraURIs []string + var sortedURIs []string for _, t := range []uploadType{runnerConfigUpload, nodeModulesUpload, otherAppsUpload} { if uri, ok := uris[t]; ok { - extraURIs = append(extraURIs, uri) + sortedURIs = append(sortedURIs, uri) } } - return uris[projectUpload], extraURIs, nil + return uris[projectUpload], sortedURIs, nil } // collectFiles retrieves all relevant files in the project directory, excluding "node_modules". @@ -521,20 +525,34 @@ func (r *CloudRunner) handleNodeModules(tempDir, projectDir string, matcher sauc if err != nil { return "", fmt.Errorf("failed to archive node_modules: %w", err) } - if archive == "" { - return "", nil - } return r.uploadArchive(storage.FileInfo{Name: archive, Tags: tags}, nodeModulesUpload, dryRun) } +func needsNodeModules(projectDir string, matcher sauceignore.Matcher, dependencies []string) (bool, error) { + modDir := filepath.Join(projectDir, "node_modules") + ignored := matcher.Match(strings.Split(modDir, string(os.PathSeparator)), true) + hasMods := fileExists(modDir) + wantMods := len(dependencies) > 0 + + if wantMods && !hasMods { + return false, fmt.Errorf("unable to access 'node_modules' folder, but you have npm dependencies defined in your configuration; ensure that the folder exists and is accessible") + } + + if ignored && wantMods { + return false, fmt.Errorf("'node_modules' is ignored by sauceignore, but you have npm dependencies defined in your project; please remove 'node_modules' from your sauceignore file") + } + + if !hasMods || ignored { + return false, nil + } + + return true, nil +} + // taggableModules checks if tagging should be applied based on the presence of package-lock.json and dependencies. 
func taggableModules(dir string, npmDependencies []string) bool { - if len(npmDependencies) == 0 { - return false - } - _, err := os.Stat(filepath.Join(dir, "package-lock.json")) - return err == nil + return len(npmDependencies) > 0 && fileExists(filepath.Join(dir, "package-lock.json")) } // findTaggedArchives searches storage for a tagged archive with a matching tag. @@ -564,6 +582,11 @@ func (r *CloudRunner) uploadFiles(archives map[uploadType]string, dryRun bool) ( return uris, nil } +func fileExists(path string) bool { + _, err := os.Stat(path) + return err == nil +} + // remoteArchiveFiles archives the files to a remote storage. func (r *CloudRunner) remoteArchiveFiles(project interface{}, files []string, sauceignoreFile string, dryRun bool) (string, error) { tempDir, err := os.MkdirTemp(os.TempDir(), "saucectl-app-payload-") diff --git a/internal/saucecloud/cloud_test.go b/internal/saucecloud/cloud_test.go index 76bda20ef..8d157d73f 100644 --- a/internal/saucecloud/cloud_test.go +++ b/internal/saucecloud/cloud_test.go @@ -1,18 +1,15 @@ package saucecloud import ( - "fmt" "os" "path/filepath" + "strings" "syscall" "testing" "time" "github.com/saucelabs/saucectl/internal/job" - "github.com/saucelabs/saucectl/internal/saucecloud/zip" - "github.com/saucelabs/saucectl/internal/sauceignore" "github.com/stretchr/testify/assert" - "gotest.tools/v3/fs" ) func TestSignalDetection(t *testing.T) { @@ -56,156 +53,6 @@ func TestRunJobsSkipped(t *testing.T) { assert.True(t, res.skipped) } -func TestCloudRunner_archiveNodeModules(t *testing.T) { - tempDir, err := os.MkdirTemp(os.TempDir(), "saucectl-app-payload-") - if err != nil { - t.Error(err) - } - defer os.RemoveAll(tempDir) - - projectsDir := fs.NewDir(t, "project", - fs.WithDir("has-mods", - fs.WithDir("node_modules", - fs.WithDir("mod1", - fs.WithFile("package.json", "{}"), - ), - ), - ), - fs.WithDir("no-mods"), - fs.WithDir("empty-mods", - fs.WithDir("node_modules"), - ), - ) - defer projectsDir.Remove() - - wd, err := os.Getwd() - if err != nil { - t.Errorf("Failed to get the current working dir: %v", err) - } - - if err := os.Chdir(projectsDir.Path()); err != nil { - t.Errorf("Failed to change the current working dir: %v", err) - } - defer func() { - if err := os.Chdir(wd); err != nil { - t.Errorf("Failed to change the current working dir back to original: %v", err) - } - }() - - type fields struct { - NPMDependencies []string - } - type args struct { - tempDir string - rootDir string - matcher sauceignore.Matcher - } - tests := []struct { - name string - fields fields - args args - want string - wantErr assert.ErrorAssertionFunc - }{ - { - "want to include mods, but node_modules does not exist", - fields{ - NPMDependencies: []string{"mod1"}, - }, - args{ - tempDir: tempDir, - rootDir: "no-mods", - matcher: sauceignore.NewMatcher([]sauceignore.Pattern{}), - }, - "", - func(t assert.TestingT, err error, args ...interface{}) bool { - return assert.EqualError(t, err, "unable to access 'node_modules' folder, but you have npm dependencies defined in your configuration; ensure that the folder exists and is accessible", args) - }, - }, - { - "have and want mods, but mods are ignored", - fields{ - NPMDependencies: []string{"mod1"}, - }, - args{ - tempDir: tempDir, - rootDir: "has-mods", - matcher: sauceignore.NewMatcher([]sauceignore.Pattern{sauceignore.NewPattern("/has-mods/node_modules")}), - }, - "", - func(t assert.TestingT, err error, args ...interface{}) bool { - return assert.EqualError(t, err, "'node_modules' is ignored by sauceignore, 
but you have npm dependencies defined in your project; please remove 'node_modules' from your sauceignore file", args) - }, - }, - { - "have mods, don't want them and they are ignored", - fields{ - NPMDependencies: []string{}, // no mods selected, because we don't want any - }, - args{ - tempDir: tempDir, - rootDir: "has-mods", - matcher: sauceignore.NewMatcher([]sauceignore.Pattern{sauceignore.NewPattern("/has-mods/node_modules")}), - }, - "", - assert.NoError, - }, - { - "no mods wanted and no mods exist", - fields{ - NPMDependencies: []string{}, - }, - args{ - tempDir: tempDir, - rootDir: "no-mods", - matcher: sauceignore.NewMatcher([]sauceignore.Pattern{}), - }, - "", - assert.NoError, - }, - { - "has and wants mods (happy path)", - fields{ - NPMDependencies: []string{"mod1"}, - }, - args{ - tempDir: tempDir, - rootDir: "has-mods", - matcher: sauceignore.NewMatcher([]sauceignore.Pattern{}), - }, - filepath.Join(tempDir, "node_modules.zip"), - assert.NoError, - }, - { - "want mods, but node_modules folder is empty", - fields{ - NPMDependencies: []string{"mod1"}, - }, - args{ - tempDir: tempDir, - rootDir: "empty-mods", - matcher: sauceignore.NewMatcher([]sauceignore.Pattern{}), - }, - "", - func(t assert.TestingT, err error, args ...interface{}) bool { - return assert.EqualError(t, err, "unable to find required dependencies; please check 'node_modules' folder and make sure the dependencies exist", args) - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - r := &CloudRunner{ - NPMDependencies: tt.fields.NPMDependencies, - } - got, err := zip.ArchiveNodeModules(tt.args.tempDir, tt.args.rootDir, tt.args.matcher, r.NPMDependencies) - if !tt.wantErr(t, err, fmt.Sprintf("archiveNodeModules(%v, %v, %v)", tt.args.tempDir, tt.args.rootDir, tt.args.matcher)) { - return - } - assert.Equalf(t, tt.want, got, "archiveNodeModules(%v, %v, %v)", tt.args.tempDir, tt.args.rootDir, tt.args.matcher) - }) - } -} - func Test_arrayContains(t *testing.T) { type args struct { list []string @@ -247,3 +94,103 @@ func Test_arrayContains(t *testing.T) { }) } } + +type MockMatcher struct { + ignoreNodeModules bool +} + +func (m *MockMatcher) Match(path []string, isDir bool) bool { + return m.ignoreNodeModules && strings.Contains(filepath.Join(path...), "node_modules") +} + +func TestCloudRunner_needsNodeModules(t *testing.T) { + tempDir := t.TempDir() + modDir := filepath.Join(tempDir, "node_modules") + dependencies := []string{"chalk", "lodash"} + + createNodeModules := func() { + if err := os.Mkdir(modDir, 0755); err != nil { + t.Fatalf("failed to create node_modules directory: %v", err) + } + } + + tests := []struct { + name string + setup func() + ignoreModules bool + dependencies []string + want bool + expectErr bool + }{ + { + name: "No dependencies, no node_modules", + setup: func() {}, + ignoreModules: false, + dependencies: []string{}, + want: false, + expectErr: false, + }, + { + name: "Dependencies defined, no node_modules", + setup: func() {}, + ignoreModules: false, + dependencies: dependencies, + want: false, + expectErr: true, + }, + { + name: "Dependencies defined, node_modules present", + setup: createNodeModules, + ignoreModules: false, + dependencies: dependencies, + want: true, + expectErr: false, + }, + { + name: "Dependencies defined, node_modules ignored", + setup: createNodeModules, + ignoreModules: true, + dependencies: dependencies, + want: false, + expectErr: true, + }, + { + name: "No dependencies, node_modules ignored", + setup: createNodeModules, + 
ignoreModules: true, + dependencies: []string{}, + want: false, + expectErr: false, + }, + { + name: "No dependencies, node_modules present", + setup: createNodeModules, + ignoreModules: false, + dependencies: []string{}, + want: true, + expectErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tt.setup() + t.Cleanup(func() { + if err := os.RemoveAll(modDir); err != nil { + t.Fatalf("failed to clean up node_modules directory: %v", err) + } + }) + + matcher := &MockMatcher{ignoreNodeModules: tt.ignoreModules} + got, err := needsNodeModules(tempDir, matcher, tt.dependencies) + + if (err != nil) != tt.expectErr { + t.Fatalf("expected error: %v, got error: %v", tt.expectErr, err) + } + + if got != tt.want { + t.Errorf("expected result: %v, got result: %v", tt.want, got) + } + }) + } +} diff --git a/internal/saucecloud/zip/archive.go b/internal/saucecloud/zip/archive.go index 2a2f27e65..881c14c98 100644 --- a/internal/saucecloud/zip/archive.go +++ b/internal/saucecloud/zip/archive.go @@ -112,32 +112,13 @@ func ArchiveFiles(targetFileName string, targetDir string, sourceDir string, fil // ArchiveNodeModules collects npm dependencies from sourceDir and compresses them into targetDir. func ArchiveNodeModules(targetDir string, sourceDir string, matcher sauceignore.Matcher, dependencies []string) (string, error) { - modDir := filepath.Join(sourceDir, "node_modules") - ignored := matcher.Match(strings.Split(modDir, string(os.PathSeparator)), true) - - _, err := os.Stat(modDir) - hasMods := err == nil - wantMods := len(dependencies) > 0 - - if !hasMods && wantMods { - return "", fmt.Errorf("unable to access 'node_modules' folder, but you have npm dependencies defined in your configuration; ensure that the folder exists and is accessible") - } - - if ignored && wantMods { - return "", fmt.Errorf("'node_modules' is ignored by sauceignore, but you have npm dependencies defined in your project; please remove 'node_modules' from your sauceignore file") - } - - if !hasMods || ignored { - return "", nil - } - - dependencies, err = ExpandDependencies(sourceDir, dependencies) + dependencies, err := ExpandDependencies(sourceDir, dependencies) if err != nil { return "", err } var files []string - + wantMods := len(dependencies) > 0 // does the user only want a subset of dependencies? if wantMods { reqs := node.Requirements(filepath.Join(sourceDir, "node_modules"), dependencies...) @@ -155,7 +136,7 @@ func ArchiveNodeModules(targetDir string, sourceDir string, matcher sauceignore. if !wantMods { log.Warn().Msg("Adding the entire node_modules folder to the payload. " + "This behavior is deprecated, not recommended and will be removed in the future. 
" + - "Please address your dependency needs via https://docs.saucelabs.com/dev/cli/saucectl/usage/use-cases/#set-npm-packages-in-configyml") + "Please address your dependency needs via https://docs.saucelabs.com/dev/cli/saucectl/usage/use-cases/#including-node-dependencies") files = append(files, filepath.Join(sourceDir, "node_modules")) } From 37f05d95eda70674bf443b1675f2d200560dd684 Mon Sep 17 00:00:00 2001 From: Tian Feng Date: Thu, 7 Nov 2024 20:23:23 -0800 Subject: [PATCH 17/19] save lint --- internal/saucecloud/cloud_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/saucecloud/cloud_test.go b/internal/saucecloud/cloud_test.go index 8d157d73f..318e43d10 100644 --- a/internal/saucecloud/cloud_test.go +++ b/internal/saucecloud/cloud_test.go @@ -99,7 +99,7 @@ type MockMatcher struct { ignoreNodeModules bool } -func (m *MockMatcher) Match(path []string, isDir bool) bool { +func (m *MockMatcher) Match(path []string, _ bool) bool { return m.ignoreNodeModules && strings.Contains(filepath.Join(path...), "node_modules") } From 42687aa50153b1443722d3c7421707dc9e9d040d Mon Sep 17 00:00:00 2001 From: Tian Feng Date: Fri, 8 Nov 2024 11:07:44 -0800 Subject: [PATCH 18/19] Update internal/saucecloud/cloud.go Co-authored-by: Alex Plischke --- internal/saucecloud/cloud.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/saucecloud/cloud.go b/internal/saucecloud/cloud.go index 653d0cfe9..5eb31bc45 100644 --- a/internal/saucecloud/cloud.go +++ b/internal/saucecloud/cloud.go @@ -516,7 +516,7 @@ func (r *CloudRunner) handleNodeModules(tempDir, projectDir string, matcher sauc log.Info().Msgf("Searching remote node_modules archive by tag %s", tag) existingURI := r.findTaggedArchives(tag) if existingURI != "" { - log.Info().Msgf("Skipping archive and upload node_modules, use %s", existingURI) + log.Info().Msgf("Skipping upload, using %s", existingURI) return existingURI, nil } } From 079ddeee09fbac11c1874d13745c57a926e2b023 Mon Sep 17 00:00:00 2001 From: Tian Feng Date: Fri, 8 Nov 2024 11:12:02 -0800 Subject: [PATCH 19/19] revise comments for handleNodeModules --- internal/saucecloud/cloud.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/internal/saucecloud/cloud.go b/internal/saucecloud/cloud.go index 5eb31bc45..f31d0b9e5 100644 --- a/internal/saucecloud/cloud.go +++ b/internal/saucecloud/cloud.go @@ -500,9 +500,9 @@ func (r *CloudRunner) createArchives(tempDir, projectDir string, project interfa } // handleNodeModules archives the node_modules directory and uploads it to remote storage. -// If tagging is enabled and a tagged version of node_modules already exists in storage, -// it returns the URI of the existing archive. -// Otherwise, it creates a new archive, uploads it and returns the storage ID. +// Checks if npm dependencies are taggable and if a tagged version of node_modules already exists in storage. +// If an existing archive is found, it returns the URI of that archive. +// If not, it creates a new archive, uploads it, and returns the storage ID. func (r *CloudRunner) handleNodeModules(tempDir, projectDir string, matcher sauceignore.Matcher, dryRun bool) (string, error) { var tags []string