From 4c6c438a8b6b1cb1d70a44a0d9a893a76e2af424 Mon Sep 17 00:00:00 2001
From: achumachenko-cloudinary
Date: Mon, 30 Dec 2024 12:41:35 -0800
Subject: [PATCH 1/8] Fixed outdated dependency with `npm audit fix`

---
 package-lock.json | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index 999e511..1212d0d 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1619,10 +1619,11 @@
       }
     },
     "node_modules/cross-spawn": {
-      "version": "7.0.3",
-      "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
-      "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
+      "version": "7.0.6",
+      "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
+      "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
       "dev": true,
+      "license": "MIT",
       "dependencies": {
         "path-key": "^3.1.0",
         "shebang-command": "^2.0.0",

From f6ad5101463f97e1eb585fa52698f6f0d7c4ddec Mon Sep 17 00:00:00 2001
From: achumachenko-cloudinary
Date: Mon, 30 Dec 2024 12:57:16 -0800
Subject: [PATCH 2/8] Documented `SkippedAlreadyExists` operation code in the migration report

---
 readme/identify-reattempt-failed.md | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/readme/identify-reattempt-failed.md b/readme/identify-reattempt-failed.md
index 00f4eb5..d54b4ea 100644
--- a/readme/identify-reattempt-failed.md
+++ b/readme/identify-reattempt-failed.md
@@ -17,6 +17,9 @@ The migration script adds the following additional columns to the migration repo
     + `Overwritten` - if the Cloudinary asset already existed and was overwritten
         * This may indicate undesired behavior, for example if several assets in the migration input file were assigned the same `public_id`
     + `Uploaded` - if a new Cloudinary asset was created
+    + `SkippedAlreadyExists` - indicates that the upload operation was not performed because:
+        * the `overwrite` upload API parameter was set to `false`
+        * AND an asset with the `public_id` value specified for the upload already exists
 - `Cld_Error` : the error details for troubleshooting (if an asset failed to migrate)
 - `Cld_PublicId`: `public_id` reported back by Cloudinary after uploading an asset
     + Should be used as "source of truth" when addressing migrated assets via Cloudinary API (as [Cloudinary may have to replace some of the characters](https://support.cloudinary.com/hc/en-us/articles/115001317409--Legal-naming-conventions))
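The `SkippedAlreadyExists` status documented in the patch above follows directly from the `overwrite: false` upload option used by the sample payload (see patch 3 below). The sketch that follows is illustrative only and is not part of this patch series; it assumes the Cloudinary Node.js SDK configured via the `CLOUDINARY_URL` environment variable, and the `uploadOnce()` helper name is made up:

```js
// Illustrative sketch only -- not part of the patches in this series.
// Assumes the Cloudinary Node.js SDK and a hypothetical uploadOnce() helper.
const cloudinary = require('cloudinary').v2;

async function uploadOnce(filePath, publicId) {
    return cloudinary.uploader.upload(filePath, {
        public_id: publicId,
        unique_filename: false,
        resource_type: 'auto',
        overwrite: false,   // never replace an existing asset with this public_id
        type: 'upload',
    });
}

// First run for a given public_id: a new asset is created ("Uploaded" in the report).
// A re-run with the same public_id: since overwrite is false and the asset already
// exists, the upload is not performed -- the report row becomes "SkippedAlreadyExists".
```

That behavior is what makes it reasonably safe to re-run the migration over an input file that was already partially processed.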
From d201ea5d407c2f8841491c8912ccfdc2fb45b09f Mon Sep 17 00:00:00 2001
From: achumachenko-cloudinary
Date: Mon, 30 Dec 2024 13:13:22 -0800
Subject: [PATCH 3/8] Updated sample payload to provide more details

---
 __input-to-api-payload.js | 28 +++++++++++++++++-----------
 1 file changed, 17 insertions(+), 11 deletions(-)

diff --git a/__input-to-api-payload.js b/__input-to-api-payload.js
index 2bff6ed..8477de1 100644
--- a/__input-to-api-payload.js
+++ b/__input-to-api-payload.js
@@ -21,25 +21,31 @@
  * - options: options for the Cloudinary Upload API call
  */
 exports.input2ApiPayload = function(csvRec) {
-    // Pass value from 'Url' column with the asset URLs or paths
-    const file = csvRec['Url'];
+    // Where to load the asset from
+    // Any source supported by Cloudinary Upload API: https://cloudinary.com/documentation/upload_parameters#required_file_parameter
+    const file = csvRec['File_Path_or_URL_ColumnName'];

     // Optional parameters for the Cloudinary API
     const options = {
-        public_id: csvRec['Id'],            // Pass value from 'Id' column to be used as public_id
-        unique_filename: false,             // Do not add random suffix to the public_id
-        resource_type: 'auto',              // Let Cloudinary determine the resource type
-        overwrite: false,                   // Do not overwrite the asset with same public_id if it already exists
-        type: 'upload',                     // Explicitly set delivery type
-        tags: csvRec['Tags'],               // Pass value from 'Tags' column as tags
+        public_id: csvRec['Asset_Public_Id_ColumnName'],    // Pass value to be used as public_id (addressed by column name from the input CSV file)
+        unique_filename: false,                             // Do not add random suffix to the public_id
+        resource_type: 'auto',                              // Let Cloudinary determine the resource type
+        overwrite: false,                                   // Do not overwrite the asset with same public_id if it already exists
+        type: 'upload',                                     // Explicitly set delivery type
+        tags: csvRec['Asset_Tags_ColumnName'],              // Pass value to be set as tags on the uploaded asset (addressed by column name from the input CSV file)
+
+        // Example: Assigning contextual metadata
+        // See specs at https://cloudinary.com/documentation/contextual_metadata
         context: {
-            caption: csvRec['Description'],     // Pass value from 'Description' column as contextual metadata
+            caption: csvRec['Asset_Description_ColumnName'],    // Pass value to be set as caption field in contextual metadata (addressed by column name from the input CSV file)
         },
+
+        // Example: Assigning structured metadata
+        // See specs at https://cloudinary.com/documentation/structured_metadata
         metadata: {
-            sample_field: csvRec['SampleField'],    // Pass value from 'SampleField' column into the structured metadata field
-                                                    // with external_id of 'sample_field'
+            sample_field: csvRec['SampleField_Value_ColumnName'],    // Pass value to the structured metadata field with external_id of 'sample_field' (addressed by column name from the input CSV file)
         },
     };

From 9c069e5eb2a330bec52f0341edc03fe71ac68765 Mon Sep 17 00:00:00 2001
From: achumachenko-cloudinary
Date: Mon, 30 Dec 2024 13:25:46 -0800
Subject: [PATCH 4/8] Explained "estimated total" in the progressbar message

---
 lib/output/progress.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lib/output/progress.js b/lib/output/progress.js
index 9740694..7f67dff 100644
--- a/lib/output/progress.js
+++ b/lib/output/progress.js
@@ -61,7 +61,7 @@ async function init_Async(filePath) {
         fps: 5
     });
     _statusBar = _multiBar.create(totalCount, 0, init_stats, {
-        format: 'Attempted: {value} (✅{succeeded} /❌{failed}) out of estimated {total}',
+        format: 'Attempted: {value} (✅{succeeded} /❌{failed}) out of estimated* {total} (* counted newlines in the input file)',
         fps: 5
     });
 }
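To illustrate why the patch above labels the total as "estimated": per the new format string, the number comes from counting newlines in the input file, which may not equal the number of CSV records. The sketch below is not code from the repository; it assumes the `cli-progress` package (which the `_multiBar`/`fps` usage in `lib/output/progress.js` suggests) and made-up file and helper names:

```js
// Illustrative sketch only -- not part of the patches in this series.
const fs = require('fs');
const cliProgress = require('cli-progress');

// Estimate the total by counting newlines. The count can differ from the real
// number of CSV records (header line, trailing newline, quoted multi-line fields),
// hence the asterisk in the progress bar message.
function estimateTotalByNewlines(filePath) {
    const text = fs.readFileSync(filePath, 'utf8');
    return (text.match(/\n/g) || []).length;
}

const totalCount = estimateTotalByNewlines('input.csv');
const multiBar = new cliProgress.MultiBar({ fps: 5 }, cliProgress.Presets.shades_classic);
const statusBar = multiBar.create(totalCount, 0, { succeeded: 0, failed: 0 }, {
    format: 'Attempted: {value} (✅{succeeded} /❌{failed}) out of estimated* {total} (* counted newlines in the input file)',
    fps: 5,
});

// The custom {succeeded}/{failed} tokens are resolved from the payload on each update:
statusBar.increment(1, { succeeded: 1, failed: 0 });
multiBar.stop();
```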
From e12ec2a35532a5bdbe51e09aab89099b03f05a33 Mon Sep 17 00:00:00 2001
From: achumachenko-cloudinary
Date: Mon, 30 Dec 2024 13:35:28 -0800
Subject: [PATCH 5/8] Addressed feedback received during peer review

---
 readme/provision-runtime.md | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/readme/provision-runtime.md b/readme/provision-runtime.md
index 0320203..e4e4110 100644
--- a/readme/provision-runtime.md
+++ b/readme/provision-runtime.md
@@ -2,19 +2,19 @@

 For smaller-scale migrations involving thousands or tens of thousands of assets, running the script from your local machine should suffice.

-For larger migrations with hundreds of thousands of assets, it's advisable to run the script from a virtual machine (VM).
+For larger migrations with hundreds of thousands of assets, it's advisable to run the script from a stable, always-on environment with reliable internet connectivity (for example, an AWS EC2 VM).

 # Guidelines for Provisioning a Virtual Machine Runtime

 ## Important Considerations ❗️

-- If you're planning to run the script on a VM via an SSH connection, consider using a terminal multiplexer like `screen` or `tmux`.
-    - Failing to use a multiplexer may result in the termination of the migration process if your SSH connection closes and your VM session is terminated.
+- If you're planning to run the script on a VM via an SSH connection, make sure to use a terminal multiplexer like `screen` or `tmux`.
+    - Not using a multiplexer will likely result in the migration process stalling or even being terminated.

 ## CPU and Memory ⚙️

 - The script is not resource-intensive as most of the heavy lifting is done by Cloudinary's back-end systems.
-    - For example, an AWS `t2.micro` VM should suffice.
+    - For example, an AWS `t2.micro` EC2 VM is usually sufficient.

 ## Storage Requirements 💾

@@ -32,7 +32,7 @@

 ## Clone the Repository 👯

-- Clone the forked repository onto your VM.
+- Clone the forked repository onto your system.

 ## Install Node.js 🛠️


From e388e893116325f53105239d6a9e8901e8a5ab2b Mon Sep 17 00:00:00 2001
From: achumachenko-cloudinary
Date: Mon, 30 Dec 2024 14:20:28 -0800
Subject: [PATCH 6/8] Release process - incremented version

---
 package-lock.json | 4 ++--
 package.json      | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index 1212d0d..0b5424a 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "cld-bulk",
-  "version": "2.1.0",
+  "version": "2.1.1",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "cld-bulk",
-      "version": "2.1.0",
+      "version": "2.1.1",
       "dependencies": {
         "async": "^3.2.4",
         "bunyan": "^1.8.15",
diff --git a/package.json b/package.json
index 49cec3f..f8da57b 100644
--- a/package.json
+++ b/package.json
@@ -21,5 +21,5 @@
   "scripts": {
     "test": "node ./test/jest.run-all-tests.js"
   },
-  "version": "2.1.0"
+  "version": "2.1.1"
 }

From 4ae841c1404396079d48a49fbf87f87c0cb67085 Mon Sep 17 00:00:00 2001
From: achumachenko-cloudinary
Date: Mon, 30 Dec 2024 14:21:45 -0800
Subject: [PATCH 7/8] Release process - updated version in the CLI description

---
 cld-bulk.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/cld-bulk.js b/cld-bulk.js
index 26032d5..b85ce33 100755
--- a/cld-bulk.js
+++ b/cld-bulk.js
@@ -57,7 +57,7 @@ function configureProgram(program) {
     program
         .name('cld-bulk')
         .description('Extensible CLI tool to efficiently translate CSV file records into Cloudinary API operations')
-        .version('2.1.0');
+        .version('2.1.1');
 }

From 0bdc0a7056bb79315c9c172def22284234e8404d Mon Sep 17 00:00:00 2001
From: achumachenko-cloudinary
Date: Mon, 30 Dec 2024 14:26:31 -0800
Subject: [PATCH 8/8] Release process - updated changelog

---
 CHANGELOG.md | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 96898cb..2e08b3e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,17 @@
 # CHANGELOG

+[2.1.1] / 2024-12-30
+====================
+
+### Added
+- Documentation for the `SkippedAlreadyExists` upload operation status
+
+### Changed
+- Updated outdated dependencies
+- Sample payload in the `__input-to-api-payload.js` for better clarity
+- Explanation for "estimated number of records" in the progress bar
+- Runtime setup instructions per peer review feedback
+
 [2.1.0] / 2024-11-12
 ====================