diff --git a/functions/billing/index.js b/functions/billing/index.js
index 38d3bdfe5a..a2eaf7b0fc 100644
--- a/functions/billing/index.js
+++ b/functions/billing/index.js
@@ -23,12 +23,13 @@ const PROJECT_NAME = `projects/${PROJECT_ID}`;
// [END functions_billing_limit]
// [START functions_billing_slack]
-const slack = require('slack');
+const {WebClient} = require('@slack/web-api');
// TODO(developer) replace these with your own values
const BOT_ACCESS_TOKEN =
process.env.BOT_ACCESS_TOKEN || 'xxxx-111111111111-abcdefghidklmnopq';
const CHANNEL = process.env.SLACK_CHANNEL || 'general';
+const slackClient = new WebClient(BOT_ACCESS_TOKEN);
exports.notifySlack = async pubsubEvent => {
const pubsubAttrs = pubsubEvent.attributes;
@@ -37,8 +38,7 @@ exports.notifySlack = async pubsubEvent => {
pubsubAttrs
)}, ${pubsubData}`;
- await slack.chat.postMessage({
- token: BOT_ACCESS_TOKEN,
+ await slackClient.chat.postMessage({
channel: CHANNEL,
text: budgetNotificationText,
});
diff --git a/functions/billing/package.json b/functions/billing/package.json
index 0c59d6c3e9..5630dfb8a4 100644
--- a/functions/billing/package.json
+++ b/functions/billing/package.json
@@ -5,7 +5,7 @@
"description": "Examples of integrating Cloud Functions with billing",
"main": "index.js",
"engines": {
- "node": ">=16.0.0"
+ "node": ">=18.0.0"
},
"scripts": {
"compute-test": "c8 mocha -p -j 2 test/periodic.test.js --timeout=600000",
@@ -16,9 +16,9 @@
"dependencies": {
"@google-cloud/billing": "^4.0.0",
"@google-cloud/compute": "^4.0.0",
+ "@slack/web-api": "^7.15.0",
"google-auth-library": "^9.0.0",
- "googleapis": "^143.0.0",
- "slack": "^11.0.1"
+ "googleapis": "^143.0.0"
},
"devDependencies": {
"@google-cloud/functions-framework": "^3.0.0",
diff --git a/functions/imagemagick/README.md b/functions/imagemagick/README.md
index 6f2962f7ff..1c7fa3ff4d 100644
--- a/functions/imagemagick/README.md
+++ b/functions/imagemagick/README.md
@@ -1,8 +1,8 @@
-# Google Cloud Functions ImageMagick sample
+# Google Cloud Functions image blurring sample
-This sample shows you how to blur an image using ImageMagick in a
+This sample shows you how to blur an image using sharp in a
Storage-triggered Cloud Function.
View the [source code][code].
diff --git a/functions/imagemagick/index.js b/functions/imagemagick/index.js
index ec231bd17c..cf782d2a6f 100644
--- a/functions/imagemagick/index.js
+++ b/functions/imagemagick/index.js
@@ -15,7 +15,7 @@
'use strict';
// [START functions_imagemagick_setup]
-const gm = require('gm').subClass({imageMagick: true});
+const sharp = require('sharp');
const fs = require('fs').promises;
const path = require('path');
const vision = require('@google-cloud/vision');
@@ -32,6 +32,12 @@ const {BLURRED_BUCKET_NAME} = process.env;
exports.blurOffensiveImages = async event => {
// This event represents the triggering Cloud Storage object.
const object = event;
+ if (object.bucket === BLURRED_BUCKET_NAME) {
+ console.log(
+ 'Event triggered by the blurred bucket; skip to avoid recursion'
+ );
+ return;
+ }
const file = storage.bucket(object.bucket).file(object.name);
const filePath = `gs://${object.bucket}/${object.name}`;
@@ -60,9 +66,10 @@ exports.blurOffensiveImages = async event => {
// [END functions_imagemagick_analyze]
// [START functions_imagemagick_blur]
-// Blurs the given file using ImageMagick, and uploads it to another bucket.
+// Blurs the given file using sharp, and uploads it to another bucket.
const blurImage = async (file, blurredBucketName) => {
const tempLocalPath = `/tmp/${path.parse(file.name).base}`;
+ const tempLocalBlurredPath = `/tmp/blurred-${path.parse(file.name).base}`;
// Download file from bucket.
try {
@@ -72,20 +79,14 @@ const blurImage = async (file, blurredBucketName) => {
} catch (err) {
throw new Error(`File download failed: ${err}`);
}
+ try {
+ await sharp(tempLocalPath).blur(16).toFile(tempLocalBlurredPath);
- await new Promise((resolve, reject) => {
- gm(tempLocalPath)
- .blur(0, 16)
- .write(tempLocalPath, (err, stdout) => {
- if (err) {
- console.error('Failed to blur image.', err);
- reject(err);
- } else {
- console.log(`Blurred image: ${file.name}`);
- resolve(stdout);
- }
- });
- });
+ console.log(`Blurred image: ${file.name}`);
+ } catch (err) {
+ console.error('Failed to blur image.', err);
+ throw err;
+ }
// Upload result to a different bucket, to avoid re-triggering this function.
const blurredBucket = storage.bucket(blurredBucketName);
@@ -93,13 +94,16 @@ const blurImage = async (file, blurredBucketName) => {
// Upload the Blurred image back into the bucket.
const gcsPath = `gs://${blurredBucketName}/${file.name}`;
try {
- await blurredBucket.upload(tempLocalPath, {destination: file.name});
+ await blurredBucket.upload(tempLocalBlurredPath, {destination: file.name});
console.log(`Uploaded blurred image to: ${gcsPath}`);
} catch (err) {
throw new Error(`Unable to upload blurred image to ${gcsPath}: ${err}`);
+ } finally {
+    // Delete the temporary files.
+ await Promise.allSettled([
+ fs.unlink(tempLocalPath),
+ fs.unlink(tempLocalBlurredPath),
+ ]);
}
-
- // Delete the temporary file.
- return fs.unlink(tempLocalPath);
};
// [END functions_imagemagick_blur]
diff --git a/functions/imagemagick/package.json b/functions/imagemagick/package.json
index cf6fa1cc8b..5b7a94b5d6 100644
--- a/functions/imagemagick/package.json
+++ b/functions/imagemagick/package.json
@@ -9,7 +9,7 @@
"url": "https://github.com/GoogleCloudPlatform/nodejs-docs-samples.git"
},
"engines": {
- "node": ">=12.0.0"
+ "node": ">=18.17.0"
},
"scripts": {
"test": "c8 mocha -p -j 2 test/*.test.js --timeout=30000 --exit"
@@ -17,7 +17,7 @@
"dependencies": {
"@google-cloud/storage": "^7.0.0",
"@google-cloud/vision": "^4.0.0",
- "gm": "^1.23.1"
+ "sharp": "^0.34.5"
},
"devDependencies": {
"@google-cloud/functions-framework": "^3.0.0",
diff --git a/functions/imagemagick/test/index.test.js b/functions/imagemagick/test/index.test.js
index 55fecb924c..a6894fe651 100644
--- a/functions/imagemagick/test/index.test.js
+++ b/functions/imagemagick/test/index.test.js
@@ -15,7 +15,7 @@
'use strict';
const assert = require('assert');
-const {execSync, spawn} = require('child_process');
+const {spawn} = require('child_process');
const {Storage} = require('@google-cloud/storage');
const sinon = require('sinon');
const {request} = require('gaxios');
@@ -62,11 +62,6 @@ async function startFF(port) {
return {ffProc, ffProcHandler};
}
-// ImageMagick is available by default in Cloud Run Functions environments
-// https://cloud.google.com/functions/1stgendocs/tutorials/imagemagick-1st-gen.md#importing_dependencies
-// Manually install it for testing only.
-execSync('sudo apt-get install imagemagick -y');
-
describe('functions/imagemagick tests', () => {
before(async () => {
let exists;
@@ -92,40 +87,54 @@ describe('functions/imagemagick tests', () => {
it('blurOffensiveImages detects safe images using Cloud Vision', async () => {
const PORT = 8080;
const {ffProc, ffProcHandler} = await startFF(PORT);
-
- await request({
- url: `http://localhost:${PORT}/blurOffensiveImages`,
- method: 'POST',
- data: {
+ let stdout;
+ try {
+ await request({
+ url: `http://localhost:${PORT}/blurOffensiveImages`,
+ method: 'POST',
data: {
- bucket: BUCKET_NAME,
- name: testFiles.safe,
+ data: {
+ bucket: BUCKET_NAME,
+ name: testFiles.safe,
+ },
},
- },
- });
- ffProc.kill();
- const stdout = await ffProcHandler;
+ });
+ } catch (err) {
+ console.error(
+ `Cloud Function Error: ${err.response?.data || err.message}`
+ );
+ throw err;
+ } finally {
+ ffProc.kill();
+ stdout = await ffProcHandler;
+ }
assert.ok(stdout.includes(`Detected ${testFiles.safe} as OK.`));
});
it('blurOffensiveImages successfully blurs offensive images', async () => {
const PORT = 8081;
const {ffProc, ffProcHandler} = await startFF(PORT);
-
- await request({
- url: `http://localhost:${PORT}/blurOffensiveImages`,
- method: 'POST',
- data: {
+ let stdout;
+ try {
+ await request({
+ url: `http://localhost:${PORT}/blurOffensiveImages`,
+ method: 'POST',
data: {
- bucket: BUCKET_NAME,
- name: testFiles.offensive,
+ data: {
+ bucket: BUCKET_NAME,
+ name: testFiles.offensive,
+ },
},
- },
- });
-
- ffProc.kill();
- const stdout = await ffProcHandler;
-
+ });
+ } catch (err) {
+ console.error(
+ `Cloud Function Error: ${err.response?.data || err.message}`
+ );
+ throw err;
+ } finally {
+ ffProc.kill();
+ stdout = await ffProcHandler;
+ }
assert.ok(stdout.includes(`Blurred image: ${testFiles.offensive}`));
assert.ok(
stdout.includes(
diff --git a/functions/v2/imagemagick/README.md b/functions/v2/imagemagick/README.md
index 6f2962f7ff..1c7fa3ff4d 100644
--- a/functions/v2/imagemagick/README.md
+++ b/functions/v2/imagemagick/README.md
@@ -1,8 +1,8 @@
-# Google Cloud Functions ImageMagick sample
+# Google Cloud Functions image blurring sample
-This sample shows you how to blur an image using ImageMagick in a
+This sample shows you how to blur an image using sharp in a
Storage-triggered Cloud Function.
View the [source code][code].
diff --git a/functions/v2/imagemagick/index.js b/functions/v2/imagemagick/index.js
index e6f32f0837..a0326ba311 100644
--- a/functions/v2/imagemagick/index.js
+++ b/functions/v2/imagemagick/index.js
@@ -16,7 +16,7 @@
// [START functions_imagemagick_setup]
const functions = require('@google-cloud/functions-framework');
-const gm = require('gm').subClass({imageMagick: true});
+const sharp = require('sharp');
const fs = require('fs').promises;
const path = require('path');
const vision = require('@google-cloud/vision');
@@ -34,6 +34,14 @@ functions.cloudEvent('blurOffensiveImages', async cloudEvent => {
// This event represents the triggering Cloud Storage object.
const bucket = cloudEvent.data.bucket;
const name = cloudEvent.data.name;
+
+ if (bucket === BLURRED_BUCKET_NAME) {
+ console.log(
+ 'Event triggered by the blurred bucket; skip to avoid recursion'
+ );
+ return;
+ }
+
const file = storage.bucket(bucket).file(name);
const filePath = `gs://${bucket}/${name}`;
@@ -61,9 +69,10 @@ functions.cloudEvent('blurOffensiveImages', async cloudEvent => {
// [END functions_imagemagick_analyze]
// [START functions_imagemagick_blur]
-// Blurs the given file using ImageMagick, and uploads it to another bucket.
+// Blurs the given file using sharp, and uploads it to another bucket.
const blurImage = async (file, blurredBucketName) => {
const tempLocalPath = `/tmp/${path.parse(file.name).base}`;
+ const tempLocalBlurredPath = `/tmp/blurred-${path.parse(file.name).base}`;
// Download file from bucket.
try {
@@ -74,19 +83,14 @@ const blurImage = async (file, blurredBucketName) => {
throw new Error(`File download failed: ${err}`);
}
- await new Promise((resolve, reject) => {
- gm(tempLocalPath)
- .blur(0, 16)
- .write(tempLocalPath, (err, stdout) => {
- if (err) {
- console.error('Failed to blur image.', err);
- reject(err);
- } else {
- console.log(`Blurred image: ${file.name}`);
- resolve(stdout);
- }
- });
- });
+ try {
+ await sharp(tempLocalPath).blur(16).toFile(tempLocalBlurredPath);
+
+ console.log(`Blurred image: ${file.name}`);
+ } catch (err) {
+ console.error('Failed to blur image.', err);
+ throw err;
+ }
// Upload result to a different bucket, to avoid re-triggering this function.
const blurredBucket = storage.bucket(blurredBucketName);
@@ -94,13 +98,16 @@ const blurImage = async (file, blurredBucketName) => {
// Upload the Blurred image back into the bucket.
const gcsPath = `gs://${blurredBucketName}/${file.name}`;
try {
- await blurredBucket.upload(tempLocalPath, {destination: file.name});
+ await blurredBucket.upload(tempLocalBlurredPath, {destination: file.name});
console.log(`Uploaded blurred image to: ${gcsPath}`);
} catch (err) {
throw new Error(`Unable to upload blurred image to ${gcsPath}: ${err}`);
+ } finally {
+    // Delete the temporary files.
+ await Promise.allSettled([
+ fs.unlink(tempLocalPath),
+ fs.unlink(tempLocalBlurredPath),
+ ]);
}
-
- // Delete the temporary file.
- return fs.unlink(tempLocalPath);
};
// [END functions_imagemagick_blur]
diff --git a/functions/v2/imagemagick/package.json b/functions/v2/imagemagick/package.json
index 43e1ac3d46..3b9b2ede57 100644
--- a/functions/v2/imagemagick/package.json
+++ b/functions/v2/imagemagick/package.json
@@ -9,7 +9,7 @@
"url": "https://github.com/GoogleCloudPlatform/nodejs-docs-samples.git"
},
"engines": {
- "node": ">=16.0.0"
+ "node": ">=18.17.0"
},
"scripts": {
"test": "c8 mocha -p -j 2 test/*.test.js --timeout=20000 --exit"
@@ -18,7 +18,7 @@
"@google-cloud/functions-framework": "^3.1.0",
"@google-cloud/storage": "^7.0.0",
"@google-cloud/vision": "^4.0.0",
- "gm": "^1.23.1"
+ "sharp": "^0.34.5"
},
"devDependencies": {
"c8": "^10.0.0",
@@ -27,4 +27,4 @@
"sinon": "^18.0.0",
"supertest": "^7.0.0"
}
-}
+}
\ No newline at end of file
diff --git a/functions/v2/imagemagick/test/integration.test.js b/functions/v2/imagemagick/test/integration.test.js
index 82f5b8a43e..e0a2ac56d2 100644
--- a/functions/v2/imagemagick/test/integration.test.js
+++ b/functions/v2/imagemagick/test/integration.test.js
@@ -15,7 +15,6 @@
'use strict';
const assert = require('assert');
-const {execSync} = require('child_process');
const {Storage} = require('@google-cloud/storage');
const sinon = require('sinon');
const supertest = require('supertest');
@@ -34,11 +33,6 @@ const testFiles = {
require('../index');
-// ImageMagick is available by default in Cloud Run Functions environments
-// https://cloud.google.com/functions/1stgendocs/tutorials/imagemagick-1st-gen.md#importing_dependencies
-// Manually install it for testing only.
-execSync('sudo apt-get install imagemagick -y');
-
describe('functions/imagemagick tests', () => {
before(async () => {
let exists;
diff --git a/functions/v2/imagemagick/test/unit.test.js b/functions/v2/imagemagick/test/unit.test.js
index acf453198d..f502441a01 100644
--- a/functions/v2/imagemagick/test/unit.test.js
+++ b/functions/v2/imagemagick/test/unit.test.js
@@ -38,31 +38,29 @@ const loadSample = (adultResult, fileName) => {
return {
bucket: sinon.stub().returnsThis(),
file: sinon.stub().returnsThis(),
- upload: sinon.stub().returnsThis(),
- download: sinon.stub().returnsThis(),
+ upload: sinon.stub().resolves(),
+ download: sinon.stub().resolves(),
name: fileName,
};
},
};
- const gm = () => {
- return {
- blur: sinon.stub().returnsThis(),
- write: sinon.stub().yields(),
- };
+ const sharpInstance = {
+ blur: sinon.stub().returnsThis(),
+ toFile: sinon.stub().resolves(),
};
- gm.subClass = sinon.stub().returnsThis();
+ const sharpMock = sinon.stub().returns(sharpInstance);
const fs = {
promises: {
- unlink: sinon.stub(),
+ unlink: sinon.stub().resolves(),
},
};
return proxyquire('..', {
'@google-cloud/vision': vision,
'@google-cloud/storage': storage,
- gm: gm,
+ sharp: sharpMock,
fs: fs,
});
};
diff --git a/healthcare/dicom/importDicomInstance.js b/healthcare/dicom/importDicomInstance.js
index 9754ac8b47..d716a7b2f9 100644
--- a/healthcare/dicom/importDicomInstance.js
+++ b/healthcare/dicom/importDicomInstance.js
@@ -73,7 +73,7 @@ const main = (
} else {
console.log('Encountered errors. Sample error:');
console.log(
- 'Resource on which error occured:',
+ 'Resource on which error occurred:',
data.error.details[0]['sampleErrors'][0]['resource']
);
console.log(
diff --git a/parametermanager/regional_samples/renderRegionalParamVersion.js b/parametermanager/regional_samples/renderRegionalParamVersion.js
index 8074b1ed61..94cc8587de 100644
--- a/parametermanager/regional_samples/renderRegionalParamVersion.js
+++ b/parametermanager/regional_samples/renderRegionalParamVersion.js
@@ -22,7 +22,7 @@
* @param {string} projectId - The Google Cloud project ID where the parameter is located.
* @param {string} locationId - The ID of the region where parameter is located.
* @param {string} parameterId - The ID of the parameter for which version details are to be rendered.
- * @param {string} parameterVersionId - The ID of the parameter version to be rendered.
+ * @param {string} parameterVersionId - The ID of the parameter version to be rendered or an alias (e.g. 'latest').
*/
async function main(projectId, locationId, parameterId, parameterVersionId) {
// [START parametermanager_render_regional_param_version]
@@ -33,6 +33,7 @@ async function main(projectId, locationId, parameterId, parameterVersionId) {
// const locationId = 'us-central1';
// const parameterId = 'YOUR_PARAMETER_ID';
// const parameterVersionId = 'YOUR_PARAMETER_VERSION_ID';
+ // const parameterVersionId = 'latest';
// Imports the Parameter Manager library
const {ParameterManagerClient} = require('@google-cloud/parametermanager');
diff --git a/parametermanager/renderParamVersion.js b/parametermanager/renderParamVersion.js
index 6e0095ef77..79c69c484f 100644
--- a/parametermanager/renderParamVersion.js
+++ b/parametermanager/renderParamVersion.js
@@ -21,7 +21,7 @@
*
* @param {string} projectId - The Google Cloud project ID where the parameter is located.
* @param {string} parameterId - The ID of the parameter for which version details are to be rendered.
- * @param {string} parameterVersionId - The ID of the parameter version to be rendered.
+ * @param {string} parameterVersionId - The ID of the parameter version to be rendered or an alias (e.g. 'latest').
*/
async function main(projectId, parameterId, parameterVersionId) {
// [START parametermanager_render_param_version]
@@ -31,6 +31,7 @@ async function main(projectId, parameterId, parameterVersionId) {
// const projectId = 'YOUR_PROJECT_ID';
// const parameterId = 'YOUR_PARAMETER_ID';
// const parameterVersionId = 'YOUR_PARAMETER_VERSION_ID';
+ // const parameterVersionId = 'latest';
// Imports the Parameter Manager library
const {ParameterManagerClient} = require('@google-cloud/parametermanager');
diff --git a/run/helloworld/README.md b/run/helloworld/README.md
index e08f381e3d..06d4e25bc4 100644
--- a/run/helloworld/README.md
+++ b/run/helloworld/README.md
@@ -2,6 +2,8 @@
This sample shows how to deploy a Hello World application to Cloud Run.
+
+
For more details on how to work with this sample read the [Google Cloud Run Node.js Samples README](https://github.com/GoogleCloudPlatform/nodejs-docs-samples/tree/main/run).
## Local Development
diff --git a/run/image-processing/README.md b/run/image-processing/README.md
index ad145c4f90..2402703bbc 100644
--- a/run/image-processing/README.md
+++ b/run/image-processing/README.md
@@ -1,6 +1,6 @@
# Cloud Run Image Processing Sample
-This sample service applies [Cloud Storage](https://cloud.google.com/storage/docs)-triggered image processing with [Cloud Vision API](https://cloud.google.com/vision/docs) analysis and ImageMagick transformation.
+This sample service applies [Cloud Storage](https://cloud.google.com/storage/docs)-triggered image processing with [Cloud Vision API](https://cloud.google.com/vision/docs) analysis and sharp transformation.
Use it with the [Image Processing with Cloud Run tutorial](http://cloud.google.com/run/docs/tutorials/image-processing).
@@ -9,7 +9,7 @@ For more details on how to work with this sample read the [Google Cloud Run Node
## Dependencies
* **express**: Web server framework
-* **[gm](https://github.com/aheckmann/gm#readme)**: ImageMagick integration library.
+* **[sharp](https://sharp.pixelplumbing.com/)**: High-performance Node.js image processing library.
* **@google-cloud/storage**: Google Cloud Storage client library.
* **@google-cloud/vision**: Cloud Vision API client library.
diff --git a/run/image-processing/image.js b/run/image-processing/image.js
index 13f940de90..1d44212153 100644
--- a/run/image-processing/image.js
+++ b/run/image-processing/image.js
@@ -15,9 +15,8 @@
'use strict';
// [START cloudrun_imageproc_handler_setup]
-const gm = require('gm').subClass({imageMagick: true});
-const fs = require('fs');
-const {promisify} = require('util');
+const fs = require('fs').promises;
+const sharp = require('sharp');
const path = require('path');
const vision = require('@google-cloud/vision');
@@ -34,6 +33,13 @@ exports.blurOffensiveImages = async event => {
// This event represents the triggering Cloud Storage object.
const object = event;
+ if (object.bucket === BLURRED_BUCKET_NAME) {
+ console.log(
+ 'Event triggered by the blurred bucket; skip to avoid recursion'
+ );
+ return;
+ }
+
const file = storage.bucket(object.bucket).file(object.name);
const filePath = `gs://${object.bucket}/${object.name}`;
@@ -61,9 +67,10 @@ exports.blurOffensiveImages = async event => {
// [END cloudrun_imageproc_handler_analyze]
// [START cloudrun_imageproc_handler_blur]
-// Blurs the given file using ImageMagick, and uploads it to another bucket.
+// Blurs the given file using sharp, and uploads it to another bucket.
const blurImage = async (file, blurredBucketName) => {
const tempLocalPath = `/tmp/${path.parse(file.name).base}`;
+ const tempLocalBlurredPath = `/tmp/blurred-${path.parse(file.name).base}`;
// Download file from bucket.
try {
@@ -74,19 +81,14 @@ const blurImage = async (file, blurredBucketName) => {
throw new Error(`File download failed: ${err}`);
}
- await new Promise((resolve, reject) => {
- gm(tempLocalPath)
- .blur(0, 16)
- .write(tempLocalPath, (err, stdout) => {
- if (err) {
- console.error('Failed to blur image.', err);
- reject(err);
- } else {
- console.log(`Blurred image: ${file.name}`);
- resolve(stdout);
- }
- });
- });
+ try {
+ await sharp(tempLocalPath).blur(16).toFile(tempLocalBlurredPath);
+
+ console.log(`Blurred image: ${file.name}`);
+ } catch (err) {
+ console.error('Failed to blur image.', err);
+ throw err;
+ }
// Upload result to a different bucket, to avoid re-triggering this function.
const blurredBucket = storage.bucket(blurredBucketName);
@@ -94,14 +96,16 @@ const blurImage = async (file, blurredBucketName) => {
// Upload the Blurred image back into the bucket.
const gcsPath = `gs://${blurredBucketName}/${file.name}`;
try {
- await blurredBucket.upload(tempLocalPath, {destination: file.name});
+ await blurredBucket.upload(tempLocalBlurredPath, {destination: file.name});
console.log(`Uploaded blurred image to: ${gcsPath}`);
} catch (err) {
throw new Error(`Unable to upload blurred image to ${gcsPath}: ${err}`);
+ } finally {
+    // Delete the temporary files.
+ await Promise.allSettled([
+ fs.unlink(tempLocalPath),
+ fs.unlink(tempLocalBlurredPath),
+ ]);
}
-
- // Delete the temporary file.
- const unlink = promisify(fs.unlink);
- return unlink(tempLocalPath);
};
// [END cloudrun_imageproc_handler_blur]
diff --git a/run/image-processing/package.json b/run/image-processing/package.json
index d46a057c9b..8b062ebf5b 100644
--- a/run/image-processing/package.json
+++ b/run/image-processing/package.json
@@ -22,7 +22,7 @@
"@google-cloud/storage": "^7.0.0",
"@google-cloud/vision": "^4.0.0",
"express": "^4.16.4",
- "gm": "^1.23.1"
+ "sharp": "^0.34.5"
},
"devDependencies": {
"c8": "^10.0.0",
diff --git a/spanner/add-and-drop-new-database-role.js b/spanner/add-and-drop-new-database-role.js
new file mode 100644
index 0000000000..6414913056
--- /dev/null
+++ b/spanner/add-and-drop-new-database-role.js
@@ -0,0 +1,98 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// sample-metadata:
+// title: Add and drop new database role
+// usage: node add-and-drop-new-database-role.js
+
+'use strict';
+
+async function main(
+ instanceId = 'my-instance',
+ databaseId = 'my-database',
+ projectId = 'my-project-id'
+) {
+ // [START spanner_add_and_drop_database_role]
+ /**
+ * TODO(developer): Uncomment these variables before running the sample.
+ */
+ // const instanceId = 'my-instance';
+ // const databaseId = 'my-database';
+ // const projectId = 'my-project-id';
+
+ // Imports the Google Cloud client library
+ const {Spanner} = require('@google-cloud/spanner');
+
+ // creates a client
+ const spanner = new Spanner({
+ projectId: projectId,
+ });
+
+ const databaseAdminClient = spanner.getDatabaseAdminClient();
+
+ async function addAndDropNewDatabaseRole() {
+ // Creates a new user defined role and grant permissions
+ try {
+ const createRequest = [
+ 'CREATE ROLE parent',
+ 'GRANT SELECT ON TABLE Singers TO ROLE parent',
+ 'CREATE ROLE child',
+ 'GRANT ROLE parent TO ROLE child',
+ ];
+ const [createOperation] = await databaseAdminClient.updateDatabaseDdl({
+ database: databaseAdminClient.databasePath(
+ projectId,
+ instanceId,
+ databaseId
+ ),
+ statements: createRequest,
+ });
+
+ console.log('Waiting for operation to complete...');
+ await createOperation.promise();
+
+ console.log('Created roles child and parent and granted privileges');
+
+ // Revoke permissions and drop child role.
+ // A role can't be dropped until all its permissions are revoked.
+ const dropRequest = [
+ 'REVOKE ROLE parent FROM ROLE child',
+ 'DROP ROLE child',
+ ];
+ const [dropOperation] = await databaseAdminClient.updateDatabaseDdl({
+ database: databaseAdminClient.databasePath(
+ projectId,
+ instanceId,
+ databaseId
+ ),
+ statements: dropRequest,
+ });
+
+ console.log('Waiting for operation to complete...');
+ await dropOperation.promise();
+
+ console.log('Revoked privileges and dropped role child');
+ } catch (err) {
+ console.error('Error adding or dropping database roles:', err);
+ } finally {
+ // Close the spanner client when finished.
+ // The databaseAdminClient does not require explicit closure. The closure of the Spanner client will automatically close the databaseAdminClient.
+ spanner.close();
+ }
+ }
+ await addAndDropNewDatabaseRole();
+ // [END spanner_add_and_drop_database_role]
+}
+
+main(...process.argv.slice(2));
diff --git a/spanner/create-instance-without-default-backup-schedules.js b/spanner/create-instance-without-default-backup-schedules.js
new file mode 100644
index 0000000000..caa2f6ea3a
--- /dev/null
+++ b/spanner/create-instance-without-default-backup-schedules.js
@@ -0,0 +1,75 @@
+/**
+ * Copyright 2024 Google LLC
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+'use strict';
+
+function main(instanceId, projectId) {
+ async function createInstanceWithoutDefaultBackupSchedules() {
+ // [START spanner_create_instance_without_default_backup_schedule]
+ /**
+ * TODO(developer): Uncomment the following lines before running the sample.
+ **/
+ // const projectId = 'my-project-id';
+ // const instanceId = 'my-instance';
+
+ // Imports the Google Cloud client library
+ const {Spanner, protos} = require('@google-cloud/spanner');
+
+ // Creates a client
+ const spanner = new Spanner({
+ projectId: projectId,
+ });
+
+ const instanceAdminClient = spanner.getInstanceAdminClient();
+ // Creates a new instance
+ try {
+ const [operation] = await instanceAdminClient.createInstance({
+ instanceId: instanceId,
+ parent: instanceAdminClient.projectPath(projectId),
+ instance: {
+ config: instanceAdminClient.instanceConfigPath(
+ projectId,
+ 'regional-me-central2'
+ ),
+ nodeCount: 1,
+ displayName: 'Display name for the instance.',
+ labels: {
+ cloud_spanner_samples: 'true',
+ created: Math.round(Date.now() / 1000).toString(), // current time
+ },
+ defaultBackupScheduleType:
+ protos.google.spanner.admin.instance.v1.Instance
+ .DefaultBackupScheduleType.NONE,
+ },
+ });
+ await operation.promise();
+
+ console.log(
+ `Created instance ${instanceId} without default backup schedules.`
+ );
+ } catch (err) {
+ console.error(
+ 'Error creating instance without default backup schedules:',
+ err
+ );
+ } finally {
+ spanner.close();
+ }
+ // [END spanner_create_instance_without_default_backup_schedule]
+ }
+ createInstanceWithoutDefaultBackupSchedules();
+}
+
+main(...process.argv.slice(2));
diff --git a/spanner/get-database-roles.js b/spanner/get-database-roles.js
new file mode 100644
index 0000000000..1a4ac9838e
--- /dev/null
+++ b/spanner/get-database-roles.js
@@ -0,0 +1,75 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// sample-metadata:
+// title: List database roles
+// usage: node get-database-roles.js
+
+'use strict';
+
+async function main(
+ instanceId = 'my-instance',
+ databaseId = 'my-database',
+ projectId = 'my-project-id'
+) {
+ // [START spanner_list_database_roles]
+ /**
+ * TODO(developer): Uncomment these variables before running the sample.
+ */
+ // const instanceId = 'my-instance';
+ // const databaseId = 'my-database';
+ // const projectId = 'my-project-id';
+
+ // Imports the Google Cloud client library
+ const {Spanner} = require('@google-cloud/spanner');
+
+ // creates a client
+ const spanner = new Spanner({
+ projectId: projectId,
+ });
+
+ const databaseAdminClient = spanner.getDatabaseAdminClient();
+
+ async function getDatabaseRoles() {
+ try {
+ const dbPath = databaseAdminClient.databasePath(
+ projectId,
+ instanceId,
+ databaseId
+ );
+
+ // Fetching database roles
+ const [databaseRoles] = await databaseAdminClient.listDatabaseRoles({
+ parent: dbPath,
+ });
+
+ console.log(`Roles for Database: ${dbPath}`);
+ databaseRoles.forEach(role => {
+ console.log(`Role: ${role.name}`);
+ });
+ } catch (err) {
+ console.error('Error listing database roles:', err);
+ } finally {
+ spanner.close();
+ }
+ }
+ await getDatabaseRoles();
+ // [END spanner_list_database_roles]
+}
+
+process.on('unhandledRejection', err => {
+ console.error(err.message);
+ process.exitCode = 1;
+});
+main(...process.argv.slice(2));
diff --git a/spanner/instance-update.js b/spanner/instance-update.js
new file mode 100644
index 0000000000..ec9a9f42bc
--- /dev/null
+++ b/spanner/instance-update.js
@@ -0,0 +1,87 @@
+/**
+ * Copyright 2024 Google LLC
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// sample-metadata:
+// title: Updates an instance.
+// usage: node instance-update.js
+
+'use strict';
+
+function main(instanceId, projectId) {
+ async function updateInstance() {
+ // [START spanner_update_instance]
+
+ // Imports the Google Cloud client library
+ const {Spanner, protos} = require('@google-cloud/spanner');
+
+ /**
+ * TODO(developer): Uncomment the following lines before running the sample.
+ */
+ // const projectId = 'my-project-id';
+ // const instanceId = 'my-instance';
+
+ // Creates a client
+ const spanner = new Spanner({
+ projectId: projectId,
+ });
+
+ const instanceAdminClient = spanner.getInstanceAdminClient();
+
+ // Updates an instance
+ try {
+ const instancePath = instanceAdminClient.instancePath(
+ projectId,
+ instanceId
+ );
+
+ console.log(`Updating instance ${instancePath}.`);
+
+ const [operation] = await instanceAdminClient.updateInstance({
+ instance: {
+ name: instancePath,
+ labels: {
+ updated: 'true',
+ created: Math.round(Date.now() / 1000).toString(), // current time
+ },
+ edition:
+ protos.google.spanner.admin.instance.v1.Instance.Edition.ENTERPRISE, //optional
+ },
+ // Field mask specifying fields that should get updated in an Instance
+ fieldMask: {
+ paths: ['labels', 'edition'],
+ },
+ });
+
+ console.log(`Waiting for operation on ${instanceId} to complete...`);
+ await operation.promise();
+ console.log(`Updated instance ${instanceId}.`);
+ const [metadata] = await instanceAdminClient.getInstance({
+ name: instanceAdminClient.instancePath(projectId, instanceId),
+ });
+ console.log(
+ `Instance ${instanceId} has been updated with the ${metadata.edition} ` +
+ 'edition.'
+ );
+ } catch (err) {
+ console.error('Error updating instance:', err);
+ } finally {
+ spanner.close();
+ }
+ // [END spanner_update_instance]
+ }
+ updateInstance();
+}
+
+main(...process.argv.slice(2));
diff --git a/spanner/instance-with-asymmetric-autoscaling-config.js b/spanner/instance-with-asymmetric-autoscaling-config.js
new file mode 100644
index 0000000000..e626d0e142
--- /dev/null
+++ b/spanner/instance-with-asymmetric-autoscaling-config.js
@@ -0,0 +1,166 @@
+/**
+ * Copyright 2025 Google LLC
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// sample-metadata:
+// title: Creates an instance with asymmetric autoscaling config.
+// usage: node instance-with-asymmetric-autoscaling-config.js
+
+'use strict';
+
+function main(instanceId = 'my-instance', projectId = 'my-project-id') {
+ async function createInstanceWithAsymmetricAutoscalingConfig() {
+ // [START spanner_create_instance_with_asymmetric_autoscaling_config]
+ // Imports the Google Cloud client library
+ const {Spanner, protos} = require('@google-cloud/spanner');
+
+ /**
+ * TODO(developer): Uncomment the following lines before running the sample.
+ */
+ // const projectId = 'my-project-id';
+ // const instanceId = 'my-instance';
+
+ // Creates a client
+ const spanner = new Spanner({
+ projectId: projectId,
+ });
+
+ // Get the instance admin client
+ const instanceAdminClient = spanner.getInstanceAdminClient();
+
+ const autoscalingConfig =
+ protos.google.spanner.admin.instance.v1.AutoscalingConfig.create({
+ // Only one of minNodes/maxNodes or minProcessingUnits/maxProcessingUnits can be set.
+ autoscalingLimits:
+ protos.google.spanner.admin.instance.v1.AutoscalingConfig.AutoscalingLimits.create(
+ {
+ minNodes: 1,
+ maxNodes: 2,
+ }
+ ),
+ // highPriorityCpuUtilizationPercent and storageUtilizationPercent are both
+ // percentages and must lie between 0 and 100.
+ autoscalingTargets:
+ protos.google.spanner.admin.instance.v1.AutoscalingConfig.AutoscalingTargets.create(
+ {
+ highPriorityCpuUtilizationPercent: 65,
+ storageUtilizationPercent: 95,
+ }
+ ),
+ // The read-only replicas listed in the asymmetric autoscaling options scale independently
+ // from other replicas.
+ asymmetricAutoscalingOptions: [
+ protos.google.spanner.admin.instance.v1.AutoscalingConfig.AsymmetricAutoscalingOption.create(
+ {
+ replicaSelection:
+ protos.google.spanner.admin.instance.v1.ReplicaSelection.create(
+ {
+ location: 'europe-west1',
+ }
+ ),
+ }
+ ),
+ protos.google.spanner.admin.instance.v1.AutoscalingConfig.AsymmetricAutoscalingOption.create(
+ {
+ replicaSelection:
+ protos.google.spanner.admin.instance.v1.ReplicaSelection.create(
+ {
+ location: 'europe-west4',
+ }
+ ),
+ }
+ ),
+ protos.google.spanner.admin.instance.v1.AutoscalingConfig.AsymmetricAutoscalingOption.create(
+ {
+ replicaSelection:
+ protos.google.spanner.admin.instance.v1.ReplicaSelection.create(
+ {
+ location: 'asia-east1',
+ }
+ ),
+ }
+ ),
+ ],
+ });
+
+ // Creates a new instance with autoscaling configuration and asymmetric autoscaling option
+ // When autoscalingConfig is enabled, nodeCount and processingUnits fields
+ // need not be specified.
+ try {
+ const instancePath = instanceAdminClient.instancePath(
+ projectId,
+ instanceId
+ );
+
+ console.log(`Creating instance ${instancePath}.`);
+
+ const [operation] = await instanceAdminClient.createInstance({
+ instanceId: instanceId,
+ parent: instanceAdminClient.projectPath(projectId),
+ instance: {
+ config: instanceAdminClient.instanceConfigPath(
+ projectId,
+ 'nam-eur-asia3'
+ ),
+ displayName: 'Display name for the instance.',
+ autoscalingConfig: autoscalingConfig,
+ labels: {
+ cloud_spanner_samples: 'true',
+ created: Math.round(Date.now() / 1000).toString(), // current time
+ },
+ // Feature MULTI_REGION is available only for ENTERPRISE_PLUS edition
+ edition:
+ protos.google.spanner.admin.instance.v1.Instance.Edition
+ .ENTERPRISE_PLUS,
+ },
+ });
+
+ console.log(`Waiting for operation on ${instanceId} to complete...`);
+ await operation.promise();
+ console.log(`Created instance ${instanceId}.`);
+
+ // get instance metadata
+ const [metadata] = await instanceAdminClient.getInstance({
+ name: instancePath,
+ });
+ const asymmetricOptionsStr =
+ metadata.autoscalingConfig.asymmetricAutoscalingOptions?.length > 0
+ ? metadata.autoscalingConfig.asymmetricAutoscalingOptions
+ .map(option => option.replicaSelection?.location || 'N/A')
+ .join(', ')
+ : 'None';
+
+ console.log(
+ `Autoscaling configurations of ${instanceId} are: ` +
+ '\n' +
+ `Min nodes: ${metadata.autoscalingConfig.autoscalingLimits.minNodes} nodes.\n` +
+ `Max nodes: ${metadata.autoscalingConfig.autoscalingLimits.maxNodes} nodes.\n` +
+ `High priority cpu utilization percent: ${metadata.autoscalingConfig.autoscalingTargets.highPriorityCpuUtilizationPercent}.\n` +
+ `Storage utilization percent: ${metadata.autoscalingConfig.autoscalingTargets.storageUtilizationPercent}.\n` +
+ `Asymmetric Autoscaling Options: ${asymmetricOptionsStr}`
+ );
+ } catch (err) {
+ console.error(
+ 'Error creating instance with asymmetric autoscaling config:',
+ err
+ );
+ } finally {
+ spanner.close();
+ }
+ // [END spanner_create_instance_with_asymmetric_autoscaling_config]
+ }
+ createInstanceWithAsymmetricAutoscalingConfig();
+}
+
+main(...process.argv.slice(2));
diff --git a/spanner/instance-with-autoscaling-config.js b/spanner/instance-with-autoscaling-config.js
new file mode 100644
index 0000000000..46cf8390dd
--- /dev/null
+++ b/spanner/instance-with-autoscaling-config.js
@@ -0,0 +1,120 @@
+/**
+ * Copyright 2024 Google LLC
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// sample-metadata:
+// title: Creates an instance with autoscaling config.
+// usage: node instance-with-autoscaling-config.js
+
+'use strict';
+
+function main(instanceId = 'my-instance', projectId = 'my-project-id') {
+ async function createInstanceWithAutoscalingConfig() {
+ // [START spanner_create_instance_with_autoscaling_config]
+ // Imports the Google Cloud client library
+ const {Spanner, protos} = require('@google-cloud/spanner');
+
+ /**
+ * TODO(developer): Uncomment the following lines before running the sample.
+ */
+ // const projectId = 'my-project-id';
+ // const instanceId = 'my-instance';
+
+ // Creates a client
+ const spanner = new Spanner({
+ projectId: projectId,
+ });
+
+ // Get the instance admin client
+ const instanceAdminClient = spanner.getInstanceAdminClient();
+
+ const autoscalingConfig =
+ protos.google.spanner.admin.instance.v1.AutoscalingConfig.create({
+ // Only one of minNodes/maxNodes or minProcessingUnits/maxProcessingUnits can be set.
+ autoscalingLimits:
+ protos.google.spanner.admin.instance.v1.AutoscalingConfig.AutoscalingLimits.create(
+ {
+ minNodes: 1,
+ maxNodes: 2,
+ }
+ ),
+ // highPriorityCpuUtilizationPercent and storageUtilizationPercent are both
+ // percentages and must lie between 0 and 100.
+ autoscalingTargets:
+ protos.google.spanner.admin.instance.v1.AutoscalingConfig.AutoscalingTargets.create(
+ {
+ highPriorityCpuUtilizationPercent: 65,
+ storageUtilizationPercent: 95,
+ }
+ ),
+ });
+
+ // Creates a new instance with autoscaling configuration
+ // When autoscalingConfig is enabled, nodeCount and processingUnits fields
+ // need not be specified.
+ try {
+ const instancePath = instanceAdminClient.instancePath(
+ projectId,
+ instanceId
+ );
+
+ console.log(`Creating instance ${instancePath}.`);
+
+ const [operation] = await instanceAdminClient.createInstance({
+ instanceId: instanceId,
+ parent: instanceAdminClient.projectPath(projectId),
+ instance: {
+ config: instanceAdminClient.instanceConfigPath(
+ projectId,
+ 'regional-us-central1'
+ ),
+ displayName: 'Display name for the instance.',
+ autoscalingConfig: autoscalingConfig,
+ labels: {
+ cloud_spanner_samples: 'true',
+ created: Math.round(Date.now() / 1000).toString(), // current time
+ },
+ // Managed autoscaler is available only for ENTERPRISE edition
+ edition:
+ protos.google.spanner.admin.instance.v1.Instance.Edition.ENTERPRISE,
+ },
+ });
+
+ console.log(`Waiting for operation on ${instanceId} to complete...`);
+ await operation.promise();
+ console.log(`Created instance ${instanceId}.`);
+
+ // get instance metadata
+ const [metadata] = await instanceAdminClient.getInstance({
+ name: instancePath,
+ });
+ console.log(
+ `Autoscaling configurations of ${instanceId} are: ` +
+ '\n' +
+ `Min nodes: ${metadata.autoscalingConfig.autoscalingLimits.minNodes} nodes.\n` +
+ `Max nodes: ${metadata.autoscalingConfig.autoscalingLimits.maxNodes} nodes.\n` +
+ `High priority cpu utilization percent: ${metadata.autoscalingConfig.autoscalingTargets.highPriorityCpuUtilizationPercent}.\n` +
+ `Storage utilization percent: ${metadata.autoscalingConfig.autoscalingTargets.storageUtilizationPercent}.`
+ );
+ } catch (err) {
+ console.error('Error creating instance with autoscaling config:', err);
+ } finally {
+ spanner.close();
+ }
+ // [END spanner_create_instance_with_autoscaling_config]
+ }
+ createInstanceWithAutoscalingConfig();
+}
+
+main(...process.argv.slice(2));
diff --git a/spanner/instance-with-processing-units.js b/spanner/instance-with-processing-units.js
new file mode 100644
index 0000000000..57d0b4c37c
--- /dev/null
+++ b/spanner/instance-with-processing-units.js
@@ -0,0 +1,81 @@
+/**
+ * Copyright 2024 Google LLC
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+'use strict';
+
+async function createInstanceWithProcessingUnits(instanceId, projectId) {
+ // [START spanner_create_instance_with_processing_units]
+
+ // Imports the Google Cloud client library
+ const {Spanner} = require('@google-cloud/spanner');
+
+ /**
+ * TODO(developer): Uncomment the following lines before running the sample.
+ */
+ // const projectId = 'my-project-id';
+ // const instanceId = 'my-instance';
+
+ // Creates a client
+ const spanner = new Spanner({
+ projectId: projectId,
+ });
+
+ const instanceAdminClient = spanner.getInstanceAdminClient();
+
+ // Creates a new instance
+ try {
+ const instancePath = instanceAdminClient.instancePath(
+ projectId,
+ instanceId
+ );
+ console.log(`Creating instance ${instancePath}.`);
+
+ const [operation] = await instanceAdminClient.createInstance({
+ instanceId: instanceId,
+ instance: {
+ config: instanceAdminClient.instanceConfigPath(
+ projectId,
+ 'regional-us-central1'
+ ),
+ displayName: 'Display name for the instance.',
+ processingUnits: 500,
+ labels: {
+ cloud_spanner_samples: 'true',
+ created: Math.round(Date.now() / 1000).toString(), // current time
+ },
+ },
+ parent: instanceAdminClient.projectPath(projectId),
+ });
+
+ console.log(`Waiting for operation on ${instanceId} to complete...`);
+ await operation.promise();
+ console.log(`Created instance ${instanceId}.`);
+ const [metadata] = await instanceAdminClient.getInstance({
+ name: instancePath,
+ });
+ console.log(
+ `Instance ${instanceId} has ${metadata.processingUnits} ` +
+ 'processing units.'
+ );
+ } catch (err) {
+ console.error('Error creating instance with processing units:', err);
+ } finally {
+ spanner.close();
+ }
+ // [END spanner_create_instance_with_processing_units]
+}
+
+module.exports.createInstanceWithProcessingUnits =
+ createInstanceWithProcessingUnits;
diff --git a/spanner/instance.js b/spanner/instance.js
new file mode 100644
index 0000000000..08c8d1aa38
--- /dev/null
+++ b/spanner/instance.js
@@ -0,0 +1,102 @@
+/**
+ * Copyright 2024 Google LLC
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+'use strict';
+
+// Creates an instance using the Instance Admin Client
+async function createInstance(instanceId, projectId) {
+ // [START spanner_create_instance]
+
+ // Imports the Google Cloud client library
+ const {Spanner, protos} = require('@google-cloud/spanner');
+
+ // Creates a client
+ const spanner = new Spanner({
+ projectId: projectId,
+ });
+
+ const instanceAdminClient = spanner.getInstanceAdminClient();
+ /**
+ * TODO(developer): Uncomment the following lines before running the sample.
+ **/
+ // const projectId = 'my-project-id';
+ // const instanceId = 'my-instance';
+
+ // Creates a new instance
+ try {
+ const instancePath = instanceAdminClient.instancePath(
+ projectId,
+ instanceId
+ );
+ console.log(`Creating instance ${instancePath}.`);
+
+ const [operation] = await instanceAdminClient.createInstance({
+ instanceId: instanceId,
+ parent: instanceAdminClient.projectPath(projectId),
+ instance: {
+ config: instanceAdminClient.instanceConfigPath(
+ projectId,
+ 'regional-us-central1'
+ ),
+ nodeCount: 1,
+ displayName: 'Display name for the instance.',
+ labels: {
+ cloud_spanner_samples: 'true',
+ created: Math.round(Date.now() / 1000).toString(), // current time
+ },
+ edition:
+ protos.google.spanner.admin.instance.v1.Instance.Edition.STANDARD, //optional
+ },
+ });
+
+ console.log(`Waiting for operation on ${instanceId} to complete...`);
+ await operation.promise();
+
+ console.log(`Created instance ${instanceId}.`);
+ } catch (err) {
+ console.error('ERROR:', err);
+ } finally {
+ spanner.close();
+ }
+ // [END spanner_create_instance]
+}
+
+const {
+ createInstanceWithProcessingUnits,
+} = require('./instance-with-processing-units');
+
+require('yargs')
+ .demand(1)
+ .command(
+    'createInstance <instanceName> <projectId>',
+ 'Creates an example instance in a Cloud Spanner instance using Instance Admin Client.',
+ {},
+ opts => createInstance(opts.instanceName, opts.projectId)
+ )
+ .example('node $0 createInstance "my-instance" "my-project-id"')
+ .command(
+    'createInstanceWithProcessingUnits <instanceName> <projectId>',
+ 'Creates an example instance in a Cloud Spanner instance with processing units.',
+ {},
+ opts => createInstanceWithProcessingUnits(opts.instanceName, opts.projectId)
+ )
+ .example(
+ 'node $0 createInstanceWithProcessingUnits "my-instance" "my-project-id"'
+ )
+ .wrap(120)
+ .recommendCommands()
+ .epilogue('For more information, see https://cloud.google.com/spanner/docs')
+ .strict()
+ .help().argv;
diff --git a/spanner/read-data-with-database-role.js b/spanner/read-data-with-database-role.js
new file mode 100644
index 0000000000..70eaf1a05d
--- /dev/null
+++ b/spanner/read-data-with-database-role.js
@@ -0,0 +1,74 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// sample-metadata:
+// title: Read data with database role
+// usage: node read-data-with-database-role.js
+
+'use strict';
+
+async function main(
+ instanceId = 'my-instance',
+ databaseId = 'my-database',
+ projectId = 'my-project-id'
+) {
+ // [START spanner_read_data_with_database_role]
+ /**
+ * TODO(developer): Uncomment these variables before running the sample.
+ */
+ // const instanceId = 'my-instance';
+ // const databaseId = 'my-database';
+ // const projectId = 'my-project-id';
+ // Imports the Google Cloud Spanner client library
+ const {Spanner} = require('@google-cloud/spanner');
+
+ // Instantiates a client
+ const spanner = new Spanner({
+ projectId: projectId,
+ });
+
+ async function readDataWithDatabaseRole() {
+ // Gets a reference to a Cloud Spanner instance and database.
+ const instance = spanner.instance(instanceId);
+ // Connect to a database using the 'parent' database role. This means that the connection will only have the permissions that have explicitly been granted to the 'parent' role.
+ const options = {
+ databaseRole: 'parent',
+ };
+ const database = instance.database(databaseId, options);
+
+ try {
+ const query = {
+ sql: 'SELECT SingerId, FirstName, LastName FROM Singers',
+ };
+ const [rows] = await database.run(query);
+
+ for (const row of rows) {
+ const json = row.toJSON();
+
+ console.log(
+ `SingerId: ${json.SingerId}, FirstName: ${json.FirstName}, LastName: ${json.LastName}`
+ );
+ }
+ } catch (err) {
+ console.error('Error reading data with database role:', err);
+ } finally {
+ // Close the database when finished.
+ await database.close();
+ }
+ }
+ await readDataWithDatabaseRole();
+ // [END spanner_read_data_with_database_role]
+}
+
+main(...process.argv.slice(2));
diff --git a/spanner/read-lock-mode.js b/spanner/read-lock-mode.js
new file mode 100644
index 0000000000..d1485f2f17
--- /dev/null
+++ b/spanner/read-lock-mode.js
@@ -0,0 +1,96 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// sample-metadata:
+// title: Performs a read-write transaction with read lock mode option
+// usage: node read-lock-mode.js
+
+'use strict';
+
+async function main(
+ instanceId = 'my-instance',
+ databaseId = 'my-database',
+ projectId = 'my-project-id'
+) {
+ // [START spanner_read_lock_mode]
+ // Imports the Google Cloud Spanner client library
+ const {Spanner, protos} = require('@google-cloud/spanner');
+ // The read lock mode specified at the client-level will be applied
+ // to all RW transactions.
+ const defaultTransactionOptions = {
+ readLockMode:
+ protos.google.spanner.v1.TransactionOptions.ReadWrite.ReadLockMode
+ .OPTIMISTIC,
+ };
+
+ // Instantiates a client with defaultTransactionOptions
+ const spanner = new Spanner({
+ projectId: projectId,
+ defaultTransactionOptions,
+ });
+
+ async function runTransactionWithReadLockMode() {
+ // Gets a reference to a Cloud Spanner instance and database
+ const instance = spanner.instance(instanceId);
+ const database = instance.database(databaseId);
+ // The read lock mode specified at the request-level takes precedence over
+ // the read lock mode configured at the client-level.
+ const readLockModeOptionsForTransaction = {
+ readLockMode:
+ protos.google.spanner.v1.TransactionOptions.ReadWrite.ReadLockMode
+ .PESSIMISTIC,
+ };
+
+ try {
+ // Use runTransactionAsync to safely handle the transaction lifecycle
+ await database.runTransactionAsync(
+ readLockModeOptionsForTransaction,
+ async transaction => {
+ const query =
+ 'SELECT AlbumTitle FROM Albums WHERE SingerId = 2 AND AlbumId = 1';
+
+ const [rows] = await transaction.run(query);
+
+ // Gets first album's title cleanly using native .toJSON()
+ const albumTitle = rows[0].toJSON().AlbumTitle;
+ console.log(`previous album title ${albumTitle}`);
+
+ const update =
+ "UPDATE Albums SET AlbumTitle = 'New Album Title' WHERE SingerId = 2 AND AlbumId = 1";
+ const [rowCount] = await transaction.runUpdate(update);
+ console.log(
+ `Successfully updated ${rowCount} record in Albums table.`
+ );
+
+ await transaction.commit();
+ console.log(
+ 'Successfully executed read-write transaction with readLockMode option.'
+ );
+ }
+ );
+ } catch (err) {
+ console.error(
+ 'Error executing read-write transaction with read lock mode:',
+ err
+ );
+ } finally {
+ // Close the database when finished.
+ await database.close();
+ }
+ }
+ await runTransactionWithReadLockMode();
+ // [END spanner_read_lock_mode]
+}
+
+main(...process.argv.slice(2));
diff --git a/spanner/repeatable-reads.js b/spanner/repeatable-reads.js
new file mode 100644
index 0000000000..42ab774bd0
--- /dev/null
+++ b/spanner/repeatable-reads.js
@@ -0,0 +1,90 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// sample-metadata:
+// title: Performs a read-write transaction with isolation level option
+// usage: node repeatable-reads.js
+
+'use strict';
+
+async function main(
+ instanceId = 'my-instance',
+ databaseId = 'my-database',
+ projectId = 'my-project-id'
+) {
+ // [START spanner_isolation_level]
+ // Imports the Google Cloud Spanner client library
+ const {Spanner, protos} = require('@google-cloud/spanner');
+ // The isolation level specified at the client-level will be applied
+ // to all RW transactions.
+ const defaultTransactionOptions = {
+ isolationLevel:
+ protos.google.spanner.v1.TransactionOptions.IsolationLevel.SERIALIZABLE,
+ };
+
+ // Instantiates a client with defaultTransactionOptions
+ const spanner = new Spanner({
+ projectId: projectId,
+ defaultTransactionOptions,
+ });
+
+ async function runTransactionWithIsolationLevel() {
+ // Gets a reference to a Cloud Spanner instance and database
+ const instance = spanner.instance(instanceId);
+ const database = instance.database(databaseId);
+ // The isolation level specified at the request level takes precedence over the isolation level configured at the client level.
+ const isolationOptionsForTransaction = {
+ isolationLevel:
+ protos.google.spanner.v1.TransactionOptions.IsolationLevel
+ .REPEATABLE_READ,
+ };
+ try {
+ // Use runTransactionAsync to safely handle the transaction lifecycle
+ await database.runTransactionAsync(
+ isolationOptionsForTransaction,
+ async transaction => {
+ const query =
+ 'SELECT AlbumTitle FROM Albums WHERE SingerId = 1 AND AlbumId = 1';
+
+ const [rows] = await transaction.run(query);
+
+ // Gets first album's title
+ const albumTitle = rows[0].toJSON().AlbumTitle;
+ console.log(`previous album title ${albumTitle}`);
+
+ const update =
+ "UPDATE Albums SET AlbumTitle = 'New Album Title' WHERE SingerId = 1 AND AlbumId = 1";
+ const [rowCount] = await transaction.runUpdate(update);
+ console.log(
+ `Successfully updated ${rowCount} record in Albums table.`
+ );
+
+ await transaction.commit();
+ console.log(
+ 'Successfully executed read-write transaction with isolationLevel option.'
+ );
+ }
+ );
+ } catch (err) {
+ console.error('ERROR:', err);
+ } finally {
+ // Close the database when finished.
+ await database.close();
+ }
+ }
+ await runTransactionWithIsolationLevel();
+ // [END spanner_isolation_level]
+}
+
+main(...process.argv.slice(2));
diff --git a/spanner/request-tag.js b/spanner/request-tag.js
new file mode 100644
index 0000000000..87724251de
--- /dev/null
+++ b/spanner/request-tag.js
@@ -0,0 +1,65 @@
+/**
+ * Copyright 2021 Google LLC
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// sample-metadata:
+// title: Sets a request tag for a single query
+// usage: node request-tag.js
+
+'use strict';
+
+function main(instanceId, databaseId, projectId) {
+ // [START spanner_set_request_tag]
+ /**
+ * TODO(developer): Uncomment the following lines before running the sample.
+ */
+ // const projectId = 'my-project-id';
+ // const instanceId = 'my-instance';
+ // const databaseId = 'my-database';
+
+ // Imports the Google Cloud client library
+ const {Spanner} = require('@google-cloud/spanner');
+
+ // Creates a client
+ const spanner = new Spanner({
+ projectId: projectId,
+ });
+
+ async function queryTags() {
+ // Gets a reference to a Cloud Spanner instance and database.
+ const instance = spanner.instance(instanceId);
+ const database = instance.database(databaseId);
+ try {
+ // Execute a query with a request tag.
+ const [albums] = await database.run({
+ sql: 'SELECT SingerId, AlbumId, AlbumTitle FROM Albums',
+ requestOptions: {requestTag: 'app=concert,env=dev,action=select'},
+ json: true,
+ });
+ albums.forEach(album => {
+ console.log(
+ `SingerId: ${album.SingerId}, AlbumId: ${album.AlbumId}, AlbumTitle: ${album.AlbumTitle}`
+ );
+ });
+ } catch (err) {
+ console.error('Error executing query with request tag:', err);
+ } finally {
+ await database.close();
+ }
+ }
+ queryTags();
+ // [END spanner_set_request_tag]
+}
+
+main(...process.argv.slice(2));
diff --git a/spanner/statement-timeout.js b/spanner/statement-timeout.js
new file mode 100644
index 0000000000..9bd061ccb8
--- /dev/null
+++ b/spanner/statement-timeout.js
@@ -0,0 +1,70 @@
+/**
+ * Copyright 2025 Google LLC
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// sample-metadata:
+// title: Executes a read/write transaction with statement timeout
+// usage: node statement-timeout.js
+
+'use strict';
+
+async function main(instanceId, databaseId, projectId) {
+ // [START spanner_set_statement_timeout]
+ /**
+ * TODO(developer): Uncomment the following lines before running the sample.
+ */
+ // const projectId = 'my-project-id';
+ // const instanceId = 'my-instance';
+ // const databaseId = 'my-database';
+
+ // Imports the Google Cloud client library
+ const {Spanner} = require('@google-cloud/spanner');
+
+ // Creates a client
+ const spanner = new Spanner({
+ projectId: projectId,
+ });
+
+ async function executeSqlWithTimeout() {
+ // Gets a reference to a Cloud Spanner instance and database.
+ const instance = spanner.instance(instanceId);
+ const database = instance.database(databaseId);
+
+ try {
+ await database.runTransactionAsync(async tx => {
+ // NOTE: You can use gaxOptions to set a custom timeout for a single RPC
+ // invocation. This timeout can however ONLY BE SHORTER than the default timeout
+ // for the RPC. If you set a timeout that is longer than the default timeout, then
+ // the default timeout will be used.
+ const query = {
+ sql: "INSERT INTO Singers (SingerId, FirstName, LastName) VALUES (110, 'George', 'Washington')",
+ gaxOptions: {
+ timeout: 60000, // 60 seconds timeout
+ },
+ };
+ const [, stats] = await tx.run(query);
+ console.log(`${stats.rowCountExact} record inserted.`);
+ await tx.commit();
+ });
+ } catch (err) {
+ console.error('ERROR:', err);
+ } finally {
+ await database.close();
+ }
+ }
+ await executeSqlWithTimeout();
+ // [END spanner_set_statement_timeout]
+}
+
+main(...process.argv.slice(2));
diff --git a/spanner/system-test/spanner.test.js b/spanner/system-test/spanner.test.js
index cdc76f49d5..b31bb249ea 100644
--- a/spanner/system-test/spanner.test.js
+++ b/spanner/system-test/spanner.test.js
@@ -1314,7 +1314,7 @@ describe('Autogenerated Admin Clients', () => {
});
// query with request tag
- it.skip('should execute a query with a request tag', async () => {
+ it('should execute a query with a request tag', async () => {
const output = execSync(
`${requestTagCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`
);
@@ -1322,7 +1322,7 @@ describe('Autogenerated Admin Clients', () => {
});
// read_write_transaction with transaction tag
- it.skip('should execute a read/write transaction with a transaction tag', async () => {
+ it('should execute a read/write transaction with a transaction tag', async () => {
const output = execSync(
`${transactionTagCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`
);
@@ -1330,7 +1330,7 @@ describe('Autogenerated Admin Clients', () => {
});
// read_write_transaction with transaction timeout
- it.skip('should execute a read/write transaction with a transaction timeout of 60 seconds', async () => {
+ it('should execute a read/write transaction with a transaction timeout of 60 seconds', async () => {
const output = execSync(
`${transactionTimeoutCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`
);
@@ -1338,7 +1338,7 @@ describe('Autogenerated Admin Clients', () => {
});
// read_write_transaction with statement timeout
- it.skip('should execute a read/write transaction with a statement timeout of 60 seconds', async () => {
+ it('should execute a read/write transaction with a statement timeout of 60 seconds', async () => {
const output = execSync(
`${statementTimeoutCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`
);
@@ -1346,7 +1346,7 @@ describe('Autogenerated Admin Clients', () => {
});
// add_json_column
- it.skip('should add a VenueDetails column to Venues example table', async () => {
+ it('should add a VenueDetails column to Venues example table', async () => {
const output = execSync(
`${datatypesCmd} addJsonColumn "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}`
);
@@ -1362,7 +1362,7 @@ describe('Autogenerated Admin Clients', () => {
});
// update_data_with_json
- it.skip('should update rows in Venues example table to add data in VenueDetails column', async () => {
+ it('should update rows in Venues example table to add data in VenueDetails column', async () => {
const output = execSync(
`${datatypesCmd} updateWithJsonData ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`
);
@@ -1370,7 +1370,7 @@ describe('Autogenerated Admin Clients', () => {
});
// query_with_json_parameter
- it.skip('should use a JSON query parameter to query records from the Venues example table', async () => {
+ it('should use a JSON query parameter to query records from the Venues example table', async () => {
const output = execSync(
`${datatypesCmd} queryWithJsonParameter ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`
);
@@ -1378,7 +1378,7 @@ describe('Autogenerated Admin Clients', () => {
});
// isolation_level_option
- it.skip('should run read-write transaction with isolation level option set', () => {
+ it('should run read-write transaction with isolation level option set', () => {
const output = execSync(
`node repeatable-reads.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`
);
@@ -1396,7 +1396,7 @@ describe('Autogenerated Admin Clients', () => {
});
// read_lock_mode_option
- it.skip('should run read-write transaction with read lock mode option set', () => {
+ it('should run read-write transaction with read lock mode option set', () => {
const output = execSync(
`node read-lock-mode.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`
);
@@ -1414,7 +1414,7 @@ describe('Autogenerated Admin Clients', () => {
});
// add_and_drop_new_database_role
- it.skip('should add and drop new database roles', async () => {
+ it('should add and drop new database roles', async () => {
const output = execSync(
`node add-and-drop-new-database-role.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`
);
@@ -1430,7 +1430,7 @@ describe('Autogenerated Admin Clients', () => {
});
// read_data_with_database_role
- it.skip('should read data with database role', async () => {
+ it('should read data with database role', async () => {
const output = execSync(
`node read-data-with-database-role.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`
);
@@ -1441,7 +1441,7 @@ describe('Autogenerated Admin Clients', () => {
});
// get_database_roles
- it.skip('should list database roles', async () => {
+ it('should list database roles', async () => {
const output = execSync(
`node get-database-roles.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`
);
diff --git a/spanner/transaction-tag.js b/spanner/transaction-tag.js
new file mode 100644
index 0000000000..b92835ea45
--- /dev/null
+++ b/spanner/transaction-tag.js
@@ -0,0 +1,90 @@
+/**
+ * Copyright 2021 Google LLC
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// sample-metadata:
+// title: Executes a read/write transaction with transaction and request tags
+// usage: node transaction-tag.js
+
+'use strict';
+
+function main(instanceId, databaseId, projectId) {
+ // [START spanner_set_transaction_tag]
+ /**
+ * TODO(developer): Uncomment the following lines before running the sample.
+ */
+ // const projectId = 'my-project-id';
+ // const instanceId = 'my-instance';
+ // const databaseId = 'my-database';
+
+ // Imports the Google Cloud client library
+ const {Spanner} = require('@google-cloud/spanner');
+
+ // Creates a client
+ const spanner = new Spanner({
+ projectId: projectId,
+ });
+
+ async function transactionTag() {
+ // Gets a reference to a Cloud Spanner instance and database.
+ const instance = spanner.instance(instanceId);
+ const database = instance.database(databaseId);
+
+ // Run a transaction with a transaction tag that will automatically be
+ // included with each request in the transaction.
+ try {
+ await database.runTransactionAsync(
+ {requestOptions: {transactionTag: 'app=cart,env=dev'}},
+ async tx => {
+ // Set the request tag to "app=concert,env=dev,action=update".
+ // This request tag will only be set on this request.
+ await tx.runUpdate({
+ sql: 'UPDATE Venues SET Capacity = DIV(Capacity, 4) WHERE OutdoorVenue = false',
+ requestOptions: {requestTag: 'app=concert,env=dev,action=update'},
+ });
+ console.log('Updated capacity of all indoor venues to 1/4.');
+
+ await tx.runUpdate({
+ sql: `INSERT INTO Venues (VenueId, VenueName, Capacity, OutdoorVenue, LastUpdateTime)
+ VALUES (@venueId, @venueName, @capacity, @outdoorVenue, PENDING_COMMIT_TIMESTAMP())`,
+ params: {
+ venueId: 81,
+ venueName: 'Venue 81',
+ capacity: 1440,
+ outdoorVenue: true,
+ },
+ types: {
+ venueId: {type: 'int64'},
+ venueName: {type: 'string'},
+ capacity: {type: 'int64'},
+ outdoorVenue: {type: 'bool'},
+ },
+ requestOptions: {requestTag: 'app=concert,env=dev,action=update'},
+ });
+ console.log('Inserted new outdoor venue');
+
+ await tx.commit();
+ }
+ );
+ } catch (err) {
+ console.error('ERROR:', err);
+ } finally {
+ await database.close();
+ }
+ }
+ transactionTag();
+ // [END spanner_set_transaction_tag]
+}
+
+main(...process.argv.slice(2));
diff --git a/spanner/transaction-timeout.js b/spanner/transaction-timeout.js
new file mode 100644
index 0000000000..abd6d8c5e9
--- /dev/null
+++ b/spanner/transaction-timeout.js
@@ -0,0 +1,75 @@
+/**
+ * Copyright 2025 Google LLC
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// sample-metadata:
+// title: Executes a read/write transaction with transaction timeout
+// usage: node transaction-timeout.js
+
+'use strict';
+
+async function main(instanceId, databaseId, projectId) {
+ // [START spanner_transaction_timeout]
+ /**
+ * TODO(developer): Uncomment the following lines before running the sample.
+ */
+ // const projectId = 'my-project-id';
+ // const instanceId = 'my-instance';
+ // const databaseId = 'my-database';
+
+ // Imports the Google Cloud client library
+ const {Spanner} = require('@google-cloud/spanner');
+
+ // Creates a client
+ const spanner = new Spanner({
+ projectId: projectId,
+ });
+
+ async function executeTransactionWithTimeout() {
+ // Gets a reference to a Cloud Spanner instance and database.
+ const instance = spanner.instance(instanceId);
+ const database = instance.database(databaseId);
+
+ const options = {
+ timeout: 60000, // 60 seconds timeout
+ };
+
+ try {
+ await database.runTransactionAsync(options, async tx => {
+ const [results] = await tx.run(
+ 'SELECT SingerId, FirstName, LastName FROM Singers ORDER BY LastName, FirstName'
+ );
+ results.forEach(result => {
+ const jsonRow = result.toJSON();
+ console.log(
+ `SingerId: ${jsonRow.SingerId}, FirstName: ${jsonRow.FirstName}, LastName: ${jsonRow.LastName}`
+ );
+ });
+ const sql =
+ "INSERT INTO Singers (SingerId, FirstName, LastName) VALUES (100, 'George', 'Washington')";
+ const [rowCount] = await tx.runUpdate(sql);
+ console.log(`${rowCount} record inserted.`);
+ await tx.commit();
+ });
+ } catch (err) {
+ console.error('ERROR:', err);
+ } finally {
+ await database.close();
+ }
+ }
+ await executeTransactionWithTimeout();
+ // [END spanner_transaction_timeout]
+}
+
+main(...process.argv.slice(2));
diff --git a/spanner/update-instance-default-backup-schedule-type.js b/spanner/update-instance-default-backup-schedule-type.js
new file mode 100644
index 0000000000..603e3eda33
--- /dev/null
+++ b/spanner/update-instance-default-backup-schedule-type.js
@@ -0,0 +1,78 @@
+/**
+ * Copyright 2024 Google LLC
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// sample-metadata:
+//  title: Updates an instance's default backup schedule type.
+//  usage: node update-instance-default-backup-schedule-type.js
+
+'use strict';
+
+function main(instanceId, projectId) {
+ async function updateInstanceDefaultBackupScheduleType() {
+ // [START spanner_update_instance_default_backup_schedule_type]
+ /**
+ * TODO(developer): Uncomment the following lines before running the sample.
+ */
+ // const projectId = 'my-project-id';
+ // const instanceId = 'my-instance';
+
+ // Imports the Google Cloud client library
+ const {Spanner, protos} = require('@google-cloud/spanner');
+
+ // Creates a client
+ const spanner = new Spanner({
+ projectId: projectId,
+ });
+ const instanceAdminClient = spanner.getInstanceAdminClient();
+
+ // Updates an instance
+ try {
+ const instancePath = instanceAdminClient.instancePath(
+ projectId,
+ instanceId
+ );
+
+ const [operation] = await instanceAdminClient.updateInstance({
+ instance: {
+ name: instancePath,
+ defaultBackupScheduleType:
+ protos.google.spanner.admin.instance.v1.Instance
+ .DefaultBackupScheduleType.AUTOMATIC, // optional
+ },
+ // Field mask specifying fields that should get updated in an Instance
+ fieldMask: {
+ paths: ['default_backup_schedule_type'],
+ },
+ });
+
+ await operation.promise();
+ const [metadata] = await instanceAdminClient.getInstance({
+ name: instancePath,
+ });
+ console.log(
+ `Instance ${instanceId} has been updated with the ${metadata.defaultBackupScheduleType}` +
+ ' default backup schedule type.'
+ );
+ } catch (err) {
+ console.error('ERROR:', err);
+ } finally {
+ spanner.close();
+ }
+ // [END spanner_update_instance_default_backup_schedule_type]
+ }
+ updateInstanceDefaultBackupScheduleType();
+}
+
+main(...process.argv.slice(2));
diff --git a/storagebatchoperations/cancelJob.js b/storagebatchoperations/cancelJob.js
new file mode 100644
index 0000000000..7e739fc076
--- /dev/null
+++ b/storagebatchoperations/cancelJob.js
@@ -0,0 +1,89 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+/**
+ * This application demonstrates how to perform basic operations on a Batch Operations
+ * instance with the Google Cloud Storage API.
+ *
+ * For more information, see the documentation at https://cloud.google.com/storage/docs/batch-operations/overview.
+ */
+
+function main(projectId, jobId) {
+ // [START storage_batch_cancel_job]
+
+ /**
+ * Cancel a batch job instance.
+ *
+ * The operation to cancel a batch job instance in Google Cloud Storage (GCS) is used to stop
+ * a running or queued asynchronous task that is currently processing a large number of GCS objects.
+ *
+ * @param {string} projectId The Google Cloud project ID.
+ * Example: 'my-project-id'
+ * @param {string} jobId A unique identifier for this job.
+ * Example: '94d60cc1-2d95-41c5-b6e3-ff66cd3532d5'
+ */
+
+  // Imports the Storage Batch Operations client library
+ const {StorageBatchOperationsClient} =
+ require('@google-cloud/storagebatchoperations').v1;
+
+ // Instantiates a client
+ const client = new StorageBatchOperationsClient();
+
+ async function cancelJob() {
+ const name = client.jobPath(projectId, 'global', jobId);
+
+ // Create the request
+ const request = {
+ name,
+ };
+
+ // Run request
+ try {
+ await client.cancelJob(request);
+ console.log(`Cancelled job: ${name}`);
+ } catch (error) {
+ // This might be expected if the job completed quickly or failed creation
+ console.error(
+ `Error canceling batch jobs for jobId ${jobId}:`,
+ error.message
+ );
+
+ if (error.code === 5) {
+ // NOT_FOUND (gRPC code 5) error can occur if the batch job does not exist.
+ console.error(
+ `Ensure the job '${jobId}' exists in project '${projectId}'.`
+ );
+ } else if (error.code === 9) {
+ // FAILED_PRECONDITION (gRPC code 9) can occur if the job is already being cancelled
+ // or is not in a RUNNING state that allows the cancel operation.
+ console.error(
+ `Batch job '${jobId}' may not be in a state that allows canceling (e.g., must be RUNNING).`
+ );
+ }
+ throw error;
+ }
+ }
+
+ cancelJob();
+ // [END storage_batch_cancel_job]
+}
+
+process.on('unhandledRejection', err => {
+ console.error(err.message);
+ process.exitCode = 1;
+});
+main(...process.argv.slice(2));
diff --git a/storagebatchoperations/createJob.js b/storagebatchoperations/createJob.js
new file mode 100644
index 0000000000..0f6eba4134
--- /dev/null
+++ b/storagebatchoperations/createJob.js
@@ -0,0 +1,93 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+/**
+ * This application demonstrates how to perform basic operations on a Batch Operations
+ * instance with the Google Cloud Storage API.
+ *
+ * For more information, see the documentation at https://cloud.google.com/storage/docs/batch-operations/overview.
+ */
+
+function main(projectId, jobId, bucketName, objectPrefix) {
+ // [START storage_batch_create_job]
+
+ /**
+ * Create a new batch job instance.
+ *
+ * @param {string} projectId Your Google Cloud project ID.
+ * Example: 'my-project-id'
+ * @param {string} bucketName The name of your GCS bucket.
+ * Example: 'your-gcp-bucket-name'
+ * @param {string} jobId A unique identifier for this job.
+ * Example: '94d60cc1-2d95-41c5-b6e3-ff66cd3532d5'
+ * @param {string} objectPrefix The prefix of objects to include in the operation.
+ * Example: 'prefix1'
+ */
+
+  // Imports the Storage Batch Operations client library
+ const {StorageBatchOperationsClient} =
+ require('@google-cloud/storagebatchoperations').v1;
+
+ // Instantiates a client
+ const client = new StorageBatchOperationsClient();
+
+ async function createJob() {
+ const parent = await client.locationPath(projectId, 'global');
+
+ // Create the request
+ const request = {
+ parent,
+ jobId,
+ job: {
+ bucketList: {
+ buckets: [
+ {
+ bucket: bucketName,
+ prefixList: {
+ includedObjectPrefixes: [objectPrefix],
+ },
+ },
+ ],
+ },
+ deleteObject: {
+ permanentObjectDeletionEnabled: false,
+ },
+ },
+ };
+
+ try {
+ // Run the request, which returns an Operation object
+ const [operation] = await client.createJob(request);
+ console.log(`Waiting for operation ${operation.name} to complete...`);
+
+ // Wait for the operation to complete and get the final resource
+ const [response] = await operation.promise();
+ console.log(`Created job: ${response.name}`);
+ } catch (error) {
+ console.error('Failed to create batch job:', error.message);
+ throw error;
+ }
+ }
+
+ createJob();
+ // [END storage_batch_create_job]
+}
+
+process.on('unhandledRejection', err => {
+ console.error(err.message);
+ process.exitCode = 1;
+});
+main(...process.argv.slice(2));
diff --git a/storagebatchoperations/deleteJob.js b/storagebatchoperations/deleteJob.js
new file mode 100644
index 0000000000..0934a2311f
--- /dev/null
+++ b/storagebatchoperations/deleteJob.js
@@ -0,0 +1,81 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+/**
+ * This application demonstrates how to perform basic operations on a Batch Operations
+ * instance with the Google Cloud Storage API.
+ *
+ * For more information, see the documentation at https://cloud.google.com/storage/docs/batch-operations/overview.
+ */
+
+function main(projectId, jobId) {
+ // [START storage_batch_delete_job]
+ /**
+ * Delete a batch job instance.
+ *
+ * This operation is used to remove a completed, failed, or cancelled Batch Operation
+ * job from the system's list. It is essentially a cleanup action.
+ *
+ * @param {string} projectId Your Google Cloud project ID.
+ * Example: 'my-project-id'
+ * @param {string} jobId A unique identifier for this job.
+ * Example: '94d60cc1-2d95-41c5-b6e3-ff66cd3532d5'
+ */
+
+  // Imports the Storage Batch Operations client library
+ const {StorageBatchOperationsClient} =
+ require('@google-cloud/storagebatchoperations').v1;
+
+ // Instantiates a client
+ const client = new StorageBatchOperationsClient();
+
+ async function deleteJob() {
+ const name = client.jobPath(projectId, 'global', jobId);
+
+ // Create the request
+ const request = {
+ name,
+ };
+
+ try {
+ // Run request
+ await client.deleteJob(request);
+ console.log(`Deleted job: ${name}`);
+ } catch (error) {
+ console.error(
+ `Error deleting batch jobs for jobId ${jobId}:`,
+ error.message
+ );
+
+ if (error.code === 5) {
+ // NOT_FOUND (gRPC code 5) error can occur if the batch job does not exist.
+ console.error(
+ `Ensure the job '${jobId}' exists in project '${projectId}'.`
+ );
+ }
+ throw error;
+ }
+ }
+
+ deleteJob();
+ // [END storage_batch_delete_job]
+}
+
+process.on('unhandledRejection', err => {
+ console.error(err.message);
+ process.exitCode = 1;
+});
+main(...process.argv.slice(2));
diff --git a/storagebatchoperations/getJob.js b/storagebatchoperations/getJob.js
new file mode 100644
index 0000000000..b81bf7c33c
--- /dev/null
+++ b/storagebatchoperations/getJob.js
@@ -0,0 +1,87 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+/**
+ * This application demonstrates how to perform basic operations on a Batch Operations
+ * instance with the Google Cloud Storage API.
+ *
+ * For more information, see the documentation at https://cloud.google.com/storage/docs/batch-operations/overview.
+ */
+
+function main(projectId, jobId) {
+ // [START storage_batch_get_job]
+ /**
+ * Retrieves details of a specific batch job instance.
+ *
+ * This operation is used to retrieve the detailed current state, execution status,
+ * and original configuration of a specific Batch Operation job that was previously
+ * created for a Google Cloud Storage bucket.
+ *
+ * @param {string} projectId Your Google Cloud project ID.
+ * Example: 'my-project-id'
+ * @param {string} jobId A unique identifier for this job.
+ * Example: '94d60cc1-2d95-41c5-b6e3-ff66cd3532d5'
+ */
+
+  // Imports the Storage Batch Operations client library
+ const {StorageBatchOperationsClient} =
+ require('@google-cloud/storagebatchoperations').v1;
+
+ // Instantiates a client
+ const client = new StorageBatchOperationsClient();
+
+ async function getJob() {
+ const name = client.jobPath(projectId, 'global', jobId);
+
+ // Create the request
+ const request = {
+ name,
+ };
+
+ try {
+ // Run request
+ const [response] = await client.getJob(request);
+ console.log(`Batch job details for '${jobId}':`);
+ console.log(`Name: ${response.name}`);
+ console.log(`State: ${response.state}`);
+ console.log(
+ `Create Time: ${new Date(response.createTime.seconds * 1000).toISOString()}`
+ );
+ } catch (error) {
+ console.error(
+ `Error retrieving batch jobs for jobId ${jobId}:`,
+ error.message
+ );
+
+ if (error.code === 5) {
+ // NOT_FOUND (gRPC code 5) error can occur if the batch job does not exist.
+ console.error(
+ `Ensure the job '${jobId}' exists in project '${projectId}'.`
+ );
+ }
+ throw error;
+ }
+ }
+
+ getJob();
+ // [END storage_batch_get_job]
+}
+
+process.on('unhandledRejection', err => {
+ console.error(err.message);
+ process.exitCode = 1;
+});
+main(...process.argv.slice(2));
diff --git a/storagebatchoperations/listJobs.js b/storagebatchoperations/listJobs.js
new file mode 100644
index 0000000000..a72dbd4878
--- /dev/null
+++ b/storagebatchoperations/listJobs.js
@@ -0,0 +1,83 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+/**
+ * This application demonstrates how to perform basic operations on a Batch Operations
+ * instance with the Google Cloud Storage API.
+ *
+ * For more information, see the documentation at https://cloud.google.com/storage/docs/batch-operations/overview.
+ */
+
+function main(projectId) {
+ // [START storage_batch_list_jobs]
+ /**
+   * The List Jobs operation is used to query the status and configuration of all
+ * Storage Batch Operations jobs within a specific Google Cloud project.
+ * This feature is essential for tasks that affect a large number of objects,
+ * such as changing storage classes, deleting objects, or running custom functions
+ * on object metadata.
+ *
+ * @param {string} projectId Your Google Cloud project ID.
+ * Example: 'my-project-id'
+ */
+
+  // Imports the Storage Batch Operations client library
+ const {StorageBatchOperationsClient} =
+ require('@google-cloud/storagebatchoperations').v1;
+
+ // Instantiates a client
+ const client = new StorageBatchOperationsClient();
+
+ async function listJobs() {
+ const parent = await client.locationPath(projectId, 'global');
+
+ // Create the request
+ const request = {
+ parent,
+ };
+
+ try {
+ // Run request. The response is an array where the first element is the list of jobs.
+ const [response] = await client.listJobs(request);
+ if (response && response.length > 0) {
+ console.log(
+ `Found ${response.length} batch jobs for project: ${projectId}`
+ );
+ for (const job of response) {
+ console.log(job.name);
+ }
+ } else {
+ // Case: Successful but empty list (No batch jobs found)
+ console.log(`No batch jobs found for project: ${projectId}.`);
+ }
+ } catch (error) {
+ console.error(
+ `Error listing batch jobs for project ${projectId}:`,
+ error.message
+ );
+ throw error;
+ }
+ }
+
+ listJobs();
+ // [END storage_batch_list_jobs]
+}
+
+process.on('unhandledRejection', err => {
+ console.error(err.message);
+ process.exitCode = 1;
+});
+main(...process.argv.slice(2));
diff --git a/storagebatchoperations/package.json b/storagebatchoperations/package.json
new file mode 100644
index 0000000000..e382054dda
--- /dev/null
+++ b/storagebatchoperations/package.json
@@ -0,0 +1,22 @@
+{
+ "name": "storage-batch-operations-samples",
+ "version": "0.0.1",
+ "author": "Google Inc.",
+ "license": "Apache-2.0",
+ "description": "Examples of how to utilize the @google-cloud/storagebatchoperations library.",
+ "scripts": {
+ "test": "c8 mocha -p -j 2 system-test --timeout 600000"
+ },
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/GoogleCloudPlatform/nodejs-docs-samples.git"
+ },
+ "devDependencies": {
+ "@google-cloud/storage": "^7.17.1",
+ "@google-cloud/storagebatchoperations": "^0.1.0",
+ "c8": "^10.0.0",
+ "chai": "^4.5.0",
+ "mocha": "^10.7.0",
+ "uuid": "^10.0.0"
+ }
+ }
diff --git a/storagebatchoperations/quickstart.js b/storagebatchoperations/quickstart.js
new file mode 100644
index 0000000000..4a022041e1
--- /dev/null
+++ b/storagebatchoperations/quickstart.js
@@ -0,0 +1,88 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+/**
+ * This application demonstrates how to perform basic operations on a Batch Operations
+ * instance with the Google Cloud Storage API.
+ *
+ * For more information, see the documentation at https://cloud.google.com/storage/docs/batch-operations/overview.
+ */
+
+async function main(projectId, jobId) {
+ // [START storage_batch_quickstart]
+
+ // Imports the Google Cloud client library
+ const {StorageBatchOperationsClient} =
+ require('@google-cloud/storagebatchoperations').v1;
+
+ /**
+ * Retrieves details of a specific batch job instance.
+ *
+ * This operation is used to retrieve the detailed current state, execution status,
+ * and original configuration of a specific Batch Operation job that was previously
+ * created for a Google Cloud Storage bucket.
+ *
+ * @param {string} projectId Your Google Cloud project ID.
+ * Example: 'my-project-id'
+ * @param {string} jobId A unique identifier for this job.
+ * Example: '94d60cc1-2d95-41c5-b6e3-ff66cd3532d5'
+ */
+
+ // Creates a client
+ const client = new StorageBatchOperationsClient();
+
+ async function quickstart() {
+ const name = client.jobPath(projectId, 'global', jobId);
+
+ // Create the request
+ const request = {
+ name,
+ };
+
+ try {
+ // Run request
+ const [response] = await client.getJob(request);
+ console.log(`Batch job details for '${jobId}':`);
+ console.log(` Name: ${response.name}`);
+ console.log(` State: ${response.state}`);
+ console.log(
+ ` Create Time: ${new Date(response.createTime.seconds * 1000).toISOString()}`
+ );
+ } catch (error) {
+ console.error(
+ `Error retrieving batch jobs for jobId ${jobId}:`,
+ error.message
+ );
+
+ if (error.code === 5) {
+ // NOT_FOUND (gRPC code 5) error can occur if the batch job does not exist.
+ console.error(
+ `Ensure the job '${jobId}' exists in project '${projectId}'.`
+ );
+ }
+ throw error;
+ }
+ }
+ quickstart();
+ // [END storage_batch_quickstart]
+}
+
+main(...process.argv.slice(2));
+
+process.on('unhandledRejection', err => {
+ console.error(err.message);
+ process.exitCode = 1;
+});
diff --git a/storagebatchoperations/system-test/storagebatchoperations.test.js b/storagebatchoperations/system-test/storagebatchoperations.test.js
new file mode 100644
index 0000000000..608240bcc4
--- /dev/null
+++ b/storagebatchoperations/system-test/storagebatchoperations.test.js
@@ -0,0 +1,99 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+const {Storage, Bucket} = require('@google-cloud/storage');
+const cp = require('child_process');
+const {assert} = require('chai');
+const {describe, it, before, after} = require('mocha');
+const uuid = require('uuid');
+
// Runs a sample script synchronously and returns its captured stdout as text.
const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});

const projectId = process.env.GCLOUD_PROJECT;
const bucketPrefix = 'sbo-samples';
// Unique bucket name per run so concurrent CI executions cannot collide.
const bucketName = `${bucketPrefix}-${uuid.v4()}`;
const storage = new Storage({projectId});
// Idiomatic bucket handle (replaces direct `new Bucket(storage, ...)` construction).
const bucket = storage.bucket(bucketName);
const jobId = uuid.v4();
// Fully-qualified job resource name expected in each sample's output.
const jobName = `projects/${projectId}/locations/global/jobs/${jobId}`;
+
describe('Batch Operations', () => {
  before(async () => {
    // Batch operations samples require uniform bucket-level access; the bucket
    // is also created with hierarchical namespace enabled.
    await storage.createBucket(bucketName, {
      iamConfiguration: {
        uniformBucketLevelAccess: {
          enabled: true,
        },
      },
      hierarchicalNamespace: {enabled: true},
    });
  });

  after(async () => {
    await bucket.delete();
  });

  // Shared assertions for the "job details" block printed by both
  // quickstart.js and getJob.js.
  const assertJobDetails = output => {
    const detailsHeader = `Batch job details for '${jobId}':`;
    assert.match(output, new RegExp(detailsHeader));
    assert.match(output, /Name:/);
    assert.match(output, new RegExp(jobName));
    assert.match(output, /State:/);
    assert.match(output, /Create Time:/);
  };

  it('should create a job', async () => {
    const output = execSync(
      `node createJob.js ${projectId} ${jobId} ${bucketName} objectPrefix`
    );
    assert.match(output, /Created job:/);
    assert.match(output, new RegExp(jobName));
  });

  it('should list jobs', async () => {
    const output = execSync(`node listJobs.js ${projectId}`);
    assert.match(output, new RegExp(jobName));
  });

  it('should run quickstart', async () => {
    const output = execSync(`node quickstart.js ${projectId} ${jobId}`);
    assertJobDetails(output);
  });

  it('should get a job', async () => {
    const output = execSync(`node getJob.js ${projectId} ${jobId}`);
    assertJobDetails(output);
  });

  it('should cancel a job (or gracefully handle terminal state)', async () => {
    try {
      const output = execSync(`node cancelJob.js ${projectId} ${jobId}`);
      assert.match(output, /Cancelled job:/);
      assert.match(output, new RegExp(jobName));
    } catch (error) {
      // This might be expected if the job completed quickly or failed creation.
      // `error.stderr` is undefined when the process failed to spawn at all, so
      // fall back to the error message rather than throwing a masking TypeError.
      const errorMessage = (error.stderr ?? error.message ?? '').toString();
      assert.match(
        errorMessage,
        /9 FAILED_PRECONDITION: Job run.* is in a terminal state and can not be changed./
      );
    }
  });

  it('should delete a job', async () => {
    const output = execSync(`node deleteJob.js ${projectId} ${jobId}`);
    assert.match(output, /Deleted job:/);
    assert.match(output, new RegExp(jobName));
  });
});