Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
ab30342
feat(storagecontrol): Add samples for Storage Anywhere Cache
thiyaguk09 Sep 3, 2025
d5ec75f
add testcase
thiyaguk09 Sep 3, 2025
3b805ea
lint fix
thiyaguk09 Sep 3, 2025
46ee0b2
fix: test case
thiyaguk09 Sep 3, 2025
a39cd56
fix: remove cacheName
thiyaguk09 Sep 4, 2025
6afd1d2
skip test case
thiyaguk09 Sep 4, 2025
e3c1483
test case remove projectId
thiyaguk09 Sep 5, 2025
a0abee8
addressing review comments
thiyaguk09 Sep 9, 2025
4056e10
index on anywhere-cache-samples: e83ba77b addressing review comments
thiyaguk09 Oct 8, 2025
76a7024
feat(storage-control): Improve Anywhere Cache API samples
thiyaguk09 Oct 8, 2025
2c026b1
test(storagecontrol): Enhance Anywhere Cache test assertions and coverage
thiyaguk09 Oct 8, 2025
41e2ade
Fix: Remove duplicate call to disableAnywhereCache
thiyaguk09 Oct 22, 2025
13967e9
Merge branch 'main' into anywhere-cache-samples
thiyaguk09 Oct 28, 2025
7b3d9d2
Merge branch 'main' into anywhere-cache-samples
glasnt Nov 4, 2025
9fa9e73
Merge branch 'main' into anywhere-cache-samples
thiyaguk09 Nov 18, 2025
228b467
Merge branch 'main' into anywhere-cache-samples
thiyaguk09 Dec 10, 2025
8deb05a
Merge branch 'main' into anywhere-cache-samples
iennae Dec 19, 2025
2e110ef
Merge branch 'main' into anywhere-cache-samples
iennae Jan 7, 2026
b224619
Merge branch 'main' into anywhere-cache-samples
chandra-siri Mar 12, 2026
9ed3cf4
Merge branch 'main' into anywhere-cache-samples
thiyaguk09 Mar 30, 2026
108c825
Merge branch 'main' into anywhere-cache-samples
thiyaguk09 Apr 6, 2026
3a7ad1c
feat(spanner): migrate batch 2 core samples and tests (#4274)
angelcaamal Apr 6, 2026
b270bfd
Merge branch 'main' into anywhere-cache-samples
thiyaguk09 Apr 7, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
125 changes: 125 additions & 0 deletions spanner/batch-write.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,125 @@
/**
* Copyright 2024 Google LLC
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

// sample-metadata:
// title: Batch Write
// usage: node batch-write.js <INSTANCE_ID> <DATABASE_ID> <PROJECT_ID>

'use strict';

/**
 * Demonstrates Spanner batch-write (at-least-once) by streaming two mutation
 * groups to the database and reporting the commit outcome of each group.
 *
 * @param {string} instanceId - Cloud Spanner instance ID.
 * @param {string} databaseId - Cloud Spanner database ID.
 * @param {string} projectId - Google Cloud project ID.
 */
async function main(
  instanceId = 'my-instance',
  databaseId = 'my-database',
  projectId = 'my-project-id'
) {
  // [START spanner_batch_write_at_least_once]

  // Imports the Google Cloud client library
  const {Spanner, MutationGroup} = require('@google-cloud/spanner');

  /**
   * TODO(developer): Uncomment the following lines before running the sample.
   */
  // const instanceId = 'my-instance';
  // const databaseId = 'my-database';
  // const projectId = 'my-project-id';

  // Creates a client
  const spanner = new Spanner({projectId});

  // Gets a reference to a Cloud Spanner instance and database
  const database = spanner.instance(instanceId).database(databaseId);

  // Create Mutation Groups
  /**
   * Related mutations should be placed in a group, such as insert mutations for both a parent and a child row.
   * A group must contain related mutations.
   * Please see {@link https://cloud.google.com/spanner/docs/reference/rpc/google.spanner.v1#google.spanner.v1.BatchWriteRequest.MutationGroup}
   * for more details and examples.
   */
  const groupOne = new MutationGroup();
  groupOne.insert('Singers', {
    SingerId: 1,
    FirstName: 'Scarlet',
    LastName: 'Terry',
  });

  const groupTwo = new MutationGroup();
  groupTwo.insert('Singers', {
    SingerId: 2,
    FirstName: 'Marc',
  });
  groupTwo.insert('Singers', {
    SingerId: 3,
    FirstName: 'Catalina',
    LastName: 'Smith',
  });
  groupTwo.insert('Albums', {
    AlbumId: 1,
    SingerId: 2,
    AlbumTitle: 'Total Junk',
  });
  groupTwo.insert('Albums', {
    AlbumId: 2,
    SingerId: 3,
    AlbumTitle: 'Go, Go, Go',
  });

  const options = {
    transactionTag: 'batch-write-tag',
  };

  try {
    // Adapt the response stream to a Promise so the sample can await it.
    await new Promise((resolve, reject) => {
      const stream = database.batchWriteAtLeastOnce(
        [groupOne, groupTwo],
        options
      );
      stream.on('error', reject);
      stream.on('data', response => {
        const {indexes, status, commitTimestamp} = response;
        // Check the response code of each response to determine whether the mutation group(s) were applied successfully.
        if (status.code === 0) {
          const ts = Spanner.timestamp(commitTimestamp).toJSON();
          console.log(
            `Mutation group indexes ${indexes}, have been applied with commit timestamp ${ts}`
          );
        } else {
          // Mutation groups that fail to commit trigger a response with a non-zero status code.
          console.log(
            `Mutation group indexes ${indexes}, could not be applied with error code ${status.code}, and error message ${status.message}`
          );
        }
      });
      stream.on('end', () => {
        console.log('Request completed successfully');
        resolve();
      });
    });
  } catch (err) {
    console.log(err);
  } finally {
    // Always release the session pool, even if the stream errored.
    await database.close();
  }
  // [END spanner_batch_write_at_least_once]
}

main(...process.argv.slice(2));
149 changes: 149 additions & 0 deletions spanner/batch.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,149 @@
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

'use strict';

/**
 * Creates a batch (read-only) transaction, partitions a query, executes every
 * partition in parallel, and logs the total number of rows received.
 *
 * @param {string} instanceId - Cloud Spanner instance ID.
 * @param {string} databaseId - Cloud Spanner database ID.
 * @param {string} projectId - Google Cloud project ID.
 */
async function createAndExecuteQueryPartitions(
  instanceId,
  databaseId,
  projectId
) {
  // [START spanner_batch_client]
  // Imports the Google Cloud client library
  const {Spanner} = require('@google-cloud/spanner');

  /**
   * TODO(developer): Uncomment the following lines before running the sample.
   */
  // const projectId = 'my-project-id';
  // const instanceId = 'my-instance';
  // const databaseId = 'my-database';

  // Creates a client
  const spanner = new Spanner({projectId});

  // Gets a reference to a Cloud Spanner instance and database
  const database = spanner.instance(instanceId).database(databaseId);

  let batchTxn;

  try {
    [batchTxn] = await database.createBatchTransaction();

    const query = {
      sql: 'SELECT * FROM Singers',
      // DataBoost option is an optional parameter which can also be used for partition read
      // and query to execute the request via spanner independent compute resources.
      dataBoostEnabled: true,
    };

    // A Partition object is serializable and can be used from a different process.
    const [partitions] = await batchTxn.createQueryPartitions(query);
    console.log(`Successfully created ${partitions.length} query partitions.`);

    let rowCount = 0;
    // Execute every partition concurrently and tally the rows returned.
    await Promise.all(
      partitions.map(async partition => {
        const results = await batchTxn.execute(partition);
        const rows = results[0].map(row => row.toJSON());
        rowCount += rows.length;
      })
    );
    console.log(`Successfully received ${rowCount} from executed partitions.`);
  } catch (err) {
    console.error('Error executing query partitions:', err);
  } finally {
    // Release the batch transaction's session and the client's session pool.
    if (batchTxn) {
      batchTxn.close();
    }
    await database.close();
  }
  // [END spanner_batch_client]
}

/**
 * Re-creates a batch transaction from a serialized identifier (produced in a
 * different process, see createAndExecuteQueryPartitions) and executes one
 * previously created partition, logging the number of rows received.
 *
 * @param {string} instanceId - Cloud Spanner instance ID.
 * @param {string} databaseId - Cloud Spanner database ID.
 * @param {object} identifier - Serialized batch-transaction identifier.
 * @param {object} partition - Serialized partition to execute.
 * @param {string} projectId - Google Cloud project ID.
 */
async function executePartition(
  instanceId,
  databaseId,
  identifier,
  partition,
  projectId
) {
  // [START spanner_batch_execute_partitions]
  // Imports the Google Cloud client library
  const {Spanner} = require('@google-cloud/spanner');

  /**
   * TODO(developer): Uncomment the following lines before running the sample.
   */
  // const projectId = 'my-project-id';
  // const instanceId = 'my-instance';
  // const databaseId = 'my-database';
  // const identifier = {};
  // const partition = {};

  // Creates a client
  const spanner = new Spanner({
    projectId: projectId,
  });

  // Gets a reference to a Cloud Spanner instance and database
  const instance = spanner.instance(instanceId);
  const database = instance.database(databaseId);
  const transaction = database.batchTransaction(identifier);

  try {
    const [rows] = await transaction.execute(partition);
    console.log(
      `Successfully received ${rows.length} from executed partition.`
    );
  } finally {
    // Release the transaction's session and close the client so the process
    // can exit cleanly (mirrors the cleanup in
    // createAndExecuteQueryPartitions; previously the database was never
    // closed, leaking the session pool).
    transaction.close();
    await database.close();
  }
  // [END spanner_batch_execute_partitions]
}

// Command-line entry point: dispatches to the sample functions defined above.
require('yargs')
  .demandCommand(1)
  .command(
    'create-and-execute-query-partitions <instanceName> <databaseName> <projectId>',
    'Creates query partitions and executes them.',
    {},
    ({instanceName, databaseName, projectId}) =>
      createAndExecuteQueryPartitions(instanceName, databaseName, projectId)
  )
  .command(
    'execute-partition <instanceName> <databaseName> <identifier> <partition> <projectId>',
    'Executes a partition.',
    {},
    // identifier/partition arrive as JSON strings on the command line.
    ({instanceName, databaseName, identifier, partition, projectId}) =>
      executePartition(
        instanceName,
        databaseName,
        JSON.parse(identifier),
        JSON.parse(partition),
        projectId
      )
  )
  .example(
    'node $0 create-and-execute-query-partitions "my-instance" "my-database" "my-project-id"'
  )
  .example(
    'node $0 execute-partition "my-instance" "my-database" "{}" "{}" "my-project-id"'
  )
  .wrap(120)
  .recommendCommands()
  .epilogue('For more information, see https://cloud.google.com/spanner/docs')
  .strict()
  .help().argv;
Loading