-
Notifications
You must be signed in to change notification settings - Fork 2k
Expand file tree
/
Copy pathquickstart.test.js
More file actions
92 lines (79 loc) · 2.76 KB
/
quickstart.test.js
File metadata and controls
92 lines (79 loc) · 2.76 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
// Copyright 2017 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
'use strict';
// Test framework and Google Cloud client libraries.
const {assert} = require('chai');
const {describe, it, beforeEach, afterEach} = require('mocha');
const cp = require('child_process');
const {v4} = require('uuid');
const dataproc = require('@google-cloud/dataproc');
const {Storage} = require('@google-cloud/storage');
// Unique suffix so concurrent CI runs don't collide on resource names.
const myUuid = v4();
const region = 'us-central1';
const clusterName = `node-qs-test-${myUuid}`;
const bucketName = `node-dataproc-qs-test-${myUuid}`;
// GCP project under test; must be set in the environment by the CI harness.
const projectId = process.env.PROJECT_ID;
// PySpark job file uploaded to GCS and submitted by quickstart.js.
const jobFileName = 'sum.py';
const jobFilePath = `gs://${bucketName}/${jobFileName}`;
// NOTE(review): despite the name, this snippet computes a sum (reduce with +),
// not a sort — consider renaming to sumCode.
const sortCode =
  'import pyspark\n' +
  'sc = pyspark.SparkContext()\n' +
  'rdd = sc.parallelize((1,2,3,4,5))\n' +
  'sum = rdd.reduce(lambda x, y: x + y)\n';
// Dataproc cluster operations require the region-specific API endpoint.
const clusterClient = new dataproc.v1.ClusterControllerClient({
  apiEndpoint: `${region}-dataproc.googleapis.com`,
});
const storage = new Storage();
// Run a shell command synchronously and return its stdout as a UTF-8 string.
const execSync = cmd =>
  cp.execSync(cmd, {
    encoding: 'utf-8',
  });
// presumably a retry back-off helper keyed off the mocha test object — see ./util
const {delay} = require('./util');
describe('execute the quickstart', () => {
  // Create the staging bucket and upload the PySpark job the quickstart
  // submits. Runs before every attempt, including mocha retries.
  beforeEach(async () => {
    const [bucket] = await storage.createBucket(bucketName);
    await bucket.file(jobFileName).save(sortCode);
  });

  it('should execute the quickstart', async function () {
    this.retries(4);
    // Back off before retried attempts to ride out transient quota/API flakes.
    await delay(this.test);
    const stdout = execSync(
      `node quickstart.js "${projectId}" "${region}" "${clusterName}" "${jobFilePath}"`
    );
    // quickstart.js creates a cluster, runs the job, and deletes the cluster;
    // assert that each phase reported success.
    assert.match(stdout, /Cluster created successfully/);
    assert.match(stdout, /Job finished successfully/);
    assert.match(stdout, /successfully deleted/);
  });

  // Best-effort cleanup. Each resource gets its own try/catch so a failure
  // deleting the bucket can never skip cluster cleanup (the original single
  // try block would leak a billable Dataproc cluster in that case).
  afterEach(async () => {
    try {
      await storage.bucket(bucketName).file(jobFileName).delete();
      await storage.bucket(bucketName).delete();
    } catch (err) {
      console.error('Cannot clean up bucket:', err);
    }
    try {
      // The cluster normally deletes itself inside quickstart.js; only remove
      // it here if a failed run left it behind.
      const [clusters] = await clusterClient.listClusters({
        projectId,
        region,
      });
      for (const cluster of clusters) {
        if (cluster.clusterName === clusterName) {
          await clusterClient.deleteCluster({
            projectId,
            region,
            clusterName,
          });
          break;
        }
      }
    } catch (err) {
      console.error('Cannot clean up cluster:', err);
    }
  });
});