Skip to content

Commit d457f72

Browse files
committed
Merge pull request #275 from stephenplusplus/spp--fix-storage-tests
remove bucket env var req. & stop deleting all buckets
2 parents c10073f + 17d8f77 commit d457f72

File tree

4 files changed

+78
-81
lines changed

4 files changed

+78
-81
lines changed

CONTRIBUTING.md

-1
Original file line number | Diff line number | Diff line change
@@ -20,7 +20,6 @@ $ npm test
2020
To run the regression tests, first create and configure a project in the Google Developers Console following the [instructions on how to run gcloud-node][elsewhere]. After that, set the following environment variables:
2121

2222
- **GCLOUD_TESTS_PROJECT_ID**: Developers Console project's ID (e.g. bamboo-shift-455)
23-
- **GCLOUD_TESTS_BUCKET_NAME**: The name of the bucket to use for the Cloud Storage API tests
2423
- **GCLOUD_TESTS_KEY**: The path to the JSON key file.
2524

2625
Install the [gcloud command-line tool][gcloudcli] to your machine and use it to create the indexes used in the datastore regression tests with indexes found in `regression/data/index/yaml`:

regression/bigquery.js

+10-14
Original file line number | Diff line number | Diff line change
@@ -24,16 +24,18 @@ var Dataset = require('../lib/bigquery/dataset');
2424
var env = require('./env');
2525
var fs = require('fs');
2626
var Job = require('../lib/bigquery/job');
27+
var uuid = require('node-uuid');
2728

2829
var gcloud = require('../lib')(env);
2930
var bigquery = gcloud.bigquery();
30-
var bucket = gcloud.storage().bucket();
31+
var storage = gcloud.storage();
3132

3233
describe('BigQuery', function() {
3334
var DATASET_ID = 'testDatasetId';
3435
var dataset;
3536
var TABLE_ID = 'myKittens';
3637
var table;
38+
var bucket;
3739

3840
var query = 'SELECT url FROM [publicdata:samples.github_nested] LIMIT 100';
3941

@@ -82,23 +84,17 @@ describe('BigQuery', function() {
8284
});
8385
},
8486

85-
// Create a Bucket, if necessary.
87+
// Create a Bucket.
8688
function(next) {
87-
bucket.getMetadata(function(err) {
88-
if (!err) {
89-
next();
89+
var bucketName = 'gcloud-test-bucket-temp-' + uuid.v1();
90+
storage.createBucket(bucketName, function(err, b) {
91+
if (err) {
92+
next(err);
9093
return;
9194
}
9295

93-
gcloud.storage().createBucket(bucket.name, function(err, b) {
94-
if (err) {
95-
next(err);
96-
return;
97-
}
98-
99-
bucket = b;
100-
next();
101-
});
96+
bucket = b;
97+
next();
10298
});
10399
}
104100
], done);

regression/env.js

+3-6
Original file line number | Diff line number | Diff line change
@@ -16,9 +16,7 @@
1616

1717
'use strict';
1818

19-
if (!process.env.GCLOUD_TESTS_PROJECT_ID &&
20-
!process.env.GCLOUD_TESTS_BUCKET_NAME &&
21-
!process.env.GCLOUD_TESTS_KEY) {
19+
if (!process.env.GCLOUD_TESTS_PROJECT_ID && !process.env.GCLOUD_TESTS_KEY) {
2220
var error = [
2321
'To run the regression tests, you need to set some environment variables.',
2422
'Please check the Contributing guide for instructions.'
@@ -27,7 +25,6 @@ if (!process.env.GCLOUD_TESTS_PROJECT_ID &&
2725
}
2826

2927
module.exports = {
30-
projectId: process.env.GCLOUD_TESTS_PROJECT_ID,
31-
bucketName: process.env.GCLOUD_TESTS_BUCKET_NAME,
32-
keyFilename: process.env.GCLOUD_TESTS_KEY
28+
keyFilename: process.env.GCLOUD_TESTS_KEY,
29+
projectId: process.env.GCLOUD_TESTS_PROJECT_ID
3330
};

regression/storage.js

+65-60
Original file line number | Diff line number | Diff line change
@@ -24,10 +24,13 @@ var crypto = require('crypto');
2424
var fs = require('fs');
2525
var request = require('request');
2626
var tmp = require('tmp');
27+
var uuid = require('node-uuid');
2728

2829
var env = require('./env.js');
2930
var storage = require('../lib/storage')(env);
3031

32+
var BUCKET_NAME = generateBucketName();
33+
3134
var files = {
3235
logo: {
3336
path: 'regression/data/CloudPlatform_128px_Retina.png'
@@ -37,30 +40,6 @@ var files = {
3740
}
3841
};
3942

40-
function setHash(obj, file, done) {
41-
var hash = crypto.createHash('md5');
42-
fs.createReadStream(obj[file].path)
43-
.on('data', hash.update.bind(hash))
44-
.on('end', function() {
45-
obj[file].hash = hash.digest('base64');
46-
done();
47-
});
48-
}
49-
50-
function deleteBucketsAndFiles(callback) {
51-
storage.getBuckets(function(err, buckets) {
52-
if (err) {
53-
callback(err);
54-
return;
55-
}
56-
async.map(buckets, function(bucket, next) {
57-
deleteFiles(bucket, function() {
58-
bucket.delete(next);
59-
});
60-
}, callback);
61-
});
62-
}
63-
6443
function deleteFiles(bucket, callback) {
6544
bucket.getFiles(function(err, files) {
6645
if (err) {
@@ -73,55 +52,81 @@ function deleteFiles(bucket, callback) {
7352
});
7453
}
7554

55+
function generateBucketName() {
56+
return 'gcloud-test-bucket-temp-' + uuid.v1();
57+
}
58+
59+
function setHash(obj, file, done) {
60+
var hash = crypto.createHash('md5');
61+
fs.createReadStream(obj[file].path)
62+
.on('data', hash.update.bind(hash))
63+
.on('end', function() {
64+
obj[file].hash = hash.digest('base64');
65+
done();
66+
});
67+
}
68+
7669
describe('storage', function() {
7770
var bucket;
7871

7972
before(function(done) {
80-
deleteBucketsAndFiles(function() {
81-
storage.createBucket('new' + Date.now(), function(err, newBucket) {
82-
if (err) {
83-
done(err);
84-
return;
85-
}
86-
bucket = newBucket;
87-
done();
88-
});
73+
storage.createBucket(BUCKET_NAME, function(err, newBucket) {
74+
assert.ifError(err);
75+
bucket = newBucket;
76+
done();
8977
});
9078
});
9179

92-
after(deleteBucketsAndFiles);
93-
94-
describe('creating a bucket', function() {
95-
it('should create a bucket', function(done) {
96-
storage.createBucket('a-new-bucket', function(err, bucket) {
97-
assert.ifError(err);
98-
bucket.delete(done);
99-
});
80+
after(function(done) {
81+
deleteFiles(bucket, function(err) {
82+
assert.ifError(err);
83+
bucket.delete(done);
10084
});
10185
});
10286

10387
describe('getting buckets', function() {
88+
var bucketsToCreate = [
89+
generateBucketName(), generateBucketName(), generateBucketName()
90+
];
91+
92+
before(function(done) {
93+
async.map(bucketsToCreate, storage.createBucket.bind(storage), done);
94+
});
95+
96+
after(function(done) {
97+
async.parallel(bucketsToCreate.map(function(bucket) {
98+
return function(done) {
99+
storage.bucket(bucket).delete(done);
100+
};
101+
}), done);
102+
});
103+
104104
it('should get buckets', function(done) {
105-
var bucketsToCreate = [
106-
'new' + Date.now(),
107-
'newer' + Date.now(),
108-
'newest' + Date.now()
109-
];
110-
async.map(
111-
bucketsToCreate,
112-
storage.createBucket.bind(storage),
113-
function(err) {
114-
assert.ifError(err);
115-
storage.getBuckets(function(err, buckets) {
116-
assert.equal(
117-
buckets.filter(function(bucket) {
118-
return bucketsToCreate.indexOf(bucket.name) > -1;
119-
}).length,
120-
bucketsToCreate.length
121-
);
122-
done();
123-
});
105+
storage.getBuckets(getBucketsHandler);
106+
107+
var createdBuckets = [];
108+
var retries = 0;
109+
var MAX_RETRIES = 2;
110+
111+
function getBucketsHandler(err, buckets, nextQuery) {
112+
buckets.forEach(function(bucket) {
113+
if (bucketsToCreate.indexOf(bucket.name) > -1) {
114+
createdBuckets.push(bucket);
115+
}
124116
});
117+
118+
if (createdBuckets.length < bucketsToCreate.length && nextQuery) {
119+
retries++;
120+
121+
if (retries <= MAX_RETRIES) {
122+
storage.getBuckets(nextQuery, getBucketsHandler);
123+
return;
124+
}
125+
}
126+
127+
assert.equal(createdBuckets.length, bucketsToCreate.length);
128+
done();
129+
}
125130
});
126131
});
127132

0 commit comments

Comments (0)