
Commit 2a2aa3f

datastore: update to v1beta3
This reverts commit f686198.

Parent: b299aca

30 files changed: +3298 -4024 lines

.jshintignore (+2)

@@ -0,0 +1,2 @@
+system-test/data/*
+test/testdata/*

README.md (+8 -6)

@@ -136,33 +136,35 @@ var gcloud = require('gcloud');
 // Authenticating on a per-API-basis. You don't need to do this if you auth on a
 // global basis (see Authentication section above).
 
-var dataset = gcloud.datastore.dataset({
+var datastore = gcloud.datastore({
   projectId: 'my-project',
   keyFilename: '/path/to/keyfile.json'
 });
 
-dataset.get(dataset.key(['Product', 'Computer']), function(err, entity) {
+var key = datastore.key(['Product', 'Computer']);
+
+datastore.get(key, function(err, entity) {
   console.log(err || entity);
 });
 
-// Save data to your dataset.
+// Save data to Datastore.
 var blogPostData = {
   title: 'How to make the perfect homemade pasta',
   author: 'Andrew Chilton',
   isDraft: true
 };
 
-var blogPostKey = dataset.key('BlogPost');
+var blogPostKey = datastore.key('BlogPost');
 
-dataset.save({
+datastore.save({
   key: blogPostKey,
   data: blogPostData
 }, function(err) {
   // `blogPostKey` has been updated with an ID so you can do more operations
   // with it, such as an update.
   blogPostData.isDraft = false;
 
-  dataset.save({
+  datastore.save({
     key: blogPostKey,
     data: blogPostData
   }, function(err) {
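
The user-visible change in this commit is that the `Dataset` wrapper goes away: `gcloud.datastore(...)` now returns the client directly, which is also why the `datastore/dataset` page is dropped from `docs/toc.json` below. A minimal sketch of the new entry point, assuming the global-authentication setup described earlier in the README; the project ID, keyfile path, and key values are placeholders:

```js
// Sketch only: authenticate once, globally, then create the Datastore client.
var gcloud = require('gcloud')({
  projectId: 'my-project',
  keyFilename: '/path/to/keyfile.json'
});

// v1beta2 style (removed):  var dataset = gcloud.datastore.dataset();
// v1beta3 style: the client itself exposes key(), get() and save().
var datastore = gcloud.datastore();

var key = datastore.key(['Product', 'Computer']);

datastore.get(key, function(err, entity) {
  console.log(err || entity);
});
```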

docs/toc.json (-3)

@@ -100,9 +100,6 @@
     "title": "Datastore",
     "type": "datastore",
     "nav": [{
-      "title": "Dataset",
-      "type": "datastore/dataset"
-    }, {
       "title": "Query",
       "type": "datastore/query"
     }, {

docs/troubleshooting.md (-7)

@@ -58,10 +58,3 @@ async.eachLimit(subscriptions, PARALLEL_LIMIT, deleteSubscription, function(err)
 This will only allow 10 at a time to go through, making it easier on the API to keep up with your requests.
 
 Reference Issue: [#1101](https://github.com/GoogleCloudPlatform/gcloud-node/issues/1101)
-
-
-## I cannot connect to Datastore from a Compute Engine instance.
-
-Currently, the version of Datastore our library supports (v1beta2) requires not only the `cloud-platform` auth scope, but the `userinfo.email` scope as well. When you create a VM, be sure to select both of these scopes (possibly referred to as "Cloud Datastore" and "User info") in order to access the API from gcloud-node without receiving a 401 error.
-
-Reference Issue: [#1169](https://github.com/GoogleCloudPlatform/gcloud-node/issues/1169#issuecomment-198428431)

lib/common/grpc-service.js (+8 -49)

@@ -20,13 +20,11 @@
 
 'use strict';
 
-var camelize = require('camelize');
 var googleProtoFiles = require('google-proto-files');
 var grpc = require('grpc');
 var is = require('is');
 var nodeutil = require('util');
 var path = require('path');
-var snakeize = require('snakeize');
 
 /**
  * @type {module:common/service}
@@ -169,11 +167,15 @@ function GrpcService(config, options) {
 
   for (var protoService in protoServices) {
     var protoFilePath = protoServices[protoService];
+    var grpcOpts = {
+      binaryAsBase64: true,
+      convertFieldsToCamelCase: true
+    };
 
     this.protos[protoService] = grpc.load({
       root: rootDir,
       file: path.relative(rootDir, protoFilePath)
-    }).google[service][apiVersion];
+    }, 'proto', grpcOpts).google[service][apiVersion];
   }
 }
 
@@ -239,14 +241,7 @@ GrpcService.prototype.request = function(protoOpts, reqOpts, callback) {
     grpcOpts.deadline = new Date(Date.now() + protoOpts.timeout);
   }
 
-  // snakeize and camelize are used to transform camelCase request options to
-  // snake_case. This is what ProtoBuf.js (via gRPC) expects. Similarly, the
-  // response is in snake_case, which is why we use camelize to return it to
-  // camelCase.
-  //
-  // An option will be added to gRPC to allow us to skip this step:
-  // https://github.com/grpc/grpc/issues/5005
-  service[protoOpts.method](snakeize(reqOpts), function(err, resp) {
+  service[protoOpts.method](reqOpts, grpcOpts, function(err, resp) {
     if (err) {
       if (HTTP_ERROR_CODE_MAP[err.code]) {
         var httpError = HTTP_ERROR_CODE_MAP[err.code];
@@ -257,44 +252,8 @@
       return;
     }
 
-    callback(null, GrpcService.convertBuffers_(camelize(resp)));
-  }, null, grpcOpts);
-};
-
-/**
- * Iterate over an object, finding anything that resembles a Buffer, then
- * convert it to a base64 string representation.
- *
- * @todo Replace this function: https://github.com/grpc/grpc/issues/5006
- *
- * @private
- *
- * @param {*} data - An object or array to iterate over.
- * @return {*} - The converted object.
- */
-GrpcService.convertBuffers_ = function(data) {
-  if (is.array(data)) {
-    return data.map(GrpcService.convertBuffers_);
-  }
-
-  if (is.object(data)) {
-    for (var prop in data) {
-      if (data.hasOwnProperty(prop)) {
-        var value = data[prop];
-
-        if (Buffer.isBuffer(value)) {
-          data[prop] = value.toString('base64');
-        } else if (GrpcService.isBufferLike_(value)) {
-          var arrayValue = GrpcService.objToArr_(value);
-          data[prop] = new Buffer(arrayValue).toString('base64');
-        } else {
-          data[prop] = GrpcService.convertBuffers_(value);
-        }
-      }
-    }
-  }
-
-  return data;
+    callback(null, resp);
+  });
 };
 
 /**
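
The manual snake_case/camelCase translation and the `convertBuffers_()` Buffer-to-base64 walk removed above are superseded by ProtoBuf.js options that `grpc.load()` accepts in the legacy `grpc` Node.js module. A standalone sketch of those options, with an assumed proto root and file path (not taken from this commit):

```js
var grpc = require('grpc');

// Sketch: load a proto with the same options the GrpcService constructor now
// passes, so responses come back camelCased with bytes fields as base64 strings.
var loaded = grpc.load({
  root: '/path/to/protos',                           // assumed root directory
  file: 'google/datastore/v1beta3/datastore.proto'   // assumed proto path
}, 'proto', {
  binaryAsBase64: true,           // bytes -> base64 strings (replaces convertBuffers_)
  convertFieldsToCamelCase: true  // snake_case <-> camelCase (replaces snakeize/camelize)
});

// `loaded` mirrors the proto package structure, e.g. loaded.google.datastore.v1beta3.
```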
