Skip to content

Commit b2d4063

Browse files
authored
feat: implement manual methods for objectLocalization and productSearch on v1p3beta1 (#128)
1 parent adb9c91 commit b2d4063

File tree

2 files changed

+160
-0
lines changed

2 files changed

+160
-0
lines changed

packages/google-cloud-vision/src/helpers.js

+117
Original file line numberDiff line numberDiff line change
@@ -828,5 +828,122 @@ module.exports = apiVersion => {
828828
_createSingleFeatureMethod(features.WEB_DETECTION)
829829
);
830830

831+
/**
832+
* Annotate a single image with the result from Product Search.
833+
*
834+
* @see v1p3beta1.ImageAnnotatorClient#batchAnnotateImages
835+
* @see google.cloud.vision.v1p3beta1.AnnotateImageRequest
836+
*
837+
* @method v1p3beta1.ImageAnnotatorClient#productSearch
838+
* @param {object|string|Buffer} request A representation of the request
839+
* being sent to the Vision API. This is an
840+
* {@link google.cloud.vision.v1p3beta1.AnnotateImageRequest AnnotateImageRequest}.
841+
* For simple cases, you may also send a string (the URL or filename of
842+
* the image) or a buffer (the image itself).
843+
* @param {object} request.image A dictionary-like object representing the
844+
* image. This should have a single key (`source`, `content`).
845+
*
846+
* If the key is `source`, the value should be another object containing
847+
* `imageUri` or `filename` as a key and a string as a value.
848+
*
849+
* If the key is `content`, the value should be a Buffer.
850+
* @param {object} [callOptions] Optional parameters. You can override the
851+
 * default settings for this call, e.g., timeout, retries, pagination, etc.
852+
* @param {function(?Error, ?object)} [callback] The function which will be
853+
* called with the result of the API call.
854+
*
855+
* The second parameter to the callback is an object representing
856+
* [BatchAnnotateImagesResponse]{@link BatchAnnotateImagesResponse}.
857+
* @returns {Promise} The promise which resolves to an array. The first
858+
* element of the array is an object representing
859+
* [BatchAnnotateImagesResponse]{@link BatchAnnotateImagesResponse}.
860+
* The promise has a method named "cancel" which cancels the ongoing API
861+
* call.
862+
*
863+
* @example
864+
* const vision = require('@google-cloud/vision').v1p3beta1;
865+
* const client = new vision.ImageAnnotatorClient();
866+
*
867+
* const request = {
868+
* image: {
869+
* source: {imageUri: 'gs://path/to/image.jpg'}
870+
* }
871+
* };
872+
*
873+
* client
874+
 * .productSearch(request)
875+
* .then(response => {
876+
* // doThingsWith(response);
877+
* })
878+
* .catch(err => {
879+
* console.error(err);
880+
* });
881+
*/
882+
if (features.PRODUCT_SEARCH !== undefined) {
883+
methods.productSearch = promisify(
884+
_createSingleFeatureMethod(features.PRODUCT_SEARCH)
885+
);
886+
}
887+
888+
/**
889+
* Annotate a single image with localization vectors.
890+
*
891+
* @see v1p3beta1.ImageAnnotatorClient#batchAnnotateImages
892+
* @see google.cloud.vision.v1p3beta1.AnnotateImageRequest
893+
*
894+
* @method v1p3beta1.ImageAnnotatorClient#objectLocalization
895+
* @param {object|string|Buffer} request A representation of the request
896+
* being sent to the Vision API. This is an
897+
 * {@link google.cloud.vision.v1p3beta1.AnnotateImageRequest AnnotateImageRequest}.
898+
* For simple cases, you may also send a string (the URL or filename of
899+
* the image) or a buffer (the image itself).
900+
* @param {object} request.image A dictionary-like object representing the
901+
* image. This should have a single key (`source`, `content`).
902+
*
903+
* If the key is `source`, the value should be another object containing
904+
* `imageUri` or `filename` as a key and a string as a value.
905+
*
906+
* If the key is `content`, the value should be a Buffer.
907+
* @param {object} [callOptions] Optional parameters. You can override the
908+
 * default settings for this call, e.g., timeout, retries, pagination,
909+
* etc. See [gax.CallOptions]{@link https://googleapis.github.io/gax-nodejs/global.html#CallOptions}
910+
* for the details.
911+
* @param {function(?Error, ?object)} [callback] The function which will be
912+
* called with the result of the API call.
913+
*
914+
* The second parameter to the callback is an object representing
915+
* [BatchAnnotateImagesResponse]{@link BatchAnnotateImagesResponse}.
916+
* @returns {Promise} The promise which resolves to an array. The first
917+
* element of the array is an object representing
918+
* [BatchAnnotateImagesResponse]{@link BatchAnnotateImagesResponse}.
919+
* The promise has a method named "cancel" which cancels the ongoing API
920+
* call.
921+
*
922+
* @example
923+
* // Object localization is only available in v1p3beta1.
924+
* const vision = require('@google-cloud/vision').v1p3beta1;
925+
* const client = new vision.ImageAnnotatorClient();
926+
*
927+
* const request = {
928+
* image: {
929+
* source: {imageUri: 'gs://path/to/image.jpg'}
930+
* }
931+
* };
932+
*
933+
* client
934+
* .objectLocalization(request)
935+
* .then(response => {
936+
* // doThingsWith(response);
937+
* })
938+
* .catch(err => {
939+
* console.error(err);
940+
* });
941+
*/
942+
if (features.OBJECT_LOCALIZATION !== undefined) {
943+
methods.objectLocalization = promisify(
944+
_createSingleFeatureMethod(features.OBJECT_LOCALIZATION)
945+
);
946+
}
947+
831948
return methods;
832949
};

packages/google-cloud-vision/test/helpers.test.js

+43
Original file line numberDiff line numberDiff line change
@@ -478,5 +478,48 @@ describe('Vision helper methods', () => {
478478
assert(ex.message.indexOf('Setting explicit') > -1);
479479
});
480480
});
481+
482+
it('creates and promisifies methods that are available in certain versions', () => {
483+
const client = new vision.v1p3beta1.ImageAnnotatorClient();
484+
let request = {
485+
image: {
486+
source: {
487+
imageUri: 'https://cloud.google.com/vision/docs/images/bicycle.jpg',
488+
},
489+
},
490+
};
491+
let batchAnnotate = sandbox.stub(client, 'batchAnnotateImages');
492+
batchAnnotate.callsArgWith(2, undefined, {
493+
responses: [
494+
{
495+
localizedObjectAnnotations: [{dummy: 'response'}],
496+
},
497+
],
498+
});
499+
500+
client
501+
.productSearch(request)
502+
.then(r => {
503+
let response = r[0];
504+
505+
assert.deepEqual(response, {
506+
localizedObjectAnnotations: [{dummy: 'response'}],
507+
});
508+
509+
assert(batchAnnotate.callCount === 1);
510+
assert(batchAnnotate.calledWith({requests: [request]}));
511+
})
512+
.catch(assert.ifError);
513+
});
514+
515+
it('throws an error if trying to invoke a method not available in current version', () => {
516+
// Use v1 version of client.
517+
const client = new vision.v1.ImageAnnotatorClient(CREDENTIALS);
518+
519+
assert.throws(() => {
520+
// Object localization is only available for v1p3beta1.
521+
client.objectLocalization({});
522+
}, 'TypeError: client.objectLocalization is not a function');
523+
});
481524
});
482525
});

0 commit comments

Comments
 (0)