@@ -21,7 +21,7 @@ function analyzeSentimentOfText (text) {
   const Language = require('@google-cloud/language');
 
   // Instantiates a client
-  const language = Language();
+  const language = Language({ apiVersion: 'v1beta2' });
 
   // The text to analyze, e.g. "Hello, world!"
   // const text = 'Hello, world!';
@@ -32,9 +32,17 @@ function analyzeSentimentOfText (text) {
   // Detects the sentiment of the document
   document.detectSentiment()
     .then((results) => {
-      const sentiment = results[0];
-      console.log(`Score: ${sentiment.score}`);
-      console.log(`Magnitude: ${sentiment.magnitude}`);
+      const sentiment = results[1].documentSentiment;
+      console.log(`Document sentiment:`)
+      console.log(`  Score: ${sentiment.score}`);
+      console.log(`  Magnitude: ${sentiment.magnitude}`);
+
+      const sentences = results[1].sentences;
+      sentences.forEach((sentence) => {
+        console.log(`Sentence: ${sentence.text.content}`);
+        console.log(`  Score: ${sentence.sentiment.score}`);
+        console.log(`  Magnitude: ${sentence.sentiment.magnitude}`);
+      });
     })
     .catch((err) => {
       console.error('ERROR:', err);
@@ -49,7 +57,7 @@ function analyzeSentimentInFile (bucketName, fileName) {
   const Storage = require('@google-cloud/storage');
 
   // Instantiates the clients
-  const language = Language();
+  const language = Language({ apiVersion: 'v1beta2' });
   const storage = Storage();
 
   // The name of the bucket where the file resides, e.g. "my-bucket"
@@ -67,9 +75,17 @@ function analyzeSentimentInFile (bucketName, fileName) {
   // Detects the sentiment of the document
   document.detectSentiment()
     .then((results) => {
-      const sentiment = results[0];
-      console.log(`Score: ${sentiment.score}`);
-      console.log(`Magnitude: ${sentiment.magnitude}`);
+      const sentiment = results[1].documentSentiment;
+      console.log(`Document sentiment:`)
+      console.log(`  Score: ${sentiment.score}`);
+      console.log(`  Magnitude: ${sentiment.magnitude}`);
+
+      const sentences = results[1].sentences;
+      sentences.forEach((sentence) => {
+        console.log(`Sentence: ${sentence.text.content}`);
+        console.log(`  Score: ${sentence.sentiment.score}`);
+        console.log(`  Magnitude: ${sentence.sentiment.magnitude}`);
+      });
     })
     .catch((err) => {
       console.error('ERROR:', err);
@@ -83,7 +99,7 @@ function analyzeEntitiesOfText (text) {
   const Language = require('@google-cloud/language');
 
   // Instantiates a client
-  const language = Language();
+  const language = Language({ apiVersion: 'v1beta2' });
 
   // The text to analyze, e.g. "Hello, world!"
   // const text = 'Hello, world!';
@@ -94,12 +110,15 @@ function analyzeEntitiesOfText (text) {
   // Detects entities in the document
   document.detectEntities()
     .then((results) => {
-      const entities = results[0];
+      const entities = results[1].entities;
 
       console.log('Entities:');
       entities.forEach((entity) => {
         console.log(entity.name);
         console.log(` - Type: ${entity.type}, Salience: ${entity.salience}`);
+        if (entity.metadata && entity.metadata.wikipedia_url) {
+          console.log(` - Wikipedia URL: ${entity.metadata.wikipedia_url}`);
+        }
       });
     })
     .catch((err) => {
@@ -115,7 +134,7 @@ function analyzeEntitiesInFile (bucketName, fileName) {
   const Storage = require('@google-cloud/storage');
 
   // Instantiates the clients
-  const language = Language();
+  const language = Language({ apiVersion: 'v1beta2' });
   const storage = Storage();
 
   // The name of the bucket where the file resides, e.g. "my-bucket"
@@ -139,6 +158,9 @@ function analyzeEntitiesInFile (bucketName, fileName) {
       entities.forEach((entity) => {
         console.log(entity.name);
         console.log(` - Type: ${entity.type}, Salience: ${entity.salience}`);
+        if (entity.metadata && entity.metadata.wikipedia_url) {
+          console.log(` - Wikipedia URL: ${entity.metadata.wikipedia_url}`);
+        }
       });
     })
     .catch((err) => {
@@ -153,7 +175,7 @@ function analyzeSyntaxOfText (text) {
   const Language = require('@google-cloud/language');
 
   // Instantiates a client
-  const language = Language();
+  const language = Language({ apiVersion: 'v1beta2' });
 
   // The text to analyze, e.g. "Hello, world!"
   // const text = 'Hello, world!';
@@ -168,7 +190,8 @@ function analyzeSyntaxOfText (text) {
 
       console.log('Parts of speech:');
       syntax.forEach((part) => {
-        console.log(`${part.partOfSpeech.tag}:\t ${part.text.content}`);
+        console.log(`${part.partOfSpeech.tag}: ${part.text.content}`);
+        console.log(`Morphology:`, part.partOfSpeech);
       });
     })
     .catch((err) => {
@@ -184,7 +207,7 @@ function analyzeSyntaxInFile (bucketName, fileName) {
   const Storage = require('@google-cloud/storage');
 
   // Instantiates the clients
-  const language = Language();
+  const language = Language({ apiVersion: 'v1beta2' });
   const storage = Storage();
 
   // The name of the bucket where the file resides, e.g. "my-bucket"
@@ -206,7 +229,8 @@ function analyzeSyntaxInFile (bucketName, fileName) {
 
       console.log('Parts of speech:');
       syntax.forEach((part) => {
-        console.log(`${part.partOfSpeech.tag}:\t ${part.text.content}`);
+        console.log(`${part.partOfSpeech.tag}: ${part.text.content}`);
+        console.log(`Morphology:`, part.partOfSpeech);
       });
     })
     .catch((err) => {
@@ -215,6 +239,87 @@ function analyzeSyntaxInFile (bucketName, fileName) {
   // [END language_syntax_file]
 }
 
+function analyzeEntitySentimentOfText (text) {
+  // [START language_entity_sentiment_string]
+  // Imports the Google Cloud client library
+  const Language = require('@google-cloud/language').v1beta2();
+
+  // Instantiates a client
+  const language = Language.languageServiceClient();
+
+  // The text to analyze, e.g. "Hello, world!"
+  // const text = 'Hello, world!';
+
+  // Configure a request containing a string
+  const request = {
+    document: {
+      type: 'PLAIN_TEXT',
+      content: text
+    }
+  };
+
+  // Detects sentiment of entities in the document
+  language.analyzeEntitySentiment(request)
+    .then((results) => {
+      const entities = results[0].entities;
+
+      console.log(`Entities and sentiments:`)
+      entities.forEach((entity) => {
+        console.log(`  Name: ${entity.name}`);
+        console.log(`  Type: ${entity.type}`);
+        console.log(`  Score: ${entity.sentiment.score}`);
+        console.log(`  Magnitude: ${entity.sentiment.magnitude}`);
+      });
+    })
+    .catch((err) => {
+      console.error('ERROR:', err);
+    });
+  // [END language_entity_sentiment_string]
+}
+
+function analyzeEntitySentimentInFile (bucketName, fileName) {
+  // [START language_entity_sentiment_file]
+  // Imports the Google Cloud client libraries
+  const Language = require('@google-cloud/language').v1beta2();
+  const Storage = require('@google-cloud/storage');
+
+  // Instantiates the clients
+  const language = Language.languageServiceClient();
+  const storage = Storage();
+
+  // The name of the bucket where the file resides, e.g. "my-bucket"
+  // const bucketName = 'my-bucket';
+
+  // The name of the file to analyze, e.g. "file.txt"
+  // const fileName = 'file.txt';
+
+  // Configure a request containing a string
+  const request = {
+    document: {
+      type: 'PLAIN_TEXT',
+      gcsContentUri: `gs://${bucketName}/${fileName}`
+    }
+  };
+
+  // Detects sentiment of entities in the document
+  language.analyzeEntitySentiment(request)
+    .then((results) => {
+      const entities = results[0].entities;
+
+      console.log(`Entities and sentiments:`)
+      entities.forEach((entity) => {
+        console.log(`  Name: ${entity.name}`);
+        console.log(`  Type: ${entity.type}`);
+        console.log(`  Score: ${entity.sentiment.score}`);
+        console.log(`  Magnitude: ${entity.sentiment.magnitude}`);
+      });
+    })
+    .catch((err) => {
+      console.error('ERROR:', err);
+    });
+  // [END language_entity_sentiment_file]
+}
+
 require(`yargs`)
   .demand(1)
   .command(
@@ -253,12 +358,26 @@ require(`yargs`)
     {},
     (opts) => analyzeSyntaxInFile(opts.bucketName, opts.fileName)
   )
+  .command(
+    `entity-sentiment-text <text>`,
+    `Detects sentiment of the entities in a string.`,
+    {},
+    (opts) => analyzeEntitySentimentOfText(opts.text)
+  )
+  .command(
+    `entity-sentiment-file <bucketName> <fileName>`,
+    `Detects sentiment of the entities in a file in Google Cloud Storage.`,
+    {},
+    (opts) => analyzeEntitySentimentInFile(opts.bucketName, opts.fileName)
+  )
   .example(`node $0 sentiment-text "President Obama is speaking at the White House."`)
   .example(`node $0 sentiment-file my-bucket file.txt`, `Detects sentiment in gs://my-bucket/file.txt`)
   .example(`node $0 entities-text "President Obama is speaking at the White House."`)
   .example(`node $0 entities-file my-bucket file.txt`, `Detects entities in gs://my-bucket/file.txt`)
   .example(`node $0 syntax-text "President Obama is speaking at the White House."`)
   .example(`node $0 syntax-file my-bucket file.txt`, `Detects syntax in gs://my-bucket/file.txt`)
+  .example(`node $0 entity-sentiment-text "President Obama is speaking at the White House."`)
+  .example(`node $0 entity-sentiment-file my-bucket file.txt`, `Detects sentiment of entities in gs://my-bucket/file.txt`)
   .wrap(120)
   .recommendCommands()
   .epilogue(`For more information, see https://cloud.google.com/natural-language/docs`)