 import com.google.api.client.http.HttpRequestInitializer;
 import com.google.api.client.json.JsonFactory;
 import com.google.api.client.json.jackson2.JacksonFactory;
-import com.google.api.services.language.v1beta1.CloudNaturalLanguage;
-import com.google.api.services.language.v1beta1.CloudNaturalLanguageScopes;
-import com.google.api.services.language.v1beta1.model.AnalyzeEntitiesRequest;
-import com.google.api.services.language.v1beta1.model.AnalyzeEntitiesResponse;
-import com.google.api.services.language.v1beta1.model.AnalyzeSentimentRequest;
-import com.google.api.services.language.v1beta1.model.AnalyzeSentimentResponse;
-import com.google.api.services.language.v1beta1.model.AnnotateTextRequest;
-import com.google.api.services.language.v1beta1.model.AnnotateTextResponse;
-import com.google.api.services.language.v1beta1.model.Document;
-import com.google.api.services.language.v1beta1.model.Entity;
-import com.google.api.services.language.v1beta1.model.Features;
-import com.google.api.services.language.v1beta1.model.Sentiment;
-import com.google.api.services.language.v1beta1.model.Token;
+import com.google.api.services.language.v1.CloudNaturalLanguage;
+import com.google.api.services.language.v1.CloudNaturalLanguageScopes;
+import com.google.api.services.language.v1.model.AnalyzeEntitiesRequest;
+import com.google.api.services.language.v1.model.AnalyzeEntitiesResponse;
+import com.google.api.services.language.v1.model.AnalyzeSentimentRequest;
+import com.google.api.services.language.v1.model.AnalyzeSentimentResponse;
+import com.google.api.services.language.v1.model.AnalyzeSyntaxRequest;
+import com.google.api.services.language.v1.model.AnalyzeSyntaxResponse;
+import com.google.api.services.language.v1.model.AnnotateTextRequest;
+import com.google.api.services.language.v1.model.AnnotateTextResponse;
+import com.google.api.services.language.v1.model.Document;
+import com.google.api.services.language.v1.model.Entity;
+import com.google.api.services.language.v1.model.EntityMention;
+import com.google.api.services.language.v1.model.Features;
+import com.google.api.services.language.v1.model.Sentiment;
+import com.google.api.services.language.v1.model.Token;
 
 import java.io.IOException;
 import java.io.PrintStream;
@@ -99,6 +102,13 @@ public static void printEntities(PrintStream out, List<Entity> entities) {
           out.printf("\tMetadata: %s = %s\n", metadata.getKey(), metadata.getValue());
         }
       }
+      if (entity.getMentions() != null) {
+        for (EntityMention mention : entity.getMentions()) {
+          for (Map.Entry<String, Object> mentionSetMember : mention.entrySet()) {
+            out.printf("\tMention: %s = %s\n", mentionSetMember.getKey(), mentionSetMember.getValue());
+          }
+        }
+      }
     }
   }
 
@@ -112,7 +122,7 @@ public static void printSentiment(PrintStream out, Sentiment sentiment) {
     }
     out.println("Found sentiment.");
     out.printf("\tMagnitude: %.3f\n", sentiment.getMagnitude());
-    out.printf("\tPolarity: %.3f\n", sentiment.getPolarity());
+    out.printf("\tScore: %.3f\n", sentiment.getScore());
   }
 
   public static void printSyntax(PrintStream out, List<Token> tokens) {
@@ -127,6 +137,17 @@ public static void printSyntax(PrintStream out, List<Token> tokens) {
       out.printf("\tBeginOffset: %d\n", token.getText().getBeginOffset());
       out.printf("Lemma: %s\n", token.getLemma());
       out.printf("PartOfSpeechTag: %s\n", token.getPartOfSpeech().getTag());
+      out.printf("\tAspect: %s\n", token.getPartOfSpeech().getAspect());
+      out.printf("\tCase: %s\n", token.getPartOfSpeech().getCase());
+      out.printf("\tForm: %s\n", token.getPartOfSpeech().getForm());
+      out.printf("\tGender: %s\n", token.getPartOfSpeech().getGender());
+      out.printf("\tMood: %s\n", token.getPartOfSpeech().getMood());
+      out.printf("\tNumber: %s\n", token.getPartOfSpeech().getNumber());
+      out.printf("\tPerson: %s\n", token.getPartOfSpeech().getPerson());
+      out.printf("\tProper: %s\n", token.getPartOfSpeech().getProper());
+      out.printf("\tReciprocity: %s\n", token.getPartOfSpeech().getReciprocity());
+      out.printf("\tTense: %s\n", token.getPartOfSpeech().getTense());
+      out.printf("\tVoice: %s\n", token.getPartOfSpeech().getVoice());
       out.println("DependencyEdge");
       out.printf("\tHeadTokenIndex: %d\n", token.getDependencyEdge().getHeadTokenIndex());
       out.printf("\tLabel: %s\n", token.getDependencyEdge().getLabel());
@@ -195,15 +216,13 @@ public Sentiment analyzeSentiment(String text) throws IOException {
    * Gets {@link Token}s from the string {@code text}.
    */
   public List<Token> analyzeSyntax(String text) throws IOException {
-    AnnotateTextRequest request =
-        new AnnotateTextRequest()
+    AnalyzeSyntaxRequest request =
+        new AnalyzeSyntaxRequest()
             .setDocument(new Document().setContent(text).setType("PLAIN_TEXT"))
-            .setFeatures(new Features().setExtractSyntax(true))
             .setEncodingType("UTF16");
-    CloudNaturalLanguage.Documents.AnnotateText analyze =
-        languageApi.documents().annotateText(request);
-
-    AnnotateTextResponse response = analyze.execute();
+    CloudNaturalLanguage.Documents.AnalyzeSyntax analyze =
+        languageApi.documents().analyzeSyntax(request);
+    AnalyzeSyntaxResponse response = analyze.execute();
     return response.getTokens();
   }
 }
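
For context, the sketch below shows how the migrated helpers might be exercised end to end. It is only an illustration of the change, not part of the diff: the surrounding sample class name (`Analyze` here), its constructor, and the `getLanguageService()` factory are assumptions about the rest of the file and do not appear in the hunks above.

// Usage sketch; only analyzeSyntax(), analyzeSentiment(), and the print helpers
// shown in the diff above are confirmed by this change.
public static void main(String[] args) throws Exception {
  // Hypothetical: the sample is assumed to build its CloudNaturalLanguage client
  // elsewhere in the file and pass it into the constructor.
  Analyze app = new Analyze(getLanguageService());

  // v1: documents().analyzeSyntax() replaces annotateText() with setExtractSyntax(true).
  List<Token> tokens = app.analyzeSyntax("President Obama spoke at the United Nations.");

  // printSyntax() now also reports the extra PartOfSpeech fields (Aspect, Case, Gender, ...).
  printSyntax(System.out, tokens);

  // printSentiment() prints Score in place of the v1beta1 Polarity field.
  printSentiment(System.out, app.analyzeSentiment("I love this sample!"));
}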