require(`../../system-test/_setup`);

+ const client = require(`@google-cloud/monitoring`).v3().metricServiceClient();
const path = require(`path`);

const cmd = `node metrics.js`;
const cwd = path.join(__dirname, `..`);
const customMetricId = `custom.googleapis.com/stores/daily_sales`;
const computeMetricId = `compute.googleapis.com/instance/cpu/utilization`;
+ const filter = `metric.type="${computeMetricId}"`;
const projectId = process.env.GCLOUD_PROJECT;
const resourceId = `cloudsql_database`;
@@ -50,6 +52,11 @@ test.serial(`should get a metric descriptor`, async (t) => {
  }).start();
});

+ test.serial(`should write time series data`, async (t) => {
+   const output = await runAsync(`${cmd} write`, cwd);
+   t.true(output.includes(`Done writing time series data.`));
+ });
+
test.serial(`should delete a metric descriptor`, async (t) => {
  const output = await runAsync(`${cmd} delete ${customMetricId}`, cwd);
  t.true(output.includes(`Deleted ${customMetricId}`));
@@ -64,3 +71,109 @@ test(`should get a monitored resource descriptor`, async (t) => {
  const output = await runAsync(`${cmd} get-resource ${resourceId}`, cwd);
  t.true(output.includes(`Type: ${resourceId}`));
});
+
+ test(`should read time series data`, async (t) => {
+   const [timeSeries] = await client.listTimeSeries({
+     name: client.projectPath(projectId),
+     filter: filter,
+     interval: {
+       startTime: {
+         // Limit results to the last 20 minutes
+         seconds: (Date.now() / 1000) - (60 * 20)
+       },
+       endTime: {
+         seconds: Date.now() / 1000
+       }
+     }
+   });
+   const output = await runAsync(`${cmd} read '${filter}'`, cwd);
+   timeSeries.forEach((data) => {
+     t.true(output.includes(`${data.metric.labels.instance_name}:`));
+     data.points.forEach((point) => {
+       t.true(output.includes(JSON.stringify(point.value)));
+     });
+   });
+ });
+
+ test(`should read time series data fields`, async (t) => {
+   const [timeSeries] = await client.listTimeSeries({
+     name: client.projectPath(projectId),
+     filter: filter,
+     interval: {
+       startTime: {
+         // Limit results to the last 20 minutes
+         seconds: (Date.now() / 1000) - (60 * 20)
+       },
+       endTime: {
+         seconds: Date.now() / 1000
+       }
+     },
+     // Don't return time series data, instead just return information about
+     // the metrics that match the filter
+     view: `HEADERS`
+   });
+   const output = await runAsync(`${cmd} read-fields`, cwd);
+   t.true(output.includes(`Found data points for the following instances:`));
+   timeSeries.forEach((data) => {
+     t.true(output.includes(data.metric.labels.instance_name));
+   });
+ });
+
+ test(`should read time series data aggregated`, async (t) => {
+   const [timeSeries] = await client.listTimeSeries({
+     name: client.projectPath(projectId),
+     filter: filter,
+     interval: {
+       startTime: {
+         // Limit results to the last 20 minutes
+         seconds: (Date.now() / 1000) - (60 * 20)
+       },
+       endTime: {
+         seconds: Date.now() / 1000
+       }
+     },
+     // Aggregate results per matching instance
+     aggregation: {
+       alignmentPeriod: {
+         seconds: 600
+       },
+       perSeriesAligner: `ALIGN_MEAN`
+     }
+   });
+   const output = await runAsync(`${cmd} read-aggregate`, cwd);
+   t.true(output.includes(`CPU utilization:`));
+   timeSeries.forEach((data) => {
+     t.true(output.includes(data.metric.labels.instance_name));
+     t.true(output.includes(` Now: ${data.points[0].value.doubleValue}`));
+     t.true(output.includes(` 10 min ago: ${data.points[1].value.doubleValue}`));
+   });
+ });
+
+ test(`should read time series data reduced`, async (t) => {
+   const [timeSeries] = await client.listTimeSeries({
+     name: client.projectPath(projectId),
+     filter: filter,
+     interval: {
+       startTime: {
+         // Limit results to the last 20 minutes
+         seconds: (Date.now() / 1000) - (60 * 20)
+       },
+       endTime: {
+         seconds: Date.now() / 1000
+       }
+     },
+     // Reduce results across all matching instances
+     aggregation: {
+       alignmentPeriod: {
+         seconds: 600
+       },
+       crossSeriesReducer: `REDUCE_MEAN`,
+       perSeriesAligner: `ALIGN_MEAN`
+     }
+   });
+   const reductions = timeSeries[0].points;
+   const output = await runAsync(`${cmd} read-reduce`, cwd);
+   t.true(output.includes(`Average CPU utilization across all GCE instances:`));
+   t.true(output.includes(` Last 10 min: ${reductions[0].value.doubleValue}`));
+   t.true(output.includes(` 10-20 min ago: ${reductions[1].value.doubleValue}`));
+ });
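Note on the new `write` test: it only asserts on the sample's console output, so the diff above doesn't show how the data point actually gets written. Below is a minimal, hypothetical sketch of what a `write` subcommand like the one exercised by `node metrics.js write` could look like, using the same metricServiceClient and custom metric as the tests; the `store_id` label and the `123.45` value are illustrative and not taken from this commit.

// Hypothetical sketch only, not the metrics.js from this commit.
const monitoring = require(`@google-cloud/monitoring`);
const client = monitoring.v3().metricServiceClient();

const projectId = process.env.GCLOUD_PROJECT;
const customMetricId = `custom.googleapis.com/stores/daily_sales`;

// A single data point; createTimeSeries needs an end time and a typed value.
const dataPoint = {
  interval: {
    endTime: {
      seconds: Date.now() / 1000
    }
  },
  value: {
    doubleValue: 123.45 // illustrative value
  }
};

// One time series for the custom metric, attached to the global resource.
const timeSeriesData = {
  metric: {
    type: customMetricId,
    labels: {
      store_id: `Pittsburgh` // illustrative label
    }
  },
  resource: {
    type: `global`,
    labels: {
      project_id: projectId
    }
  },
  points: [dataPoint]
};

client.createTimeSeries({
  name: client.projectPath(projectId),
  timeSeries: [timeSeriesData]
}).then(() => {
  // The test asserts on exactly this line of output.
  console.log(`Done writing time series data.`);
});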