This repository was archived by the owner on Oct 3, 2023. It is now read-only.

Commit b74cefa

Update examples with latest OC version and format using semistandard (#371)
1 parent 608943d · commit b74cefa

File tree: 3 files changed (+68, -67)

examples/stats/exporter/package.json (+8, -5)
@@ -3,7 +3,10 @@
   "version": "0.0.8",
   "description": "OpenCensus is a toolkit for collecting application performance and behavior data.",
   "repository": "census-instrumentation/opencensus-node",
-  "scripts": {},
+  "scripts": {
+    "lint": "semistandard *.js",
+    "fix": "semistandard --fix"
+  },
   "keywords": [
     "opencensus",
     "nodejs",
@@ -20,11 +23,11 @@
     "access": "public"
   },
   "devDependencies": {
-    "gts": "^0.9.0"
+    "semistandard": "^13.0.1"
   },
   "dependencies": {
-    "@opencensus/core": "^0.0.8",
-    "@opencensus/exporter-prometheus": "^0.0.8",
-    "@opencensus/exporter-stackdriver": "^0.0.8"
+    "@opencensus/core": "^0.0.9",
+    "@opencensus/exporter-prometheus": "^0.0.9",
+    "@opencensus/exporter-stackdriver": "^0.0.9"
   }
 }
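With the new scripts in place, the example can be linted and auto-formatted locally. A minimal sketch of the intended usage, assuming the devDependencies above are installed via npm:

    npm install      # pulls in semistandard ^13.0.1 from devDependencies
    npm run lint     # runs "semistandard *.js" against the example files
    npm run fix      # lets semistandard rewrite fixable style issues in place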

examples/stats/exporter/prometheus.js (+29, -29)
@@ -1,4 +1,4 @@
-/**
+/**
  * Copyright 2018, OpenCensus Authors
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,11 +19,11 @@
  * OpenCensus to Prometheus.
  */
 
-const { globalStats, MeasureUnit, AggregationType, TagMap } = require("@opencensus/core");
-const { PrometheusStatsExporter } = require("@opencensus/exporter-prometheus");
+const { globalStats, MeasureUnit, AggregationType, TagMap } = require('@opencensus/core');
+const { PrometheusStatsExporter } = require('@opencensus/exporter-prometheus');
 
-const fs = require("fs");
-const readline = require("readline");
+const fs = require('fs');
+const readline = require('readline');
 
 // [START setup_exporter]
 // Enable OpenCensus exporters to export metrics to Prometheus Monitoring.
@@ -39,61 +39,61 @@ globalStats.registerExporter(exporter);
 
 // The latency in milliseconds
 const mLatencyMs = globalStats.createMeasureDouble(
-  "repl/latency",
+  'repl/latency',
   MeasureUnit.MS,
-  "The latency in milliseconds per REPL loop"
+  'The latency in milliseconds per REPL loop'
 );
 
 // Counts/groups the lengths of lines read in.
 const mLineLengths = globalStats.createMeasureInt64(
-  "repl/line_lengths",
+  'repl/line_lengths',
   MeasureUnit.BYTE,
-  "The distribution of line lengths"
+  'The distribution of line lengths'
 );
 
 // Create a stream to read our file
-const stream = fs.createReadStream("./test.txt");
+const stream = fs.createReadStream('./test.txt');
 
 // Create an interface to read and process our file line by line
 const lineReader = readline.createInterface({ input: stream });
 
-const methodKey = { name: "method" };
-const statusKey = { name: "status" };
+const methodKey = { name: 'method' };
+const statusKey = { name: 'status' };
 const tagKeys = [methodKey, statusKey];
 
 // Create & Register the view.
 const latencyView = globalStats.createView(
-  "demo/latency",
+  'demo/latency',
   mLatencyMs,
   AggregationType.DISTRIBUTION,
   tagKeys,
-  "The distribution of the repl latencies",
+  'The distribution of the repl latencies',
   // Latency in buckets:
-  // [>=0ms, >=25ms, >=50ms, >=75ms, >=100ms, >=200ms, >=400ms, >=600ms, >=800ms, >=1s, >=2s, >=4s, >=6s]
-  [0, 25, 50, 75, 100, 200, 400, 600, 800, 1000, 2000, 4000, 6000]
+  // [>=25ms, >=50ms, >=75ms, >=100ms, >=200ms, >=400ms, >=600ms, >=800ms, >=1s, >=2s, >=4s, >=6s]
+  [25, 50, 75, 100, 200, 400, 600, 800, 1000, 2000, 4000, 6000]
 );
 globalStats.registerView(latencyView);
 
 // Create & Register the view.
 const lineCountView = globalStats.createView(
-  "demo/lines_in",
+  'demo/lines_in',
   mLineLengths,
   AggregationType.COUNT,
   tagKeys,
-  "The number of lines from standard input"
+  'The number of lines from standard input'
 );
 globalStats.registerView(lineCountView);
 
 // Create & Register the view.
 const lineLengthView = globalStats.createView(
-  "demo/line_lengths",
+  'demo/line_lengths',
   mLineLengths,
   AggregationType.DISTRIBUTION,
   tagKeys,
-  "Groups the lengths of keys in buckets",
+  'Groups the lengths of keys in buckets',
   // Bucket Boudaries:
-  // [>=0B, >=5B, >=10B, >=15B, >=20B, >=40B, >=60B, >=80, >=100B, >=200B, >=400, >=600, >=800, >=1000]
-  [0, 5, 10, 15, 20, 40, 60, 80, 100, 200, 400, 600, 800, 1000]
+  // [>=5B, >=10B, >=15B, >=20B, >=40B, >=60B, >=80, >=100B, >=200B, >=400, >=600, >=800, >=1000]
+  [5, 10, 15, 20, 40, 60, 80, 100, 200, 400, 600, 800, 1000]
 );
 globalStats.registerView(lineLengthView);
 
@@ -102,7 +102,7 @@ let [_, startNanoseconds] = process.hrtime();
 let endNanoseconds;
 
 // REPL is the read, evaluate, print and loop
-lineReader.on("line", function(line) {
+lineReader.on('line', function (line) {
   // Read
   try {
     const processedLine = processLine(line); // Evaluate
@@ -112,8 +112,8 @@ lineReader.on("line", function(line) {
     [_, endNanoseconds] = process.hrtime();
 
     const tags = new TagMap();
-    tags.set(methodKey, { value: "REPL" });
-    tags.set(statusKey, { value: "OK" });
+    tags.set(methodKey, { value: 'REPL' });
+    tags.set(statusKey, { value: 'OK' });
 
     globalStats.record([{
       measure: mLineLengths,
@@ -130,8 +130,8 @@ lineReader.on("line", function(line) {
     console.log(err);
 
     const errTags = new TagMap();
-    errTags.set(methodKey, { value: "repl" });
-    errTags.set(statusKey, { value: "ERROR" });
+    errTags.set(methodKey, { value: 'repl' });
+    errTags.set(statusKey, { value: 'ERROR' });
     globalStats.record([{
       measure: mLatencyMs,
       value: sinceInMilliseconds(endNanoseconds, startNanoseconds)
@@ -146,7 +146,7 @@ lineReader.on("line", function(line) {
  * Takes a line and process it.
  * @param {string} line The line to process
  */
-function processLine(line) {
+function processLine (line) {
   // Currently, it just capitalizes it.
   return line.toUpperCase();
 }
@@ -156,6 +156,6 @@ function processLine(line) {
  * @param {number} endNanoseconds The end time of REPL.
 * @param {number} startNanoseconds The start time of REPL.
 */
-function sinceInMilliseconds(endNanoseconds, startNanoseconds) {
+function sinceInMilliseconds (endNanoseconds, startNanoseconds) {
   return (endNanoseconds - startNanoseconds) / 1e6;
 }
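The hunk at line 39 carries globalStats.registerExporter(exporter) as context, but the exporter construction itself sits above the changed region and is not part of this diff. For orientation, a minimal sketch of that setup; the port and startServer options are assumptions based on typical @opencensus/exporter-prometheus usage, not values taken from this commit:

    const { globalStats } = require('@opencensus/core');
    const { PrometheusStatsExporter } = require('@opencensus/exporter-prometheus');

    // Assumed options: serve metrics for scraping on http://localhost:9464/metrics
    // and start the exporter's built-in HTTP server; check the exporter's README
    // for the exact constructor options in your version.
    const exporter = new PrometheusStatsExporter({
      port: 9464,
      startServer: true
    });

    // Register the exporter with globalStats (the context line in the hunk above).
    globalStats.registerExporter(exporter);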

examples/stats/exporter/stackdriver.js (+31, -33)
@@ -19,12 +19,12 @@
  * OpenCensus to Stackdriver.
  */
 
-const { globalStats, MeasureUnit, AggregationType, TagMap } = require("@opencensus/core");
+const { globalStats, MeasureUnit, AggregationType, TagMap } = require('@opencensus/core');
 const { StackdriverStatsExporter } =
-  require("@opencensus/exporter-stackdriver");
+  require('@opencensus/exporter-stackdriver');
 
-const fs = require("fs");
-const readline = require("readline");
+const fs = require('fs');
+const readline = require('readline');
 
 // [START setup_exporter]
 // Enable OpenCensus exporters to export metrics to Stackdriver Monitoring.
@@ -38,8 +38,7 @@ const projectId = process.env.GOOGLE_PROJECT_ID;
 // GOOGLE_APPLICATION_CREDENTIALS are expected by a dependency of this code
 // Not this code itself. Checking for existence here but not retaining (as not needed)
 if (!projectId || !process.env.GOOGLE_APPLICATION_CREDENTIALS) {
-  // Unable to proceed without a Project ID
-  process.exit(1);
+  throw Error('Unable to proceed without a Project ID');
 }
 const exporter = new StackdriverStatsExporter({ projectId: projectId });
 
@@ -49,61 +48,61 @@ globalStats.registerExporter(exporter);
 
 // The latency in milliseconds
 const mLatencyMs = globalStats.createMeasureDouble(
-  "repl/latency",
+  'repl/latency',
   MeasureUnit.MS,
-  "The latency in milliseconds per REPL loop"
+  'The latency in milliseconds per REPL loop'
 );
 
 // Counts/groups the lengths of lines read in.
 const mLineLengths = globalStats.createMeasureInt64(
-  "repl/line_lengths",
+  'repl/line_lengths',
   MeasureUnit.BYTE,
-  "The distribution of line lengths"
+  'The distribution of line lengths'
 );
 
 // Create a stream to read our file
-const stream = fs.createReadStream("./test.txt");
+const stream = fs.createReadStream('./test.txt');
 
 // Create an interface to read and process our file line by line
 const lineReader = readline.createInterface({ input: stream });
 
-const methodKey = { name: "method" };
-const statusKey = { name: "status" };
+const methodKey = { name: 'method' };
+const statusKey = { name: 'status' };
 const tagKeys = [methodKey, statusKey];
 
 // Create & Register the view.
 const latencyView = globalStats.createView(
-  "demo/latency",
+  'demo/latency',
   mLatencyMs,
   AggregationType.DISTRIBUTION,
   tagKeys,
-  "The distribution of the repl latencies",
+  'The distribution of the repl latencies',
   // Latency in buckets:
-  // [>=0ms, >=25ms, >=50ms, >=75ms, >=100ms, >=200ms, >=400ms, >=600ms, >=800ms, >=1s, >=2s, >=4s, >=6s]
-  [0, 25, 50, 75, 100, 200, 400, 600, 800, 1000, 2000, 4000, 6000]
+  // [>=25ms, >=50ms, >=75ms, >=100ms, >=200ms, >=400ms, >=600ms, >=800ms, >=1s, >=2s, >=4s, >=6s]
+  [25, 50, 75, 100, 200, 400, 600, 800, 1000, 2000, 4000, 6000]
 );
 globalStats.registerView(latencyView);
 
 // Create & Register the view.
 const lineCountView = globalStats.createView(
-  "demo/lines_in",
+  'demo/lines_in',
   mLineLengths,
   AggregationType.COUNT,
   tagKeys,
-  "The number of lines from standard input"
+  'The number of lines from standard input'
 );
 globalStats.registerView(lineCountView);
 
 // Create & Register the view.
 const lineLengthView = globalStats.createView(
-  "demo/line_lengths",
+  'demo/line_lengths',
   mLineLengths,
   AggregationType.DISTRIBUTION,
   tagKeys,
-  "Groups the lengths of keys in buckets",
+  'Groups the lengths of keys in buckets',
   // Bucket Boudaries:
-  // [>=0B, >=5B, >=10B, >=15B, >=20B, >=40B, >=60B, >=80, >=100B, >=200B, >=400, >=600, >=800, >=1000]
-  [0, 5, 10, 15, 20, 40, 60, 80, 100, 200, 400, 600, 800, 1000]
+  // [>=5B, >=10B, >=15B, >=20B, >=40B, >=60B, >=80, >=100B, >=200B, >=400, >=600, >=800, >=1000]
+  [5, 10, 15, 20, 40, 60, 80, 100, 200, 400, 600, 800, 1000]
 );
 globalStats.registerView(lineLengthView);
 
@@ -112,7 +111,7 @@ let [_, startNanoseconds] = process.hrtime();
 let endNanoseconds;
 
 // REPL is the read, evaluate, print and loop
-lineReader.on("line", function(line) {
+lineReader.on('line', function (line) {
   // Read
   try {
     const processedLine = processLine(line); // Evaluate
@@ -122,8 +121,8 @@ lineReader.on("line", function(line) {
     [_, endNanoseconds] = process.hrtime();
 
     const tags = new TagMap();
-    tags.set(methodKey, { value: "REPL" });
-    tags.set(statusKey, { value: "OK" });
+    tags.set(methodKey, { value: 'REPL' });
+    tags.set(statusKey, { value: 'OK' });
 
     globalStats.record([{
       measure: mLineLengths,
@@ -134,13 +133,12 @@ lineReader.on("line", function(line) {
       measure: mLatencyMs,
       value: sinceInMilliseconds(endNanoseconds, startNanoseconds)
     }], tags);
-
   } catch (err) {
     console.log(err);
 
     const errTags = new TagMap();
-    errTags.set(methodKey, { value: "repl" });
-    errTags.set(statusKey, { value: "ERROR" });
+    errTags.set(methodKey, { value: 'repl' });
+    errTags.set(statusKey, { value: 'ERROR' });
     globalStats.record([{
       measure: mLatencyMs,
       value: sinceInMilliseconds(endNanoseconds, startNanoseconds)
@@ -157,15 +155,15 @@ lineReader.on("line", function(line) {
  * metrics that must be collected, or some risk being lost if they are recorded
  * after the last export.
  */
-setTimeout(function() {
-  console.log("Completed.");
+setTimeout(function () {
+  console.log('Completed.');
 }, 60 * 1000);
 
 /**
  * Takes a line and process it.
  * @param {string} line The line to process
 */
-function processLine(line) {
+function processLine (line) {
   // Currently, it just capitalizes it.
   return line.toUpperCase();
 }
@@ -175,6 +173,6 @@ function processLine(line) {
  * @param {number} endNanoseconds The end time of REPL.
 * @param {number} startNanoseconds The start time of REPL.
 */
-function sinceInMilliseconds(endNanoseconds, startNanoseconds) {
+function sinceInMilliseconds (endNanoseconds, startNanoseconds) {
   return (endNanoseconds - startNanoseconds) / 1e6;
 }
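The second hunk replaces the silent process.exit(1) with a thrown Error, so missing configuration now fails loudly with a stack trace instead of a bare non-zero exit. A minimal sketch of what that guard does when neither variable is set; the relative path below is a placeholder, not part of this commit:

    // With neither variable set, loading the example now throws at require time.
    delete process.env.GOOGLE_PROJECT_ID;
    delete process.env.GOOGLE_APPLICATION_CREDENTIALS;

    try {
      require('./stackdriver.js'); // placeholder path: assumes the example sits alongside this script
    } catch (err) {
      console.error(err.message); // "Unable to proceed without a Project ID"
    }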
