Skip to content
This repository was archived by the owner on Oct 3, 2023. It is now read-only.

Update examples with latest OC version and format using semistandard #371

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 8 additions & 5 deletions examples/stats/exporter/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,10 @@
"version": "0.0.8",
"description": "OpenCensus is a toolkit for collecting application performance and behavior data.",
"repository": "census-instrumentation/opencensus-node",
"scripts": {},
"scripts": {
"lint": "semistandard *.js",
"fix": "semistandard --fix"
},
"keywords": [
"opencensus",
"nodejs",
Expand All @@ -20,11 +23,11 @@
"access": "public"
},
"devDependencies": {
"gts": "^0.9.0"
"semistandard": "^13.0.1"
},
"dependencies": {
"@opencensus/core": "^0.0.8",
"@opencensus/exporter-prometheus": "^0.0.8",
"@opencensus/exporter-stackdriver": "^0.0.8"
"@opencensus/core": "^0.0.9",
"@opencensus/exporter-prometheus": "^0.0.9",
"@opencensus/exporter-stackdriver": "^0.0.9"
}
}
58 changes: 29 additions & 29 deletions examples/stats/exporter/prometheus.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
/**
/**
* Copyright 2018, OpenCensus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
Expand All @@ -19,11 +19,11 @@
* OpenCensus to Prometheus.
*/

const { globalStats, MeasureUnit, AggregationType, TagMap } = require("@opencensus/core");
const { PrometheusStatsExporter } = require("@opencensus/exporter-prometheus");
const { globalStats, MeasureUnit, AggregationType, TagMap } = require('@opencensus/core');
const { PrometheusStatsExporter } = require('@opencensus/exporter-prometheus');

const fs = require("fs");
const readline = require("readline");
const fs = require('fs');
const readline = require('readline');

// [START setup_exporter]
// Enable OpenCensus exporters to export metrics to Prometheus Monitoring.
Expand All @@ -39,61 +39,61 @@ globalStats.registerExporter(exporter);

// The latency in milliseconds
const mLatencyMs = globalStats.createMeasureDouble(
"repl/latency",
'repl/latency',
MeasureUnit.MS,
"The latency in milliseconds per REPL loop"
'The latency in milliseconds per REPL loop'
);

// Counts/groups the lengths of lines read in.
const mLineLengths = globalStats.createMeasureInt64(
"repl/line_lengths",
'repl/line_lengths',
MeasureUnit.BYTE,
"The distribution of line lengths"
'The distribution of line lengths'
);

// Create a stream to read our file
const stream = fs.createReadStream("./test.txt");
const stream = fs.createReadStream('./test.txt');

// Create an interface to read and process our file line by line
const lineReader = readline.createInterface({ input: stream });

const methodKey = { name: "method" };
const statusKey = { name: "status" };
const methodKey = { name: 'method' };
const statusKey = { name: 'status' };
const tagKeys = [methodKey, statusKey];

// Create & Register the view.
const latencyView = globalStats.createView(
"demo/latency",
'demo/latency',
mLatencyMs,
AggregationType.DISTRIBUTION,
tagKeys,
"The distribution of the repl latencies",
'The distribution of the repl latencies',
// Latency in buckets:
// [>=0ms, >=25ms, >=50ms, >=75ms, >=100ms, >=200ms, >=400ms, >=600ms, >=800ms, >=1s, >=2s, >=4s, >=6s]
[0, 25, 50, 75, 100, 200, 400, 600, 800, 1000, 2000, 4000, 6000]
// [>=25ms, >=50ms, >=75ms, >=100ms, >=200ms, >=400ms, >=600ms, >=800ms, >=1s, >=2s, >=4s, >=6s]
[25, 50, 75, 100, 200, 400, 600, 800, 1000, 2000, 4000, 6000]
);
globalStats.registerView(latencyView);

// Create & Register the view.
const lineCountView = globalStats.createView(
"demo/lines_in",
'demo/lines_in',
mLineLengths,
AggregationType.COUNT,
tagKeys,
"The number of lines from standard input"
'The number of lines from standard input'
);
globalStats.registerView(lineCountView);

// Create & Register the view.
const lineLengthView = globalStats.createView(
"demo/line_lengths",
'demo/line_lengths',
mLineLengths,
AggregationType.DISTRIBUTION,
tagKeys,
"Groups the lengths of keys in buckets",
'Groups the lengths of keys in buckets',
  // Bucket Boundaries:
// [>=0B, >=5B, >=10B, >=15B, >=20B, >=40B, >=60B, >=80, >=100B, >=200B, >=400, >=600, >=800, >=1000]
[0, 5, 10, 15, 20, 40, 60, 80, 100, 200, 400, 600, 800, 1000]
// [>=5B, >=10B, >=15B, >=20B, >=40B, >=60B, >=80, >=100B, >=200B, >=400, >=600, >=800, >=1000]
[5, 10, 15, 20, 40, 60, 80, 100, 200, 400, 600, 800, 1000]
);
globalStats.registerView(lineLengthView);

Expand All @@ -102,7 +102,7 @@ let [_, startNanoseconds] = process.hrtime();
let endNanoseconds;

// REPL is the read, evaluate, print and loop
lineReader.on("line", function(line) {
lineReader.on('line', function (line) {
// Read
try {
const processedLine = processLine(line); // Evaluate
Expand All @@ -112,8 +112,8 @@ lineReader.on("line", function(line) {
[_, endNanoseconds] = process.hrtime();

const tags = new TagMap();
tags.set(methodKey, { value: "REPL" });
tags.set(statusKey, { value: "OK" });
tags.set(methodKey, { value: 'REPL' });
tags.set(statusKey, { value: 'OK' });

globalStats.record([{
measure: mLineLengths,
Expand All @@ -130,8 +130,8 @@ lineReader.on("line", function(line) {
console.log(err);

const errTags = new TagMap();
errTags.set(methodKey, { value: "repl" });
errTags.set(statusKey, { value: "ERROR" });
errTags.set(methodKey, { value: 'repl' });
errTags.set(statusKey, { value: 'ERROR' });
globalStats.record([{
measure: mLatencyMs,
value: sinceInMilliseconds(endNanoseconds, startNanoseconds)
Expand All @@ -146,7 +146,7 @@ lineReader.on("line", function(line) {
* Takes a line and process it.
* @param {string} line The line to process
*/
function processLine(line) {
function processLine (line) {
// Currently, it just capitalizes it.
return line.toUpperCase();
}
Expand All @@ -156,6 +156,6 @@ function processLine(line) {
* @param {number} endNanoseconds The end time of REPL.
* @param {number} startNanoseconds The start time of REPL.
*/
function sinceInMilliseconds(endNanoseconds, startNanoseconds) {
function sinceInMilliseconds (endNanoseconds, startNanoseconds) {
return (endNanoseconds - startNanoseconds) / 1e6;
}
64 changes: 31 additions & 33 deletions examples/stats/exporter/stackdriver.js
Original file line number Diff line number Diff line change
Expand Up @@ -19,12 +19,12 @@
* OpenCensus to Stackdriver.
*/

const { globalStats, MeasureUnit, AggregationType, TagMap } = require("@opencensus/core");
const { globalStats, MeasureUnit, AggregationType, TagMap } = require('@opencensus/core');
const { StackdriverStatsExporter } =
require("@opencensus/exporter-stackdriver");
require('@opencensus/exporter-stackdriver');

const fs = require("fs");
const readline = require("readline");
const fs = require('fs');
const readline = require('readline');

// [START setup_exporter]
// Enable OpenCensus exporters to export metrics to Stackdriver Monitoring.
Expand All @@ -38,8 +38,7 @@ const projectId = process.env.GOOGLE_PROJECT_ID;
// GOOGLE_APPLICATION_CREDENTIALS are expected by a dependency of this code
// Not this code itself. Checking for existence here but not retaining (as not needed)
if (!projectId || !process.env.GOOGLE_APPLICATION_CREDENTIALS) {
// Unable to proceed without a Project ID
process.exit(1);
throw Error('Unable to proceed without a Project ID');
}
const exporter = new StackdriverStatsExporter({ projectId: projectId });

Expand All @@ -49,61 +48,61 @@ globalStats.registerExporter(exporter);

// The latency in milliseconds
const mLatencyMs = globalStats.createMeasureDouble(
"repl/latency",
'repl/latency',
MeasureUnit.MS,
"The latency in milliseconds per REPL loop"
'The latency in milliseconds per REPL loop'
);

// Counts/groups the lengths of lines read in.
const mLineLengths = globalStats.createMeasureInt64(
"repl/line_lengths",
'repl/line_lengths',
MeasureUnit.BYTE,
"The distribution of line lengths"
'The distribution of line lengths'
);

// Create a stream to read our file
const stream = fs.createReadStream("./test.txt");
const stream = fs.createReadStream('./test.txt');

// Create an interface to read and process our file line by line
const lineReader = readline.createInterface({ input: stream });

const methodKey = { name: "method" };
const statusKey = { name: "status" };
const methodKey = { name: 'method' };
const statusKey = { name: 'status' };
const tagKeys = [methodKey, statusKey];

// Create & Register the view.
const latencyView = globalStats.createView(
"demo/latency",
'demo/latency',
mLatencyMs,
AggregationType.DISTRIBUTION,
tagKeys,
"The distribution of the repl latencies",
'The distribution of the repl latencies',
// Latency in buckets:
// [>=0ms, >=25ms, >=50ms, >=75ms, >=100ms, >=200ms, >=400ms, >=600ms, >=800ms, >=1s, >=2s, >=4s, >=6s]
[0, 25, 50, 75, 100, 200, 400, 600, 800, 1000, 2000, 4000, 6000]
// [>=25ms, >=50ms, >=75ms, >=100ms, >=200ms, >=400ms, >=600ms, >=800ms, >=1s, >=2s, >=4s, >=6s]
[25, 50, 75, 100, 200, 400, 600, 800, 1000, 2000, 4000, 6000]
);
globalStats.registerView(latencyView);

// Create & Register the view.
const lineCountView = globalStats.createView(
"demo/lines_in",
'demo/lines_in',
mLineLengths,
AggregationType.COUNT,
tagKeys,
"The number of lines from standard input"
'The number of lines from standard input'
);
globalStats.registerView(lineCountView);

// Create & Register the view.
const lineLengthView = globalStats.createView(
"demo/line_lengths",
'demo/line_lengths',
mLineLengths,
AggregationType.DISTRIBUTION,
tagKeys,
"Groups the lengths of keys in buckets",
'Groups the lengths of keys in buckets',
  // Bucket Boundaries:
// [>=0B, >=5B, >=10B, >=15B, >=20B, >=40B, >=60B, >=80, >=100B, >=200B, >=400, >=600, >=800, >=1000]
[0, 5, 10, 15, 20, 40, 60, 80, 100, 200, 400, 600, 800, 1000]
// [>=5B, >=10B, >=15B, >=20B, >=40B, >=60B, >=80, >=100B, >=200B, >=400, >=600, >=800, >=1000]
[5, 10, 15, 20, 40, 60, 80, 100, 200, 400, 600, 800, 1000]
);
globalStats.registerView(lineLengthView);

Expand All @@ -112,7 +111,7 @@ let [_, startNanoseconds] = process.hrtime();
let endNanoseconds;

// REPL is the read, evaluate, print and loop
lineReader.on("line", function(line) {
lineReader.on('line', function (line) {
// Read
try {
const processedLine = processLine(line); // Evaluate
Expand All @@ -122,8 +121,8 @@ lineReader.on("line", function(line) {
[_, endNanoseconds] = process.hrtime();

const tags = new TagMap();
tags.set(methodKey, { value: "REPL" });
tags.set(statusKey, { value: "OK" });
tags.set(methodKey, { value: 'REPL' });
tags.set(statusKey, { value: 'OK' });

globalStats.record([{
measure: mLineLengths,
Expand All @@ -134,13 +133,12 @@ lineReader.on("line", function(line) {
measure: mLatencyMs,
value: sinceInMilliseconds(endNanoseconds, startNanoseconds)
}], tags);

} catch (err) {
console.log(err);

const errTags = new TagMap();
errTags.set(methodKey, { value: "repl" });
errTags.set(statusKey, { value: "ERROR" });
errTags.set(methodKey, { value: 'repl' });
errTags.set(statusKey, { value: 'ERROR' });
globalStats.record([{
measure: mLatencyMs,
value: sinceInMilliseconds(endNanoseconds, startNanoseconds)
Expand All @@ -157,15 +155,15 @@ lineReader.on("line", function(line) {
* metrics that must be collected, or some risk being lost if they are recorded
* after the last export.
*/
setTimeout(function() {
console.log("Completed.");
setTimeout(function () {
console.log('Completed.');
}, 60 * 1000);

/**
* Takes a line and process it.
* @param {string} line The line to process
*/
function processLine(line) {
function processLine (line) {
// Currently, it just capitalizes it.
return line.toUpperCase();
}
Expand All @@ -175,6 +173,6 @@ function processLine(line) {
* @param {number} endNanoseconds The end time of REPL.
* @param {number} startNanoseconds The start time of REPL.
*/
function sinceInMilliseconds(endNanoseconds, startNanoseconds) {
function sinceInMilliseconds (endNanoseconds, startNanoseconds) {
return (endNanoseconds - startNanoseconds) / 1e6;
}