diff --git a/.evergreen/config.in.yml b/.evergreen/config.in.yml
index 1a3adee09d..750ef3d81a 100644
--- a/.evergreen/config.in.yml
+++ b/.evergreen/config.in.yml
@@ -109,6 +109,18 @@ functions:
           - .evergreen/run-tests.sh
 
   "perf send":
+    - command: s3.put
+      params:
+        aws_key: ${aws_key}
+        aws_secret: ${aws_secret}
+        local_file: src/test/benchmarks/driver_bench/results.json
+        optional: true
+        # TODO NODE-4707 - change upload directory to ${UPLOAD_BUCKET}
+        remote_file: mongo-node-driver/${revision}/${version_id}/results.${task_name}.json
+        bucket: mciuploads
+        permissions: public-read
+        content_type: application/json
+        display_name: "Performance Results"
     - command: subprocess.exec
       params:
         working_dir: src
diff --git a/.evergreen/config.yml b/.evergreen/config.yml
index d9f3b3a1f7..f74052f2a7 100644
--- a/.evergreen/config.yml
+++ b/.evergreen/config.yml
@@ -81,6 +81,17 @@ functions:
         args:
           - .evergreen/run-tests.sh
   perf send:
+    - command: s3.put
+      params:
+        aws_key: ${aws_key}
+        aws_secret: ${aws_secret}
+        local_file: src/test/benchmarks/driver_bench/results.json
+        optional: true
+        remote_file: mongo-node-driver/${revision}/${version_id}/results.${task_name}.json
+        bucket: mciuploads
+        permissions: public-read
+        content_type: application/json
+        display_name: Performance Results
     - command: subprocess.exec
      params:
         working_dir: src
diff --git a/.evergreen/perf-send.sh b/.evergreen/perf-send.sh
index a3c7f234b8..8e52855af4 100644
--- a/.evergreen/perf-send.sh
+++ b/.evergreen/perf-send.sh
@@ -1,9 +1,11 @@
 #!/usr/bin/env bash
 
-set -euox pipefail
+set -euo pipefail
 
 source $DRIVERS_TOOLS/.evergreen/init-node-and-npm-env.sh
 
 TARGET_FILE=$(realpath "${TARGET_FILE:-./test/benchmarks/driver_bench/results.json}")
 
+set -o xtrace
+
 node ./.evergreen/perf_send.mjs $TARGET_FILE
diff --git a/test/benchmarks/driver_bench/src/main.mts b/test/benchmarks/driver_bench/src/main.mts
index 04e573b773..90e58e584c 100644
--- a/test/benchmarks/driver_bench/src/main.mts
+++ b/test/benchmarks/driver_bench/src/main.mts
@@ -110,6 +110,13 @@ for (const [suite, benchmarks] of Object.entries(tests)) {
   console.groupEnd();
 }
 
+const metricInfoFilterByName =
+  (testName: string) =>
+  ({ info: { test_name } }: MetricInfo) =>
+    test_name === testName;
+
+const isMBsMetric = ({ name }: Metric) => name === 'megabytes_per_second';
+
 function calculateCompositeBenchmarks(results: MetricInfo[]) {
   const composites = {
     singleBench: ['findOne', 'smallDocInsertOne', 'largeDocInsertOne'],
@@ -144,13 +151,6 @@ function calculateCompositeBenchmarks(results: MetricInfo[]) {
     ]
   };
 
-  const aMetricInfo =
-    (testName: string) =>
-    ({ info: { test_name } }: MetricInfo) =>
-      test_name === testName;
-
-  const anMBsMetric = ({ name }: Metric) => name === 'megabytes_per_second';
-
   let readBenchResult;
   let writeBenchResult;
 
@@ -162,10 +162,10 @@ function calculateCompositeBenchmarks(results: MetricInfo[]) {
 
     let sum = 0;
     for (const testName of compositeTests) {
-      const testScore = results.find(aMetricInfo(testName));
+      const testScore = results.find(metricInfoFilterByName(testName));
       assert.ok(testScore, `${compositeName} suite requires ${testName} for composite score`);
 
-      const metric = testScore.metrics.find(anMBsMetric);
+      const metric = testScore.metrics.find(isMBsMetric);
       assert.ok(metric, `${testName} is missing a megabytes_per_second metric`);
 
       sum += metric.value;
@@ -199,31 +199,40 @@
 }
 
 function calculateNormalizedResults(results: MetricInfo[]): MetricInfo[] {
-  const baselineBench = results.find(r => r.info.test_name === 'cpuBaseline');
-  const pingBench = results.find(r => r.info.test_name === 'ping');
+  const baselineBench = results.find(metricInfoFilterByName('cpuBaseline'));
+  const pingBench = results.find(metricInfoFilterByName('ping'));
 
   assert.ok(pingBench, 'ping bench results not found!');
-  assert.ok(baselineBench, 'baseline results not found!');
-  const pingThroughput = pingBench.metrics[0].value;
-  const cpuBaseline = baselineBench.metrics[0].value;
+  assert.ok(baselineBench, 'cpuBaseline results not found!');
+
+  const cpuBaseline = baselineBench.metrics.find(isMBsMetric);
+  const pingThroughput = pingBench.metrics.find(isMBsMetric);
+
+  assert.ok(cpuBaseline, 'cpu benchmark does not have a MB/s metric');
+  assert.ok(pingThroughput, 'ping does not have a MB/s metric');
 
   for (const bench of results) {
     if (bench.info.test_name === 'cpuBaseline') continue;
+
+    const currentMetric = bench.metrics.find(isMBsMetric);
+    assert.ok(currentMetric, `${bench.info.test_name} does not have a MB/s metric`);
+
     if (bench.info.test_name === 'ping') {
       bench.metrics.push({
         name: 'normalized_throughput',
-        value: bench.metrics[0].value / cpuBaseline,
+        value: currentMetric.value / cpuBaseline.value,
         metadata: {
+          tags: currentMetric.metadata.tags,
           improvement_direction: 'up'
         }
       });
-    }
-    // Compute normalized_throughput of benchmarks against ping bench
-    else {
+    } else {
+      // Compute normalized_throughput of benchmarks against ping bench
       bench.metrics.push({
         name: 'normalized_throughput',
-        value: bench.metrics[0].value / pingThroughput,
+        value: currentMetric.value / pingThroughput.value,
         metadata: {
+          tags: currentMetric.metadata.tags,
           improvement_direction: 'up'
         }
       });