diff --git a/loadtest/README.md b/loadtest/README.md
index 0effa0e3..8caaa441 100644
--- a/loadtest/README.md
+++ b/loadtest/README.md
@@ -54,11 +54,18 @@ Please clean up afterwards :-)
 
 To run only one test:
 ```
-molotov --max-runs 1 --single-mode -v publish.py 
+molotov --max-runs 1 --single-mode -v publish.py
 ```
 Notice `-v` enables verbose mode, which outputs errors. This is especially
 useful while finalizing a specific scenario.
 
+To run only one test, but with several workers in parallel:
+```
+molotov -v --single-mode -w 50 --max-runs 1 publish.py
+```
+By running this command with `time`, it's possible to accurately measure how
+the server behaves with a lot of parallel calls.
+
 To run the scenarios during 5 seconds, with 50 workers, this is how we can do it:
 ```
 molotov -d 5 -w 50 -v publish.py
diff --git a/loadtest/publish.py b/loadtest/publish.py
index ea6ea954..cbaf2dc3 100644
--- a/loadtest/publish.py
+++ b/loadtest/publish.py
@@ -6,6 +6,7 @@
 import json
 import os
 import base64
+import gzip
 
 from molotov import scenario, setup, global_setup, teardown, global_teardown
 from molotov import set_var, get_var
@@ -26,6 +27,12 @@
 # This is the various file sizes we'll generate in the global setup.
 _FILE_SIZES = (512, 1024, 5 * 1024, 20 * 1024)
 
+# These are gzip-compressed chunks of JSON that we'll concatenate later to
+# generate our payload. This takes advantage of the fact that a gzip stream
+# can be made of several concatenated gzip chunks.
+_COMPRESSED_JSON_PREFIX = gzip.compress(b'{"foo":"')
+_COMPRESSED_JSON_SUFFIX = gzip.compress(b'"}')
+
 
 def setup_api_endpoint():
     """Sets up the _API global that we use in all scenarii.
@@ -62,7 +69,11 @@ def test_starts(args):
     * we generate the various files to be sent in the tests.
     """
     setup_api_endpoint()
-    files = {x: os.urandom(x * 1024) for x in _FILE_SIZES}
+    # "512" instead of "1024" because a byte takes 2 characters in hexadecimal.
+    files = {x: gzip.compress(
+        os.urandom(x * 512).hex().encode(),
+        compresslevel=0)
+        for x in _FILE_SIZES}
 
     set_var("files", files)
 
@@ -131,6 +142,25 @@ def jwt_base64_decode(payload):
     return decoded_str
 
 
+def payload_from_raw_data(raw_data):
+    """Returns data suitable to publish, in a form accepted by the profiler server.
+
+    This concatenates separate pre-created gzip-compressed chunks, so that we
+    do as little work as possible at runtime: we only compress a very small
+    chunk and otherwise just concatenate everything.
+    """
+    # By adding some random bytes, the content will change for each test and
+    # therefore the filename too. This prevents Google from erroring while we
+    # stress test.
+    unique_data = gzip.compress(os.urandom(10).hex().encode(), compresslevel=0)
+    return (
+        _COMPRESSED_JSON_PREFIX
+        + raw_data
+        + unique_data
+        + _COMPRESSED_JSON_SUFFIX
+    )
+
+
 async def publish(session, data_size):
     """Publishes a profile with the passed data size
     """
@@ -143,10 +173,7 @@ async def publish(session, data_size):
     )
 
     data = get_var('files')[data_size]
-    # By adding some random bytes, the content will change for each test and
-    # therefore the filename too. This prevents google from erroring while we
-    # stress test.
-    data = data + os.urandom(10)
+    data = payload_from_raw_data(data)
 
     async with session.post(_API + '/compressed-store', data=data) as resp:
         assert resp.status == 200
diff --git a/loadtest/publish_short_requests.py b/loadtest/publish_short_requests.py
index 461f2b0f..64196e28 100644
--- a/loadtest/publish_short_requests.py
+++ b/loadtest/publish_short_requests.py
@@ -7,6 +7,7 @@
 import json
 import os
 import base64
+import gzip
 
 from molotov import scenario, setup, global_setup, teardown, global_teardown
 from molotov import set_var, get_var
@@ -27,6 +28,12 @@
 # This is the various file sizes we'll generate in the global setup.
 _FILE_SIZES = (1, 10, 50)
 
+# These are gzip-compressed chunks of JSON that we'll concatenate later to
+# generate our payload. This takes advantage of the fact that a gzip stream
+# can be made of several concatenated gzip chunks.
+_COMPRESSED_JSON_PREFIX = gzip.compress(b'{"foo":"')
+_COMPRESSED_JSON_SUFFIX = gzip.compress(b'"}')
+
 
 def setup_api_endpoint():
     """Sets up the _API global that we use in all scenarii.
@@ -63,7 +70,11 @@ def test_starts(args):
     * we generate the various files to be sent in the tests.
     """
     setup_api_endpoint()
-    files = {x: os.urandom(x * 1024) for x in _FILE_SIZES}
+    # "512" instead of "1024" because a byte takes 2 characters in hexadecimal.
+    files = {x: gzip.compress(
+        os.urandom(x * 512).hex().encode(),
+        compresslevel=0)
+        for x in _FILE_SIZES}
 
     set_var("files", files)
 
@@ -132,6 +143,25 @@ def jwt_base64_decode(payload):
     return decoded_str
 
 
+def payload_from_raw_data(raw_data):
+    """Returns data suitable to publish, in a form accepted by the profiler server.
+
+    This concatenates separate pre-created gzip-compressed chunks, so that we
+    do as little work as possible at runtime: we only compress a very small
+    chunk and otherwise just concatenate everything.
+    """
+    # By adding some random bytes, the content will change for each test and
+    # therefore the filename too. This prevents Google from erroring while we
+    # stress test.
+    unique_data = gzip.compress(os.urandom(10).hex().encode(), compresslevel=0)
+    return (
+        _COMPRESSED_JSON_PREFIX
+        + raw_data
+        + unique_data
+        + _COMPRESSED_JSON_SUFFIX
+    )
+
+
 async def publish(session, data_size):
     """Publishes a profile with the passed data size
     """
@@ -144,10 +174,7 @@ async def publish(session, data_size):
     )
 
     data = get_var('files')[data_size]
-    # By adding some random bytes, the content will change for each test and
-    # therefore the filename too. This prevents google from erroring while we
-    # stress test.
-    data = data + os.urandom(10)
+    data = payload_from_raw_data(data)
 
     async with session.post(_API + '/compressed-store', data=data) as resp:
         assert resp.status == 200
@@ -173,6 +200,12 @@ async def delete(session, jwt_token):
 
 # Each scenario has a weight. Molotov uses it to determine how often the
 # scenario is picked.
+@scenario(1)
+async def publish_and_delete(session):
+    jwt_token = await publish(session=session, data_size=10)
+    await delete(session=session, jwt_token=jwt_token)
+
+
 @scenario(2)
 async def publish_1k(session):
     await publish(session=session, data_size=1)
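
A note on the gzip trick these changes rely on: RFC 1952 allows a gzip stream to contain several members, one after the other, and a decompressor is expected to decode all of them and concatenate the results. That is why gluing `_COMPRESSED_JSON_PREFIX`, a pre-compressed body, and `_COMPRESSED_JSON_SUFFIX` together still yields a stream that inflates to valid JSON. A minimal standalone sketch (not part of the diff) that checks this property:

```python
import gzip
import json
import os

# Mirrors the constants added in the diff: each piece is a complete
# gzip member on its own.
prefix = gzip.compress(b'{"foo":"')
suffix = gzip.compress(b'"}')

# Hex characters never need escaping inside a JSON string, so the random
# body can be spliced between the prefix and suffix verbatim.
body = gzip.compress(os.urandom(16).hex().encode(), compresslevel=0)

payload = prefix + body + suffix

# gzip.decompress() handles multi-member streams: it returns the
# concatenation of the decompressed data of every member.
decoded = gzip.decompress(payload)
print(json.loads(decoded))  # {'foo': '<32 hex characters>'}
```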
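
The `x * 512` change in `test_starts` leans on two details worth spelling out: `bytes.hex()` produces two ASCII characters per input byte, so the hex-encoded payload is twice the size of the random data, and `compresslevel=0` only wraps the data in gzip framing without compressing it, which keeps the global setup cheap and the payload sizes predictable. A quick standard-library-only check of both assumptions:

```python
import gzip
import os

for size_kib in (1, 10, 50):
    raw = os.urandom(size_kib * 512)   # N random bytes...
    hexed = raw.hex().encode()         # ...become 2N bytes once hex-encoded
    assert len(hexed) == size_kib * 1024

    # compresslevel=0 emits "stored" (uncompressed) deflate blocks plus the
    # gzip header and trailer, so the data round-trips but is not shrunk.
    stored = gzip.compress(hexed, compresslevel=0)
    assert gzip.decompress(stored) == hexed
    assert len(stored) > len(hexed)    # framing adds a small overhead
```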
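
On the new `publish_and_delete` scenario: molotov picks a scenario for each run with a probability proportional to its weight, so `@scenario(1)` next to `@scenario(2)` means the publish-and-delete path should run roughly half as often as `publish_1k`. A hypothetical, self-contained scenario file showing just the weighting mechanism (the file name and endpoints are made up for illustration):

```python
# weighted_example.py -- hypothetical, for illustration only.
from molotov import scenario


# Over a long run, molotov picks "often" about twice as frequently as
# "rarely", because the weights are 2 and 1.

@scenario(1)
async def rarely(session):
    async with session.get('http://localhost:5000/__version__') as resp:
        assert resp.status == 200


@scenario(2)
async def often(session):
    async with session.get('http://localhost:5000/__heartbeat__') as resp:
        assert resp.status == 200
```

Running it with something like `molotov -d 10 -w 10 -v weighted_example.py` and counting requests on the server side makes the ratio visible.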