Skip to content

Commit 6a926a8

Browse files
committed
Update the load test scripts to generate gzipped json payloads
1 parent 136a32d commit 6a926a8

File tree

2 files changed

+70
-10
lines changed

2 files changed

+70
-10
lines changed

loadtest/publish.py

+32-5
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66
import json
77
import os
88
import base64
9+
import gzip
910
from molotov import scenario, setup, global_setup, teardown, global_teardown
1011
from molotov import set_var, get_var
1112

@@ -26,6 +27,12 @@
2627
# This is the various file sizes we'll generate in the global setup.
2728
_FILE_SIZES = (512, 1024, 5 * 1024, 20 * 1024)
2829

30+
# These are gzip-compressed chunks of json that we'll concatenate later to
31+
# generate our payload. This exploits the fact that a valid gzip stream can
32+
# be formed by concatenating several gzip members.
33+
_COMPRESSED_JSON_PREFIX = gzip.compress(b'{"foo":"')
34+
_COMPRESSED_JSON_SUFFIX = gzip.compress(b'"}')
35+
2936

3037
def setup_api_endpoint():
3138
"""Sets up the _API global that we use in all scenarios.
@@ -62,7 +69,11 @@ def test_starts(args):
6269
* we generate the various files to be sent in the tests.
6370
"""
6471
setup_api_endpoint()
65-
files = {x: os.urandom(x * 1024) for x in _FILE_SIZES}
72+
# "512" instead of "1024" because hex encoding takes 2 characters per byte.
73+
files = {x: gzip.compress(
74+
os.urandom(x * 512).hex().encode(),
75+
compresslevel=0)
76+
for x in _FILE_SIZES}
6677
set_var("files", files)
6778

6879

@@ -131,6 +142,25 @@ def jwt_base64_decode(payload):
131142
return decoded_str
132143

133144

145+
def payload_from_raw_data(raw_data):
146+
"""Returns data suitable to publish, in a form the profiler server accepts.
147+
148+
This concatenates separate pre-created gzip-compressed chunks, because we
149+
want to do as little work as possible at runtime. At runtime we only
150+
compress a very small chunk and otherwise concatenate everything.
151+
"""
152+
# By adding some random bytes, the content will change for each test and
153+
# therefore the filename too. This prevents google from erroring while we
154+
# stress test.
155+
unique_data = gzip.compress(os.urandom(10).hex().encode(), compresslevel=0)
156+
return (
157+
_COMPRESSED_JSON_PREFIX +
158+
raw_data +
159+
unique_data +
160+
_COMPRESSED_JSON_SUFFIX
161+
)
162+
163+
134164
async def publish(session, data_size):
135165
"""Publishes a profile with the passed data size
136166
"""
@@ -143,10 +173,7 @@ async def publish(session, data_size):
143173
)
144174

145175
data = get_var('files')[data_size]
146-
# By adding some random bytes, the content will change for each test and
147-
# therefore the filename too. This prevents google from erroring while we
148-
# stress test.
149-
data = data + os.urandom(10)
176+
data = payload_from_raw_data(data)
150177

151178
async with session.post(_API + '/compressed-store', data=data) as resp:
152179
assert resp.status == 200

loadtest/publish_short_requests.py

+38-5
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77
import json
88
import os
99
import base64
10+
import gzip
1011
from molotov import scenario, setup, global_setup, teardown, global_teardown
1112
from molotov import set_var, get_var
1213

@@ -27,6 +28,12 @@
2728
# This is the various file sizes we'll generate in the global setup.
2829
_FILE_SIZES = (1, 10, 50)
2930

31+
# These are gzip-compressed chunks of json that we'll concatenate later to
32+
# generate our payload. This exploits the fact that a valid gzip stream can
33+
# be formed by concatenating several gzip members.
34+
_COMPRESSED_JSON_PREFIX = gzip.compress(b'{"foo":"')
35+
_COMPRESSED_JSON_SUFFIX = gzip.compress(b'"}')
36+
3037

3138
def setup_api_endpoint():
3239
"""Sets up the _API global that we use in all scenarios.
@@ -63,7 +70,11 @@ def test_starts(args):
6370
* we generate the various files to be sent in the tests.
6471
"""
6572
setup_api_endpoint()
66-
files = {x: os.urandom(x * 1024) for x in _FILE_SIZES}
73+
# "512" instead of "1024" because hex encoding takes 2 characters per byte.
74+
files = {x: gzip.compress(
75+
os.urandom(x * 512).hex().encode(),
76+
compresslevel=0)
77+
for x in _FILE_SIZES}
6778
set_var("files", files)
6879

6980

@@ -132,6 +143,25 @@ def jwt_base64_decode(payload):
132143
return decoded_str
133144

134145

146+
def payload_from_raw_data(raw_data):
147+
"""Returns data suitable to publish, in a form the profiler server accepts.
148+
149+
This concatenates separate pre-created gzip-compressed chunks, because we
150+
want to do as little work as possible at runtime. At runtime we only
151+
compress a very small chunk and otherwise concatenate everything.
152+
"""
153+
# By adding some random bytes, the content will change for each test and
154+
# therefore the filename too. This prevents google from erroring while we
155+
# stress test.
156+
unique_data = gzip.compress(os.urandom(10).hex().encode(), compresslevel=0)
157+
return (
158+
_COMPRESSED_JSON_PREFIX +
159+
raw_data +
160+
unique_data +
161+
_COMPRESSED_JSON_SUFFIX
162+
)
163+
164+
135165
async def publish(session, data_size):
136166
"""Publishes a profile with the passed data size
137167
"""
@@ -144,10 +174,7 @@ async def publish(session, data_size):
144174
)
145175

146176
data = get_var('files')[data_size]
147-
# By adding some random bytes, the content will change for each test and
148-
# therefore the filename too. This prevents google from erroring while we
149-
# stress test.
150-
data = data + os.urandom(10)
177+
data = payload_from_raw_data(data)
151178

152179
async with session.post(_API + '/compressed-store', data=data) as resp:
153180
assert resp.status == 200
@@ -173,6 +200,12 @@ async def delete(session, jwt_token):
173200

174201
# Each scenario has a weight. Molotov uses it to determine how often the
175202
# scenario is picked.
203+
@scenario(1)
204+
async def publish_and_delete(session):
205+
jwt_token = await publish(session=session, data_size=10)
206+
await delete(session=session, jwt_token=jwt_token)
207+
208+
176209
@scenario(2)
177210
async def publish_1k(session):
178211
await publish(session=session, data_size=1)

0 commit comments

Comments
 (0)