server_stress_testing.py
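"""Stress-test a local HTTP server by firing many concurrent GET requests.

Sends 10,000 requests to the /version endpoint of a server assumed to be
running on 127.0.0.1:8080, using a small thread pool, and prints each
response body (or the exception raised for that URL).
"""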
import concurrent.futures
import urllib.request
import sys

# payload1 = ["http://127.0.0.1:8080/version?token=default" for i in range(1000)]
# payload2 = ["http://127.0.0.1:8080/diff?token=default" for i in range(1000)]
# payload3 = ["http://127.0.0.1:8080/history?token=default" for i in range(1000)]
# URLS = [payload1, payload2, payload3]
# URLS = [x for t in zip(*URLS) for x in t]
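# (The commented-out lists above build one payload per endpoint, and the
#  zip/flatten comprehension interleaves them so /version, /diff and /history
#  would be hit in round-robin order instead of a single endpoint.)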
URLS = ["http://127.0.0.1:8080/version?token=default" for i in range(10000)]


# Retrieve a single page and return its contents
def load_url(url, timeout):
    conn = urllib.request.urlopen(url, timeout=timeout)
    return conn.read()


# Use a with statement to ensure the worker threads are cleaned up promptly
with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
    # Start the load operations and map each future back to its URL
    future_to_url = {executor.submit(load_url, url, 60): url for url in URLS}
    for future in concurrent.futures.as_completed(future_to_url):
        url = future_to_url[future]
        try:
            data = future.result()
            # do json processing here
        except Exception as exc:
            print('%r generated an exception: %s' % (url, exc))
        else:
            print(data)
            sys.stdout.flush()
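The script above only prints each response body. For stress testing it is often more useful to record per-request latency; the following standalone sketch (an assumption, not part of the original script) times each request against the same assumed local /version endpoint and prints simple summary statistics.

import concurrent.futures
import time
import urllib.request

URL = "http://127.0.0.1:8080/version?token=default"  # same assumed local endpoint
N_REQUESTS = 1000   # hypothetical, smaller run for quick latency sampling
MAX_WORKERS = 5

def timed_fetch(url, timeout=60):
    # Fetch the URL and return the elapsed wall-clock time in seconds
    start = time.perf_counter()
    with urllib.request.urlopen(url, timeout=timeout) as conn:
        conn.read()
    return time.perf_counter() - start

with concurrent.futures.ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
    latencies = sorted(executor.map(timed_fetch, [URL] * N_REQUESTS))

print("requests: %d" % len(latencies))
print("min %.3fs / median %.3fs / max %.3fs"
      % (latencies[0], latencies[len(latencies) // 2], latencies[-1]))

Unlike the original loop over as_completed, executor.map re-raises the first exception it encounters, which is acceptable for a quick local check but drops the per-URL error reporting of the script above.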