Added random id generation
Some checks failed: continuous-integration/drone/push (Build is failing)

Pünkösd Marcell 2020-10-23 05:14:34 +02:00
parent e629b111ba
commit 96b90f4e6e


@@ -11,7 +11,7 @@ import threading
 import queue
 import sys
 import csv
-import itertools
+import random

 @dataclass(frozen=False, init=True)
 class UploadRequest:
@@ -28,7 +28,7 @@ class ReturnedRequest:
     arrived: float

-def independent_worker(number_generator, result_queue, filename: str, timeout: float):
+def independent_worker(result_queue, filename: str, timeout: float):
     prepared_curl = pycurl.Curl()
     prepared_curl.setopt(pycurl.URL, "https://birb.k8s.kmlabz.com/benchmark")
     prepared_curl.setopt(pycurl.SSL_VERIFYPEER, 0)
@@ -45,7 +45,7 @@ def independent_worker(number_generator, result_queue, filename: str, timeout: float):
     # Start working!!
     worker_start_time = time.time()
     while True:
-        jobid = next(number_generator)
+        jobid = random.randint(0, 2147483647)
         prepared_curl.setopt(pycurl.HTTPPOST, [
             ('file', (
                 pycurl.FORM_FILE, filename  # Copying buffers from Python memory would be even slower... trust me
@@ -102,11 +102,10 @@ def run_benchmark(num_workers: int, timeout: float, filename: str):
     result_queue = multiprocessing.Queue()
     workers = []
-    number_gen = itertools.count()
     for _ in range(num_workers):
         workers.append(multiprocessing.Process(
             target=independent_worker,
-            args=(number_gen, result_queue, filename, timeout)
+            args=(result_queue, filename, timeout)
         ))

     for w in workers:
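
A note on why the shared generator had to go (my reading of the change; the commit message does not spell it out): itertools.count() is picklable, so handing it to multiprocessing.Process does not fail, but each child process ends up with its own independent copy, and every worker then emits the identical sequence 0, 1, 2, ... Since results are keyed by id, colliding ids from different workers would overwrite one another. Random ids sidestep that without any cross-process coordination. A collision-free alternative would be strided counting per worker; the sketch below assumes a worker_index and num_workers could be threaded through the args tuple, which is not part of this commit:

    import itertools

    def strided_ids(worker_index: int, num_workers: int):
        # Worker k yields k, k + N, k + 2N, ... for N workers.
        # The id streams are disjoint, so uniqueness needs no locks and no randomness.
        # (worker_index and num_workers are hypothetical parameters, not in the repo.)
        return itertools.count(start=worker_index, step=num_workers)

    # With 4 workers, worker 2 would draw 2, 6, 10, 14, ...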
@@ -114,8 +113,10 @@ def run_benchmark(num_workers: int, timeout: float, filename: str):
     completed_workers = 0
     all_requests_completed = {}
+    count_requests_completed = 0
     while completed_workers < num_workers:
         results = result_queue.get()
+        count_requests_completed += results[1]
         for result in results[2]:
             all_requests_completed[result.id] = result
@@ -124,6 +125,8 @@ def run_benchmark(num_workers: int, timeout: float, filename: str):
     for w in workers:
         w.join()

+    assert count_requests_completed == len(all_requests_completed)
+
     return all_requests_completed
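
The new counter makes the id-collision risk observable: all_requests_completed is a dict keyed by id, so if two workers drew the same random id, one result would silently replace the other and the dict would come out shorter than the per-worker counts (results[1]) add up to. The assert turns that silent loss into a crash. A rough birthday-problem estimate of how likely a collision is (my own back-of-the-envelope, not from the repository); random.randint(0, 2147483647) draws uniformly from 2**31 values:

    import math

    def collision_probability(n_requests: int, id_space: int = 2 ** 31) -> float:
        # Birthday-problem approximation: P(collision) ~= 1 - exp(-n^2 / (2 * space))
        return 1.0 - math.exp(-n_requests ** 2 / (2 * id_space))

    print(collision_probability(1_000))    # ~0.0002
    print(collision_probability(10_000))   # ~0.023, so the assert is not paranoia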
@@ -134,9 +137,10 @@ def write_results(results, file_handle):
     writer.writerow(['id', 'fire_time', 'response_arrive_time', 'latency'])

     for result in results.values():
-        latency = int((result.alert_arrived - result.upload_started) * 1000)
-        fire_time = datetime.fromtimestamp(result.upload_finished).isoformat()
-        response_arrive_time = datetime.fromtimestamp(result.alert_arrived).isoformat()
+        latency = int((result.alert_arrived - result.upload_started) * 1000) if result.alert_arrived else None
+        fire_time = datetime.fromtimestamp(result.upload_finished).isoformat() if result.upload_finished else None
+        response_arrive_time = datetime.fromtimestamp(
+            result.alert_arrived).isoformat() if result.alert_arrived else None
         row = [result.id, fire_time, response_arrive_time, latency]
         writer.writerow(row)
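
These conditionals guard rows for requests that never completed: when a request times out, alert_arrived (and possibly upload_finished) presumably stays falsy, and the old code raised a TypeError on the subtraction or the fromtimestamp call. With the guards, those fields become None, which Python's csv module serializes as an empty cell. A standalone illustration of that behaviour (not from the repository):

    import csv, io

    buf = io.StringIO()
    writer = csv.writer(buf)
    writer.writerow(['id', 'fire_time', 'response_arrive_time', 'latency'])
    writer.writerow([42, '2020-10-23T05:14:34', None, None])  # an unanswered request
    print(buf.getvalue())
    # id,fire_time,response_arrive_time,latency
    # 42,2020-10-23T05:14:34,,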
@@ -233,9 +237,9 @@ def main():
             total_answered += 1

     # print some mini statistics
-    total_runtime = \
-        benchmark_results[max(benchmark_results.keys())].upload_finished - \
-        benchmark_results[min(benchmark_results.keys())].upload_started
+    total_runtime = max(benchmark_results.values(), key=lambda a: a.upload_finished).upload_finished - \
+        min(benchmark_results.values(), key=lambda a: a.upload_started).upload_started
     print(
         f"{len(benchmark_results)} requests completed: {total_successful_uploads} successfully uploaded and {total_answered} answered"