parent e629b111ba
commit 96b90f4e6e
@@ -11,7 +11,7 @@ import threading
 import queue
 import sys
 import csv
-
+import random
 
 @dataclass(frozen=False, init=True)
 class UploadRequest:
@@ -28,7 +28,7 @@ class ReturnedRequest:
     arrived: float


-def independent_worker(number_generator, result_queue, filename: str, timeout: float):
+def independent_worker(result_queue, filename: str, timeout: float):
     prepared_curl = pycurl.Curl()
     prepared_curl.setopt(pycurl.URL, "https://birb.k8s.kmlabz.com/benchmark")
     prepared_curl.setopt(pycurl.SSL_VERIFYPEER, 0)
@@ -45,7 +45,7 @@ def independent_worker(number_generator, result_queue, filename: str, timeout: float):
     # Start working!!
     worker_start_time = time.time()
     while True:
-        jobid = next(number_generator)
+        jobid = random.randint(0, 2147483647)
        prepared_curl.setopt(pycurl.HTTPPOST, [
            ('file', (
                pycurl.FORM_FILE, filename  # Copying buffers from Python memory would be even slower... trust me
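
The switch from next(number_generator) to random.randint above appears to address a multiprocessing pitfall: a generator created in the parent process is not shared with the worker processes, so every worker would draw the same ID sequence and job IDs would collide. A minimal sketch of that pitfall, assuming the fork start method on Linux; the helper names are illustrative and not part of the commit:

```python
import itertools
import multiprocessing


def show_first_ids(gen, out_queue):
    # Each forked child owns an independent copy of `gen`,
    # so both workers report the same sequence start.
    out_queue.put([next(gen) for _ in range(3)])


if __name__ == "__main__":
    ctx = multiprocessing.get_context("fork")  # fork assumed; spawn cannot even pickle a generator
    shared_gen = itertools.count()
    q = ctx.Queue()
    procs = [ctx.Process(target=show_first_ids, args=(shared_gen, q)) for _ in range(2)]
    for p in procs:
        p.start()
    print(q.get(), q.get())  # both workers yield [0, 1, 2] -> duplicate job IDs
    for p in procs:
        p.join()
```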
@@ -102,11 +102,10 @@ def run_benchmark(num_workers: int, timeout: float, filename: str):
     result_queue = multiprocessing.Queue()
 
     workers = []
-    number_gen = itertools.count()
     for _ in range(num_workers):
         workers.append(multiprocessing.Process(
             target=independent_worker,
-            args=(number_gen, result_queue, filename, timeout)
+            args=(result_queue, filename, timeout)
         ))
 
     for w in workers:
@@ -114,8 +113,10 @@ def run_benchmark(num_workers: int, timeout: float, filename: str):
 
     completed_workers = 0
     all_requests_completed = {}
+    count_requests_completed = 0
     while completed_workers < num_workers:
         results = result_queue.get()
+        count_requests_completed += results[1]
         for result in results[2]:
             all_requests_completed[result.id] = result
 
@@ -124,6 +125,8 @@ def run_benchmark(num_workers: int, timeout: float, filename: str):
     for w in workers:
         w.join()
 
+    assert count_requests_completed == len(all_requests_completed)
+
     return all_requests_completed
 
 
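
With randomly drawn job IDs, collisions become possible, which is presumably what the new counter and assert guard against: a duplicate ID would silently overwrite a dict entry and make the dict smaller than the running count. A minimal sketch of that check, assuming the worker batches have the shape (worker_id, completed_count, [result, ...]) implied by results[1] and results[2]; the helper and record names are made up for illustration:

```python
from dataclasses import dataclass


@dataclass
class FakeResult:  # stand-in for the benchmark's per-request result records
    id: int


def merge_worker_batches(batches):
    # batch shape assumed: (worker_id, completed_count, [result, ...])
    merged = {}
    counted = 0
    for _, completed_count, results in batches:
        counted += completed_count
        for result in results:
            merged[result.id] = result  # a duplicate random ID overwrites an earlier entry
    # mirrors the new assert: count and dict size diverge on an ID collision
    assert counted == len(merged), "random job ID collision detected"
    return merged


merged = merge_worker_batches([(0, 2, [FakeResult(7), FakeResult(42)]),
                               (1, 1, [FakeResult(99)])])
print(len(merged))  # 3
```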
@@ -134,9 +137,10 @@ def write_results(results, file_handle):
     writer.writerow(['id', 'fire_time', 'response_arrive_time', 'latency'])
 
     for result in results.values():
-        latency = int((result.alert_arrived - result.upload_started) * 1000)
-        fire_time = datetime.fromtimestamp(result.upload_finished).isoformat()
-        response_arrive_time = datetime.fromtimestamp(result.alert_arrived).isoformat()
+        latency = int((result.alert_arrived - result.upload_started) * 1000) if result.alert_arrived else None
+        fire_time = datetime.fromtimestamp(result.upload_finished).isoformat() if result.upload_finished else None
+        response_arrive_time = datetime.fromtimestamp(
+            result.alert_arrived).isoformat() if result.alert_arrived else None
         row = [result.id, fire_time, response_arrive_time, latency]
         writer.writerow(row)
 
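
The guarded expressions above keep write_results from raising a TypeError on requests that timed out and never received an answer; such rows now get empty latency and timestamp cells instead. A minimal, self-contained sketch of the same conversion, where only the field names follow the diff and the record type is a stand-in:

```python
from dataclasses import dataclass
from datetime import datetime
from typing import Optional


@dataclass
class FakeRequest:
    id: int
    upload_started: float
    upload_finished: Optional[float]
    alert_arrived: Optional[float]


def to_csv_row(r):
    # None-guarded conversions: a missing timestamp yields an empty CSV cell
    latency = int((r.alert_arrived - r.upload_started) * 1000) if r.alert_arrived else None
    fire_time = datetime.fromtimestamp(r.upload_finished).isoformat() if r.upload_finished else None
    arrive_time = datetime.fromtimestamp(r.alert_arrived).isoformat() if r.alert_arrived else None
    return [r.id, fire_time, arrive_time, latency]


print(to_csv_row(FakeRequest(1, 10.0, 10.2, 10.5)))  # fully answered request
print(to_csv_row(FakeRequest(2, 11.0, 11.1, None)))  # timed out: empty latency and arrive_time
```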
@@ -233,9 +237,9 @@ def main():
             total_answered += 1
 
     # print some mini statistics
-    total_runtime = \
-        benchmark_results[max(benchmark_results.keys())].upload_finished - \
-        benchmark_results[min(benchmark_results.keys())].upload_started
+    total_runtime = max(benchmark_results.values(), key=lambda a: a.upload_finished).upload_finished - \
+        min(benchmark_results.values(), key=lambda a: a.upload_started).upload_started
+
 
     print(
         f"{len(benchmark_results)} requests completed: {total_successful_uploads} successfully uploaded and {total_answered} answered"
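
Indexing by max(...keys()) and min(...keys()) only approximated the first and last request while IDs were sequential; with random IDs the largest key says nothing about time, so the runtime is now taken from the timestamps themselves. A small sketch of the difference, with an assumed result record:

```python
from dataclasses import dataclass


@dataclass
class FakeResult:
    id: int
    upload_started: float
    upload_finished: float


results = {
    907_113_211: FakeResult(907_113_211, upload_started=0.0, upload_finished=1.0),
    12_345:      FakeResult(12_345,      upload_started=0.5, upload_finished=9.0),
}

# old approach: keyed lookup, only meaningful while IDs grew monotonically
wrong = results[max(results)].upload_finished - results[min(results)].upload_started
# new approach: scan the values for the extreme timestamps, independent of key order
right = (max(results.values(), key=lambda r: r.upload_finished).upload_finished
         - min(results.values(), key=lambda r: r.upload_started).upload_started)
print(wrong, right)  # 0.5 vs. 9.0
```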