sucks to suck
All checks were successful
continuous-integration/drone/push Build is passing

This commit is contained in:
Torma Kristóf 2020-10-23 02:56:00 +02:00
parent cc8243f1f5
commit b5c8b73419
Signed by: tormakris
GPG Key ID: DC83C4F2C41B1047

View File

@ -7,6 +7,7 @@ Python module to automatically analyze benchmark results
import csv
import os
import abc
import datetime
import multiprocessing
import matplotlib.pyplot as pplot
import sentry_sdk
@ -135,6 +136,86 @@ class HeyAnalyzer(CsvAnalyzer):
self.walkresponsepersec(responsepersec, shouldprint)
class BirbAnalyzer(CsvAnalyzer):
    """
    Analyze Birbnetes benchmark output.
    """

    def __init__(self):
        """
        Init object
        """
        super().__init__()

    def processfile(
            self,
            fname,
            shouldprint: bool = False):
        """
        Process a single Birbnetes benchmark CSV file.

        Groups the ``latency`` column by the ``fire_time`` second and
        hands the mapping to :meth:`walkresponsepersec`.

        Bug fix vs. the original: the first sample of every second was
        silently dropped (the list was created on the ``if`` branch but
        the value was only appended on the ``else`` branch).

        :param fname: path of the CSV file to process
        :param shouldprint: forwarded to walkresponsepersec (plot/print toggle)
        :return: None
        """
        with open(fname, 'r') as f:
            data = csv.reader(f)
            fields = next(data)
            responsepersec = {}
            for row in data:
                item = {name: value.strip() for name, value in zip(fields, row)}
                sec = item['fire_time']
                # setdefault records the first sample of each second too,
                # instead of discarding it while creating the bucket
                responsepersec.setdefault(sec, []).append(int(item['latency']))
            self.walkresponsepersec(responsepersec, shouldprint)
class InputAnalyzer(CsvAnalyzer):
    """
    Analyze InputService benchmark output.
    """

    def __init__(self):
        """
        Init object
        """
        super().__init__()

    def processfile(
            self,
            fname,
            shouldprint: bool = False):
        """
        Process a single InputService benchmark CSV file.

        Latency (ms) is derived per row as ``response_arrive_time`` minus
        ``fire_time`` and grouped by the ``fire_time`` second.

        Bug fixes vs. the original: the first sample of every second was
        silently dropped (latency was only computed on the ``else``
        branch), and the redundant ``date_time_obj`` alias in
        ``firetime = date_time_obj = ...`` is removed.

        :param fname: path of the CSV file to process
        :param shouldprint: forwarded to walkresponsepersec (plot/print toggle)
        :return: None
        """
        with open(fname, 'r') as f:
            data = csv.reader(f)
            fields = next(data)
            responsepersec = {}
            for row in data:
                item = {name: value.strip() for name, value in zip(fields, row)}
                sec = item['fire_time']
                # fire_time carries whole seconds; arrival has fractional part
                firetime = datetime.datetime.strptime(sec, '%Y-%m-%dT%H:%M:%S')
                responsearrivetime = datetime.datetime.strptime(
                    item['response_arrive_time'], "%Y-%m-%dT%H:%M:%S.%f")
                latency = responsearrivetime - firetime
                # setdefault records the first sample of each second too
                responsepersec.setdefault(sec, []).append(
                    int(latency.total_seconds() * 1000))
            self.walkresponsepersec(responsepersec, shouldprint)
class ChartCreator:
"""
Create charts automagically
@ -206,6 +287,32 @@ class ChartCreator:
ChartCreator.savecsvplot(hey, directory)
ChartCreator.savetxtplot(hey, directory)
@staticmethod
def analyze_birb(abs_directory, directory):
    """
    Run the Birbnetes benchmark analyzer over a result directory.

    Processes every file under ``abs_directory`` and writes both the
    CSV plot and the TXT plot for ``directory``.

    :param abs_directory: absolute path of the directory to analyze
    :param directory: directory name used for the output artifacts
    :return: None
    """
    analyzer = BirbAnalyzer()
    analyzer.processallfiles(abs_directory)
    for save in (ChartCreator.savecsvplot, ChartCreator.savetxtplot):
        save(analyzer, directory)
@staticmethod
def analyze_input(abs_directory, directory):
    """
    Run the InputService benchmark analyzer over a result directory.

    Processes every file under ``abs_directory`` and writes both the
    CSV plot and the TXT plot, suffixing the output name with "input"
    so it does not collide with other analyzers' artifacts.

    :param abs_directory: absolute path of the directory to analyze
    :param directory: directory name used as the output prefix
    :return: None
    """
    analyzer = InputAnalyzer()
    analyzer.processallfiles(abs_directory)
    target = directory + "input"
    for save in (ChartCreator.savecsvplot, ChartCreator.savetxtplot):
        save(analyzer, target)
def doallruns(self):
"""
Process all directories in repo
@ -217,9 +324,20 @@ class ChartCreator:
abs_directory = os.getenv(
'SEARCHDIR', default='.') + '/' + directory
print(abs_directory)
process = multiprocessing.Process(
target=ChartCreator.analyze_hey, args=(
abs_directory, directory,))
if 'HEY' not in abs_directory.upper():
process = multiprocessing.Process(
target=ChartCreator.analyze_hey, args=(
abs_directory, directory,))
jobs.append(process)
else:
process_birb = multiprocessing.Process(
target=ChartCreator.analyze_birb, args=(
abs_directory, directory,))
process_input = multiprocessing.Process(
target=ChartCreator.analyze_input, args=(
abs_directory, directory,))
jobs.append(process_birb)
jobs.append(process_input)
if __name__ == "__main__":