From b5c8b73419791f007c836ce2273bcb8d9c95c9a2 Mon Sep 17 00:00:00 2001
From: Torma
Date: Fri, 23 Oct 2020 02:56:00 +0200
Subject: [PATCH] sucks to suck

---
 make_graphs.py | 124 +++++++++++++++++++++++++++++++++++++++++++++++--
 1 file changed, 121 insertions(+), 3 deletions(-)

diff --git a/make_graphs.py b/make_graphs.py
index 79ae683..1847ff3 100644
--- a/make_graphs.py
+++ b/make_graphs.py
@@ -7,6 +7,7 @@ Python module to automatically analyze benchmark results
 import csv
 import os
 import abc
+import datetime
 import multiprocessing
 import matplotlib.pyplot as pplot
 import sentry_sdk
@@ -135,6 +136,86 @@ class HeyAnalyzer(CsvAnalyzer):
         self.walkresponsepersec(responsepersec, shouldprint)


+class BirbAnalyzer(CsvAnalyzer):
+    """
+    Analyze Birbnetes benchmark output.
+    """
+
+    def __init__(self):
+        """
+        Init object
+        """
+        super().__init__()
+
+    def processfile(
+            self,
+            fname,
+            shouldprint: bool = False):
+        """
+        Process a single file.
+        :param fname: path of the CSV file to process
+        :param shouldprint: print per-second statistics while processing
+        :return:
+        """
+        with open(fname, 'r') as f:
+            data = csv.reader(f)
+            fields = next(data)
+            responsepersec = {}
+            for row in data:
+                items = zip(fields, row)
+                item = {}
+                for (name, value) in items:
+                    item[name] = value.strip()
+                # bucket the latency samples by the second the request was fired in
+                sec = item['fire_time']
+                if sec not in responsepersec:
+                    responsepersec[sec] = []
+                responsepersec[sec].append(int(item['latency']))
+            self.walkresponsepersec(responsepersec, shouldprint)
+
+
+class InputAnalyzer(CsvAnalyzer):
+    """
+    Analyze InputService benchmark output.
+    """
+
+    def __init__(self):
+        """
+        Init object
+        """
+        super().__init__()
+
+    def processfile(
+            self,
+            fname,
+            shouldprint: bool = False):
+        """
+        Process a single file.
+        :param fname: path of the CSV file to process
+        :param shouldprint: print per-second statistics while processing
+        :return:
+        """
+        with open(fname, 'r') as f:
+            data = csv.reader(f)
+            fields = next(data)
+            responsepersec = {}
+            for row in data:
+                items = zip(fields, row)
+                item = {}
+                for (name, value) in items:
+                    item[name] = value.strip()
+                sec = item['fire_time']
+                if sec not in responsepersec:
+                    responsepersec[sec] = []
+                # latency = response arrival time - fire time, in milliseconds
+                firetime = datetime.datetime.strptime(sec, '%Y-%m-%dT%H:%M:%S')
+                responsearrivetime = datetime.datetime.strptime(
+                    item['response_arrive_time'], "%Y-%m-%dT%H:%M:%S.%f")
+                latency = responsearrivetime - firetime
+                responsepersec[sec].append(int(latency.total_seconds() * 1000))
+            self.walkresponsepersec(responsepersec, shouldprint)
+
+
 class ChartCreator:
     """
     Create charts automagically
@@ -206,6 +287,32 @@ class ChartCreator:
         ChartCreator.savecsvplot(hey, directory)
         ChartCreator.savetxtplot(hey, directory)

+    @staticmethod
+    def analyze_birb(abs_directory, directory):
+        """
+        Analyze BirbBench output
+        :param abs_directory: absolute path of the directory holding the benchmark CSVs
+        :param directory: directory name, used to name the saved plots
+        :return:
+        """
+        birb = BirbAnalyzer()
+        birb.processallfiles(abs_directory)
+        ChartCreator.savecsvplot(birb, directory)
+        ChartCreator.savetxtplot(birb, directory)
+
+    @staticmethod
+    def analyze_input(abs_directory, directory):
+        """
+        Analyze InputSvc output
+        :param abs_directory: absolute path of the directory holding the benchmark CSVs
+        :param directory: directory name, used to name the saved plots
+        :return:
+        """
+        inputsvc = InputAnalyzer()
+        inputsvc.processallfiles(abs_directory)
+        ChartCreator.savecsvplot(inputsvc, directory + "input")
+        ChartCreator.savetxtplot(inputsvc, directory + "input")
+
     def doallruns(self):
         """
         Process all directories in repo
@@ -217,9 +324,20 @@ class ChartCreator:
             abs_directory = os.getenv(
                 'SEARCHDIR', default='.') + '/' + directory
             print(abs_directory)
-            process = multiprocessing.Process(
-                target=ChartCreator.analyze_hey, args=(
-                    abs_directory, directory,))
+            if 'HEY' not in abs_directory.upper():
+                process = multiprocessing.Process(
+                    target=ChartCreator.analyze_hey, args=(
+                        abs_directory, directory,))
+                jobs.append(process)
+            else:
+                process_birb = multiprocessing.Process(
+                    target=ChartCreator.analyze_birb, args=(
+                        abs_directory, directory,))
+                process_input = multiprocessing.Process(
+                    target=ChartCreator.analyze_input, args=(
+                        abs_directory, directory,))
+                jobs.append(process_birb)
+                jobs.append(process_input)


 if __name__ == "__main__":
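
For reference, a minimal standalone sketch of the per-second latency computation in the InputAnalyzer hunk above; the strptime formats come from the patch, while the sample timestamp values are made up:

    import datetime

    # 'fire_time' has second resolution, 'response_arrive_time' has microsecond resolution
    fire_time = "2020-10-23T02:56:00"
    arrive_time = "2020-10-23T02:56:00.274511"

    firetime = datetime.datetime.strptime(fire_time, '%Y-%m-%dT%H:%M:%S')
    responsearrivetime = datetime.datetime.strptime(arrive_time, "%Y-%m-%dT%H:%M:%S.%f")
    latency_ms = int((responsearrivetime - firetime).total_seconds() * 1000)
    print(latency_ms)  # 274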