From 4b7f1752eca525f1893ec3fe540b5b178517361a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Torma=20Krist=C3=B3f?= <torma.kristof@example.com>
Date: Sun, 3 Nov 2019 23:11:23 +0100
Subject: [PATCH] log processor script

---
 benchmark/classic/data/log_analyze.py | 87 +++++++++++++++++++++++++++
 benchmark/classic/data/process.py     |  4 +-
 benchmark/classic/wacher.sh           |  2 +-
 3 files changed, 90 insertions(+), 3 deletions(-)
 create mode 100644 benchmark/classic/data/log_analyze.py

diff --git a/benchmark/classic/data/log_analyze.py b/benchmark/classic/data/log_analyze.py
new file mode 100644
index 0000000..1c100ab
--- /dev/null
+++ b/benchmark/classic/data/log_analyze.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python3
+"""Average per-second ObservedStableValue readings from a JSON log.
+
+Reads newline-delimited JSON records from log.txt, keeps the records
+whose timestamp falls between the first two dates listed in dates.txt,
+and prints the per-second average of the ObservedStableValue field.
+"""
+
+import datetime
+import json
+import re
+
+
+def average(lst: list) -> float:
+    """Return the arithmetic mean of a non-empty list of numbers."""
+    return sum(lst) / len(lst)
+
+
+def listtodict(inlist: list) -> dict:
+    """Pair up consecutive items of a list as {key: value}."""
+    it = iter(inlist)
+    return dict(zip(it, it))
+
+
+def readfile() -> list:
+    """Parse log.txt as one JSON object per line, skipping bad lines."""
+    lines = []
+    with open("log.txt", 'r') as inputfile:
+        # Iterate the file directly so the first line is not skipped.
+        for line in inputfile:
+            try:
+                lines.append(json.loads(line))
+            except json.JSONDecodeError:
+                continue
+    return lines
+
+
+def readconfigdates() -> dict:
+    """Read dates.txt ('%Y-%m-%d %H:%M:%S' per line), keyed from 0."""
+    dates = {}
+    with open("dates.txt", 'r') as inputfile:
+        for index, line in enumerate(inputfile):
+            line = line.strip()  # strptime rejects the trailing newline
+            if not line:
+                continue
+            dates[index] = datetime.datetime.strptime(
+                line, '%Y-%m-%d %H:%M:%S')
+    return dates
+
+
+def procesfile(file: list, start: datetime.datetime,
+               end: datetime.datetime) -> dict:
+    """Group ObservedStableValue floats by second within (start, end)."""
+    dictofsecs = {}
+    for line in file:
+        try:
+            # Timestamps look like 2019-11-03T23:11:23.123; drop the
+            # fraction so readings group per second.
+            currdate = line['ts'].split('.')[0].replace('T', ' ')
+            dateformatted = datetime.datetime.strptime(
+                currdate, '%Y-%m-%d %H:%M:%S')
+            if start < dateformatted < end:
+                messagedict = listtodict(re.split(' |=', line['msg']))
+                if 'ObservedStableValue' in messagedict:
+                    dictofsecs.setdefault(dateformatted, []).append(
+                        float(messagedict['ObservedStableValue']))
+        except (KeyError, ValueError) as exception:
+            print(exception)
+    return dictofsecs
+
+
+def averagepersec(dictoftimes: dict, shouldprint: bool = False) -> dict:
+    """Average each second's readings; optionally print each average."""
+    dictofsecs = {}
+    for key, value in dictoftimes.items():
+        dictofsecs[key] = average(value)
+        if shouldprint:
+            print(dictofsecs[key])
+    return dictofsecs
+
+
+if __name__ == "__main__":
+    filelines = readfile()
+    config = readconfigdates()
+    # readconfigdates keys from 0, so config[0]/config[1] exist.
+    fitdates = procesfile(filelines, config[0], config[1])
+    averageofdict = averagepersec(fitdates, True)
diff --git a/benchmark/classic/data/process.py b/benchmark/classic/data/process.py
index f5b283f..b1ae051 100644
--- a/benchmark/classic/data/process.py
+++ b/benchmark/classic/data/process.py
@@ -42,7 +42,7 @@ def processFile(fname):
         #print("Minimum response time was ", minResponse)
     else:
         print("csv is empty")
-    #pprint(responseCodes)
+    # pprint(responseCodes)
     for sec in responsePerSec:
         if len(responsePerSec[sec]) != 0:
             #print(sec, ":")
@@ -50,7 +50,7 @@ def processFile(fname):
             #print(" Minimum:", min(responsePerSec[sec]))
             #print(" Num of responses:", len(responsePerSec[sec]))
             print(len(responsePerSec[sec]))
-        #else:
+        # else:
             #print(" empty")
 
 
diff --git a/benchmark/classic/wacher.sh b/benchmark/classic/wacher.sh
index 04af65d..371b695 100644
--- a/benchmark/classic/wacher.sh
+++ b/benchmark/classic/wacher.sh
@@ -5,6 +5,6 @@ function=hello-kubeless-go-sc
 for (( i = 1 ; i <= $time ; i++ ));
 do
 	kubectl get deployment | grep $function | awk '{print $2}' | cut -d/ -f1 >> ./data/$function.monitor.csv
-	echo '\n' >> ./data/$function.monitor.csv
+#	echo '\n' >> ./data/$function.monitor.csv
 	sleep 1
 done