Implemented model-service integration
All checks were successful
continuous-integration/drone/push Build is passing
All checks were successful
continuous-integration/drone/push Build is passing
This commit is contained in:
parent
d7ae58c2d1
commit
94b5066b16
@ -1,13 +1,16 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
import json
|
|
||||||
from json import JSONEncoder
|
|
||||||
import numpy
|
|
||||||
import os
|
import os
|
||||||
import os.path
|
import os.path
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
import json
|
||||||
|
import tempfile
|
||||||
|
from json import JSONEncoder
|
||||||
|
|
||||||
import requests
|
import requests
|
||||||
from pyAudioAnalysis import audioBasicIO
|
from pyAudioAnalysis import audioBasicIO
|
||||||
from pyAudioAnalysis import ShortTermFeatures
|
from pyAudioAnalysis import MidTermFeatures
|
||||||
|
import numpy
|
||||||
|
|
||||||
|
|
||||||
class NumpyArrayEncoder(JSONEncoder):
|
class NumpyArrayEncoder(JSONEncoder):
|
||||||
@ -18,19 +21,48 @@ class NumpyArrayEncoder(JSONEncoder):
|
|||||||
|
|
||||||
|
|
||||||
def do_extraction(file_path: str):
    """Extract mid-term audio features from the file at *file_path*.

    Fetches the default model's extraction parameters from model-service,
    reads the audio, downmixes it to mono, and returns the long-term
    average of the mid-term feature matrix (a 1-D numpy array, optionally
    extended with beat features when the model requests them).

    Raises:
        requests.HTTPError: if model-service answers with an error status.
        Exception: if the audio file cannot be read properly (zero sampling
            rate, or a clip shorter than one mid-term window).
    """
    logging.info("Getting default model details...")
    # Explicit timeout: requests has no default, so a hung model-service
    # would otherwise block this worker forever.
    r = requests.get("http://model-service/model/$default/details", timeout=30)
    r.raise_for_status()
    model_details = r.json()

    logging.info("Running extraction...")
    sampling_rate, signal = audioBasicIO.read_audio_file(file_path)
    signal = audioBasicIO.stereo_to_mono(signal)
    if sampling_rate == 0:
        raise Exception("Could not read the file properly: Sampling rate zero")
    # The clip must be at least one mid-term window long, otherwise the
    # feature extraction below would produce no frames.
    if signal.shape[0] / float(sampling_rate) <= model_details['mid_window']:
        raise Exception("Could not read the file properly: Signal shape is not good")

    # feature extraction: window/step sizes come from the model details
    # (given in seconds) and are converted to samples here.
    mid_features, short_features, _ = \
        MidTermFeatures.mid_feature_extraction(signal, sampling_rate,
                                               model_details['mid_window'] * sampling_rate,
                                               model_details['mid_step'] * sampling_rate,
                                               round(sampling_rate * model_details['short_window']),
                                               round(sampling_rate * model_details['short_step']))

    # long term averaging of mid-term statistics
    mid_features = mid_features.mean(axis=1)
    if model_details['compute_beat']:
        beat, beat_conf = MidTermFeatures.beat_extraction(short_features, model_details['short_step'])
        mid_features = numpy.append(mid_features, beat)
        mid_features = numpy.append(mid_features, beat_conf)

    #feature_vector = (mid_features - mean) / std # normalization
    return mid_features
||||||
|
|
||||||
|
|
||||||
def run_everything(parameters: dict):
    """Download the sample identified by ``parameters['tag']`` and process it."""
    tag = parameters['tag']
    logging.info(f"Downloading sample: {tag}")

    # BUG FIX: tempfile.mktemp() returns a single path *string*, so the old
    # `_, file_path = tempfile.mktemp(...)` unpacking raised ValueError at
    # runtime (and mktemp is deprecated and race-prone anyway).  mkstemp()
    # is the intended call: it atomically creates the file and returns
    # (fd, path).
    # NOTE(review): dir="extractor-service" is a *relative* directory; the
    # previous revision used "/tmp/extractor-service/" — confirm which is
    # intended and that the directory exists before this runs.
    fd, file_path = tempfile.mkstemp(prefix=f"{tag}_", suffix=".wav", dir="extractor-service")
    os.close(fd)  # re-opened by path below; don't leak the descriptor
    r = requests.get(f"http://storage-service/object/{tag}")
    with open(file_path, 'wb') as f:
        f.write(r.content)
|
||||||
@ -41,7 +73,7 @@ def run_everything(parameters: dict):
|
|||||||
finally:
|
finally:
|
||||||
os.remove(file_path)
|
os.remove(file_path)
|
||||||
|
|
||||||
logging.info(f"Pushing results to AI service...")
|
logging.info(f"Pushing results to Classifier service...")
|
||||||
|
|
||||||
response = {
|
response = {
|
||||||
"tag": tag,
|
"tag": tag,
|
||||||
@ -50,6 +82,6 @@ def run_everything(parameters: dict):
|
|||||||
|
|
||||||
logging.debug(f"Data being pushed: {str(response)}")
|
logging.debug(f"Data being pushed: {str(response)}")
|
||||||
|
|
||||||
# r = requests.post('http://ai-service/asd', data=json.dumps(results, cls=NumpyArrayEncoder), headers={'Content-Type': 'application/json'})
|
r = requests.post('http://classification-service/classify', data=json.dumps(results, cls=NumpyArrayEncoder), headers={'Content-Type': 'application/json'})
|
||||||
|
#r.raise_for_status() # An error in a service should not kill other services
|
||||||
# r.raise_for_status()
|
logging.info(f"Classification service response: {r.status_code}")
|
||||||
|
Reference in New Issue
Block a user