Compare commits

21 Commits

SHA1        Message                                                                     Date                         drone CI
cbaf2f2981  change path                                                                 2021-08-16 15:19:51 +02:00   passing
3d21e3a543  jomáhát                                                                     2021-08-11 13:22:49 +02:00   passing
2030230258  oopsie woopsie                                                              2021-08-11 13:07:29 +02:00   passing
f90571afc3  Fixed stuff                                                                 2021-08-11 12:56:56 +02:00   passing
459b3fa6df  Added pagination features                                                   2021-08-11 12:10:42 +02:00   passing
9c3f8c65fb  Added injetion to outgoing messages                                         2021-08-10 15:13:23 +02:00   passing
00e9d02478  Added more spans                                                            2021-08-10 14:40:57 +02:00   -
98234f0e8a  Merge branch 'master' of ssh://git.kmlabz.com:2222/birbnetes/input-service  2021-08-10 14:16:38 +02:00   passing
4e3efb7295  Added basic tracing                                                         2021-08-10 14:16:32 +02:00   -
9bfedf0090  remove workers and threads                                                  2021-08-09 11:34:23 +02:00   passing
3d423c71c6  Changed the order of things ("also fixed spelling of amqp")                 2021-07-28 15:13:44 +02:00   passing
414330b3d5  Lowered warning tresholds                                                   2021-07-26 17:07:55 +02:00   passing
67c5d723ca  Fixed log handler                                                           2021-07-26 17:06:15 +02:00   passing
a844a13608  Added more error handling and reporting                                     2021-07-26 17:01:10 +02:00   passing
ba69b9c2b1  Fixed indentation                                                           2021-07-26 16:19:05 +02:00   passing
eb7f6498ab  eh?                                                                         2021-07-26 15:36:17 +02:00   passing
57b757cb41  Added amqp to health check                                                  2021-07-26 15:24:17 +02:00   passing
e64137ca56  Did stuff with rabbitmq                                                     2021-07-26 15:18:08 +02:00   passing
f15517af62  made InfluxDB optional                                                      2021-07-26 12:51:52 +02:00   passing
3c10a351ba  Merge branch 'influx'                                                       2021-07-26 12:45:01 +02:00   -
30525ac967  Small code cleanups                                                         2021-07-26 12:32:36 +02:00   failing

(The drone CI column is the continuous-integration/drone/push status reported for each commit; "-" means no status was recorded. Commit messages are reproduced verbatim.)
9 changed files with 434 additions and 152 deletions

Dockerfile

@@ -16,4 +16,4 @@ COPY ./src .
EXPOSE 8080

-ENTRYPOINT ["gunicorn", "-b", "0.0.0.0:8080", "--workers", "1", "--threads", "1", "app:app"]
+ENTRYPOINT ["gunicorn", "-b", "0.0.0.0:8080", "app:app"]

requirements.txt

@@ -1,18 +1,24 @@
sentry_sdk[flask]
-gunicorn
-Flask
-Flask-RESTful
-requests
+gunicorn~=20.1.0
+Flask~=2.0.1
+Flask-RESTful~=0.3.9
+requests~=2.26.0
werkzeug
-sqlalchemy
-flask_sqlalchemy
-xeger
-pika
+sqlalchemy~=1.4.22
+flask_sqlalchemy~=2.5.1
+xeger~=0.3.5
+pika~=1.2.0
psycopg2-binary
-marshmallow
-marshmallow-sqlalchemy
+marshmallow~=3.13.0
+marshmallow-sqlalchemy~=0.26.1
flask-marshmallow
py-healthcheck
Flask-InfluxDB
tzdata
-tzlocal
+tzlocal
+apscheduler~=3.7.0
+opentracing~=2.4.0
+jaeger-client
+requests-opentracing
+Flask-Opentracing
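
Note: the ~= specifiers added here are PEP 440 "compatible release" pins, so ~=2.0.1 admits patch upgrades but not the next minor release. A quick, self-contained way to check what a pin admits (the packaging library is used only for this illustration; it is not a dependency of this service):

    # illustration only; `pip install packaging` first, it is not in requirements.txt
    from packaging.specifiers import SpecifierSet

    spec = SpecifierSet("~=2.0.1")   # equivalent to >=2.0.1,<2.1.0
    print("2.0.9" in spec)           # True: patch upgrades are allowed
    print("2.1.0" in spec)           # False: minor bumps are excluded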

src/app.py

@@ -6,15 +6,25 @@ from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
from healthcheck import HealthCheck
-from config import *
+from config import Config
from db import db
from marshm import ma
from influxus import influx_db
from resources import SampleResource, SampleParameterResource
-from healthchecks import health_database_status
+from healthchecks import health_database_status, amqp_connection_status
+import atexit
+from apscheduler.schedulers.background import BackgroundScheduler
+from magic_amqp import magic_amqp
+from error_handlers import register_all_error_handlers
+import jaeger_client
+import opentracing
+from flask_opentracing import FlaskTracing

"""
-Main Flask RESTful APIm
+Main Flask RESTful API
"""

__author__ = "@tormakris"
@@ -22,44 +32,66 @@ __copyright__ = "Copyright 2020, Birbnetes Team"
__module_name__ = "app"
__version__text__ = "1"

-if SENTRY_DSN:
+if Config.SENTRY_DSN:
    sentry_sdk.init(
-        dsn=SENTRY_DSN,
+        dsn=Config.SENTRY_DSN,
        integrations=[FlaskIntegration(), SqlalchemyIntegration()],
-        traces_sample_rate=1.0,
+        traces_sample_rate=0.0,
        send_default_pii=True,
-        release=RELEASE_ID,
-        environment=RELEASEMODE,
+        release=Config.RELEASE_ID,
+        environment=Config.RELEASEMODE,
        _experiments={"auto_enabling_integrations": True}
    )

app = Flask(__name__)
-app.config[
-    'SQLALCHEMY_DATABASE_URI'] = f"postgresql://{POSTGRES_USERNAME}:{POSTGRES_PASSWORD}@{POSTGRES_HOSTNAME}:5432/{POSTGRES_DB}{POSTGRES_OPTS}"
-app.config['EXCHANGE_NAME'] = RABBITMQ_EXCHANGE
-app.config['FLASK_PIKA_PARAMS'] = {'host': RABBITMQ_HOST,
-                                   'username': RABBITMQ_USERNAME,
-                                   'password': RABBITMQ_PASSWORD,
-                                   'port': 5672,
-                                   'virtual_host': '/'}
-app.config['INFLUXDB_HOST'] = INFLUXDB_HOST
-app.config['INFLUXDB_PORT'] = INFLUXDB_PORT
-app.config['INFLUXDB_USER'] = INFLUXDB_USERNAME
-app.config['INFLUXDB_PASSWORD'] = INFLUXDB_PASSWORD
-app.config['INFLUXDB_DATABASE'] = INFLUXDB_DB
+app.config.from_object(Config)

api = Api(app)
health = HealthCheck()
db.init_app(app)
ma.init_app(app)
-influx_db.init_app(app)

-with app.app_context():
-    # influx_db.database.create(INFLUXDB_DB)
+# ampq magic stuff
+magic_amqp.init_app(app)
+ampq_loop_scheduler = BackgroundScheduler()
+ampq_loop_scheduler.add_job(func=lambda: magic_amqp.loop(), trigger="interval", seconds=5)
+atexit.register(lambda: ampq_loop_scheduler.shutdown())
+ampq_loop_scheduler.start()
+
+if Config.ENABLE_INFLUXDB:
+    influx_db.init_app(app)
+
+
+@app.before_first_request
+def init_db():
+    if Config.ENABLE_INFLUXDB:
+        influx_db.database.create(Config.INFLUXDB_DATABASE)
    db.create_all()

-api.add_resource(SampleResource, "/sample")
-api.add_resource(SampleParameterResource, '/sample/<tag>')
+# Setup tracing
+def initialize_tracer():
+    app.logger.info("Initializing jaeger...")
+    jaeger_cfg = jaeger_client.Config(config={}, service_name='input-service', validate=True)
+    tracer = jaeger_cfg.initialize_tracer()
+    return tracer
+
+
+tracing = FlaskTracing(initialize_tracer, True, app)
+
+api.add_resource(SampleResource, "/input")
+api.add_resource(SampleParameterResource, '/input/<tag>')

health.add_check(health_database_status)
+health.add_check(amqp_connection_status)
+
+register_all_error_handlers(app)

app.add_url_rule("/healthz", "healthcheck", view_func=lambda: health.run())

if __name__ != '__main__':
    import logging
    gunicorn_logger = logging.getLogger('gunicorn.error')
    app.logger.handlers = gunicorn_logger.handlers
    app.logger.setLevel(gunicorn_logger.level)
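
For context on the app.config.from_object(Config) switch above: Flask copies every attribute with an all-uppercase name from the given object into app.config, which is why the keys the extensions expect (SQLALCHEMY_DATABASE_URI, FLASK_PIKA_PARAMS, the INFLUXDB_* values) are now plain class attributes of Config. A minimal sketch with assumed demo values, not the service's real config:

    from flask import Flask

    class DemoConfig:
        SQLALCHEMY_DATABASE_URI = "postgresql://u:p@localhost:5432/db"  # copied
        helper_value = "ignored"  # lowercase names are skipped by from_object

    app = Flask(__name__)
    app.config.from_object(DemoConfig)
    assert app.config["SQLALCHEMY_DATABASE_URI"].startswith("postgresql://")
    assert "helper_value" not in app.config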

src/config.py

@@ -1,42 +1,48 @@
#!/usr/bin/env python3
import os

"""
Main Flask RESTful API
"""

__author__ = "@tormakris"
__copyright__ = "Copyright 2020, Birbnetes Team"
__module_name__ = "app"
__version__text__ = "1"

-PORT = os.environ.get("INPUT_SERVICE_PORT", 8080)
-DEBUG = os.environ.get("INPUT_SERVICE_DEBUG", True)
-SENTRY_DSN = os.environ.get("SENTRY_DSN")
-RELEASE_ID = os.environ.get("RELEASE_ID", "test")
-RELEASEMODE = os.environ.get("INPUT_SERVICE_RELEASEMODE", "dev")
-RABBITMQ_HOST = os.getenv("INPUT_RABBITMQ_HOSTNAME", "localhost")
-RABBITMQ_EXCHANGE = os.getenv("INPUT_RABBITMQ_EXCHANGE", "dev")
-RABBITMQ_QUEUE = os.getenv("INPUT_RABBITMQ_QUEUE", "wave-extract")
-RABBITMQ_USERNAME = os.getenv("INPUT_RABBITMQ_USERNAME", "rabbitmq")
-RABBITMQ_PASSWORD = os.getenv("INPUT_RABBITMQ_PASSWORD", "rabbitmq")
-POSTGRES_HOSTNAME = os.getenv("INPUT_POSTGRES_HOSTNAME", "localhost")
-POSTGRES_USERNAME = os.getenv("INPUT_POSTGRES_USERNAME", "input-service")
-POSTGRES_PASSWORD = os.getenv("INPUT_POSTGRES_PASSWORD", "input-service")
-POSTGRES_DB = os.getenv("INPUT_POSTGRES_DB", "input-service")
-POSTGRES_OPTS = os.getenv("INPUT_POSTGRES_OPTS", "")
-INFLUXDB_HOST = os.getenv("INFLUX_HOST", "input-influx")
-INFLUXDB_PORT = os.getenv("INFLUX_PORT", "8086")
-INFLUXDB_USERNAME = os.getenv("INFLUX_USERNAME", "input-service")
-INFLUXDB_PASSWORD = os.getenv("INFLUX_PASSWORD", "input-service-supersecret")
-INFLUXDB_DB = os.getenv("INFLUX_DB", "input-service")
-STORAGE_HOSTNAME = os.getenv("INPUT_STORAGE_HOSTNAME", "localhost:8042")
+_POSTGRES_HOSTNAME = os.getenv("INPUT_POSTGRES_HOSTNAME", "localhost")
+_POSTGRES_USERNAME = os.getenv("INPUT_POSTGRES_USERNAME", "input-service")
+_POSTGRES_PASSWORD = os.getenv("INPUT_POSTGRES_PASSWORD", "input-service")
+_POSTGRES_DB = os.getenv("INPUT_POSTGRES_DB", "input-service")
+_POSTGRES_OPTS = os.getenv("INPUT_POSTGRES_OPTS", "")
+
+
+class Config:
+    PORT = 8080
+    DEBUG = os.environ.get("INPUT_SERVICE_DEBUG", "true").lower() in ["true", "yes", "1"]
+
+    SENTRY_DSN = os.environ.get("SENTRY_DSN")
+    RELEASE_ID = os.environ.get("RELEASE_ID", "test")
+    RELEASEMODE = os.environ.get("INPUT_SERVICE_RELEASEMODE", "dev")
+
+    EXCHANGE_NAME = os.getenv("INPUT_RABBITMQ_EXCHANGE", "dev")
+    RABBITMQ_QUEUE = os.getenv("INPUT_RABBITMQ_QUEUE", "wave-extract")
+    STORAGE_HOSTNAME = os.getenv("INPUT_STORAGE_HOSTNAME", "localhost:8042")
+    FLASK_PIKA_PARAMS = {
+        'host': os.getenv("INPUT_RABBITMQ_HOSTNAME", "localhost"),
+        'username': os.getenv("INPUT_RABBITMQ_USERNAME", "rabbitmq"),
+        'password': os.getenv("INPUT_RABBITMQ_PASSWORD", "rabbitmq"),
+        'port': int(os.getenv("INPUT_RABBITMQ_PORT", 5672)),
+        'virtual_host': '/'
+    }
+
+    SQLALCHEMY_DATABASE_URI = f"postgresql://{_POSTGRES_USERNAME}:{_POSTGRES_PASSWORD}@{_POSTGRES_HOSTNAME}:5432/{_POSTGRES_DB}{_POSTGRES_OPTS}"
+
+    ENABLE_INFLUXDB = os.environ.get("INPUT_ENABLE_INFLUX", "true").lower() in ["true", "yes", "1"]
+    INFLUXDB_HOST = os.getenv("INFLUX_HOST", "input-influx")
+    INFLUXDB_PORT = os.getenv("INFLUX_PORT", "8086")
+    INFLUXDB_USER = os.getenv("INFLUX_USERNAME", "input-service")
+    INFLUXDB_PASSWORD = os.getenv("INFLUX_PASSWORD", "input-service-supersecret")
+    INFLUXDB_DATABASE = os.getenv("INFLUX_DB", "input-service")
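
The rewritten DEBUG line also fixes a subtle bug: the old os.environ.get("INPUT_SERVICE_DEBUG", True) returned the raw string from the environment, and any non-empty string (including "false") is truthy in Python. The new in-list comparison parses the value explicitly. In isolation, with assumed demo values:

    import os

    def env_flag(name: str, default: str = "true") -> bool:
        # same parsing strategy as Config.DEBUG and Config.ENABLE_INFLUXDB above
        return os.environ.get(name, default).lower() in ["true", "yes", "1"]

    os.environ["INPUT_SERVICE_DEBUG"] = "false"
    print(bool(os.environ.get("INPUT_SERVICE_DEBUG", True)))  # True (the old bug)
    print(env_flag("INPUT_SERVICE_DEBUG"))                    # False (intended)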

src/error_handlers.py (new file, 18 lines)

@@ -0,0 +1,18 @@
#!/usr/bin/env python3


def get_standard_error_handler(code: int):
    def error_handler(err):
        return {"msg": str(err)}, code

    return error_handler


# function to register all handlers
def register_all_error_handlers(app):
    error_codes_to_override = [404, 403, 401, 405, 400, 409, 422, 500]

    for code in error_codes_to_override:
        app.register_error_handler(code, get_standard_error_handler(code))
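
Effect of the new module: the listed HTTP errors come back as JSON bodies instead of Flask's default HTML error pages (Flask 1.1+ serializes dict return values automatically). A throwaway demo, assuming the module is importable as error_handlers:

    from flask import Flask, abort
    from error_handlers import register_all_error_handlers

    app = Flask(__name__)
    register_all_error_handlers(app)

    @app.route("/boom")
    def boom():
        abort(404)

    with app.test_client() as client:
        response = client.get("/boom")
        print(response.status_code)  # 404
        print(response.get_json())   # {"msg": "404 Not Found: ..."}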

src/healthchecks.py

@@ -1,6 +1,7 @@
#!/usr/bin/env python3
from db import db
+from magic_amqp import magic_amqp

"""
Healthchek functions
@@ -21,3 +22,14 @@ def health_database_status():
        output = str(e)
        is_database_working = False
    return is_database_working, output
+
+
+def amqp_connection_status():
+    if magic_amqp.is_healthy():
+        result = True
+        text = "amqp connection is ok"
+    else:
+        result = False
+        text = "amqp connection is unhealthy"
+    return result, text
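
Both checks follow the py-healthcheck convention: each check returns a (passed, message) tuple, and HealthCheck.run() aggregates them into a (body, status_code, headers) tuple that the /healthz route in app.py returns directly. A minimal self-contained sketch of that convention:

    from healthcheck import HealthCheck

    def always_ok():
        return True, "ok"   # same shape as amqp_connection_status above

    health = HealthCheck()
    health.add_check(always_ok)
    body, status_code, headers = health.run()
    print(status_code)  # 200 while every registered check passes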

src/magic_amqp.py (new file, 132 lines)

@@ -0,0 +1,132 @@
from flask import Flask
from threading import Lock
import pika
import pika.exceptions
import json
import time
import opentracing
from opentracing.ext import tags
from opentracing.propagation import Format


class MagicAMQP:
    """
    This is my pathetic attempt to make RabbitMQ connection in a Flask app reliable and performant.
    """

    def __init__(self, app: Flask = None):
        self.app = app
        if app:
            self.init_app(app)
        self._lock = Lock()
        self._credentials = None

    def init_app(self, app: Flask):
        self.app = app
        self.app.config.setdefault('FLASK_PIKA_PARAMS', {})
        self.app.config.setdefault('EXCHANGE_NAME', None)
        self.app.config.setdefault('RABBITMQ_QUEUE', None)
        self._credentials = pika.PlainCredentials(
            app.config['FLASK_PIKA_PARAMS']['username'],
            app.config['FLASK_PIKA_PARAMS']['password']
        )
        self._reconnect_ampq()

    def _reconnect_ampq(self):
        self._pika_connection = pika.BlockingConnection(
            pika.ConnectionParameters(
                host=self.app.config['FLASK_PIKA_PARAMS']['host'],
                credentials=self._credentials,
                heartbeat=10,
                socket_timeout=5)
        )
        self._pika_channel = self._pika_connection.channel()
        self._pika_channel.exchange_declare(
            exchange=self.app.config['EXCHANGE_NAME'],
            exchange_type='direct'
        )

    def loop(self):
        """
        This method should be called periodically to keep up the connection
        """
        lock_start = time.time()
        with self._lock:
            lock_acquire_time = time.time() - lock_start
            if lock_acquire_time >= 0.5:
                self.app.logger.warning(f"Loop: Lock acquire took {lock_acquire_time:5f} sec")
            try:
                self._pika_connection.process_data_events(0)
                # We won't attempt retry if this fail
            except pika.exceptions.AMQPConnectionError as e:
                self.app.logger.warning(f"Connection error during process loop: {e} (attempting reconnect)")
                self._reconnect_ampq()

        total_time = time.time() - lock_start
        if total_time > 1:
            self.app.logger.warning(f"Loop: Total loop took {total_time:5f} sec")

    def publish(self, payload=None):
        """
        Publish a simple json serialized message to the configured queue.
        If the connection is broken, then this call will block until the connection is restored
        """
        span_tags = {tags.SPAN_KIND: tags.SPAN_KIND_PRODUCER}
        with opentracing.tracer.start_active_span('magic_amqp.publish', tags=span_tags) as scope:
            opentracing.tracer.inject(scope.span.context, Format.TEXT_MAP, payload)
            lock_start = time.time()
            with self._lock:
                scope.span.log_kv({'event': 'lockAcquired'})
                lock_acquire_time = time.time() - lock_start
                if lock_acquire_time >= 0.2:
                    self.app.logger.warning(f"Publish: Lock acquire took {lock_acquire_time:5f} sec")

                tries = 0
                while True:
                    try:
                        self._pika_channel.basic_publish(
                            exchange=self.app.config['EXCHANGE_NAME'],
                            routing_key='feature',
                            body=json.dumps(payload).encode('UTF-8')
                        )
                        self.app.logger.debug(f"Published: {payload}")
                        break  # message sent successfully
                    except pika.exceptions.AMQPConnectionError as e:
                        scope.span.log_kv({'event': 'connectionError', 'error': str(e)})
                        self.app.logger.warning(f"Connection error during publish: {e} (attempting reconnect)")
                        if tries > 30:
                            raise  # just give up
                        while True:
                            try:
                                self._reconnect_ampq()
                                break
                            except pika.exceptions.AMQPConnectionError as e:
                                self.app.logger.warning(
                                    f"Connection error during reconnection: {e} (attempting reconnect)")
                                tries += 1
                                if tries > 30:
                                    raise  # just give up
                                if tries > 10:
                                    time.sleep(2)

            total_time = time.time() - lock_start
            if total_time > 0.4:
                self.app.logger.warning(f"Publish: Total publish took {total_time:5f} sec")

    def is_healthy(self) -> bool:
        with self._lock:
            if not self._pika_channel:
                return False

            return self._pika_channel.is_open and self._pika_connection.is_open


# instance to be used in the flask app
magic_amqp = MagicAMQP()
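
One detail worth noting: publish() injects the current span context directly into the JSON payload (Format.TEXT_MAP with the payload dict as the carrier), so the trace id travels inside the message body; with the Jaeger client this typically shows up as an extra key such as uber-trace-id. A hypothetical consumer (no consumer is part of this change) could resume the trace like this:

    import json
    import opentracing
    from opentracing.ext import tags
    from opentracing.propagation import Format

    def on_message(body: bytes):
        payload = json.loads(body)
        # pull the producer's span context back out of the payload
        span_ctx = opentracing.tracer.extract(Format.TEXT_MAP, payload)
        span_tags = {tags.SPAN_KIND: tags.SPAN_KIND_CONSUMER}
        with opentracing.tracer.start_active_span('consume', child_of=span_ctx, tags=span_tags):
            ...  # process payload['tag']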

src/resources.py

@@ -1,17 +1,19 @@
#!/usr/bin/env python3
import json
+import time
from datetime import datetime
import tzlocal
from xeger import Xeger
from flask_restful import Resource
-from flask import request, current_app
-import requests
-import pika
+from flask import request, current_app, abort
+from magic_amqp import magic_amqp
from db import db
from influxus import influx_db
from models import SampleMetadata
from schemas import SampleSchema, SampleMetadataSchema
-from config import *
+from requests_opentracing import SessionTracing
+import opentracing

"""
Flask Restful endpoints
@@ -37,99 +39,111 @@ class SampleResource(Resource):
        Post request send to the endpoint
        :return:
        """
-        if 'file' not in request.files:
-            return {"err_msg": "no file found"}, 469
-        else:
-            soundfile = request.files['file']
-
-        if 'description' not in request.form:
-            return {"err_msg": "no description found"}, 470
-        else:
-            description = request.form.get("description")
-
-        if soundfile.content_type != 'audio/wave':
-            current_app.logger.info(
-                f"Input file was not WAV.")
-            return {'err_msg': 'Input file not a wave file.'}, 415
-
-        try:
-            desc = self.sampleschema.loads(description)
-        except Exception as e:
-            current_app.logger.exception(e)
-            return {'err_msg': 'Input JSON schema invalid'}, 417
-
-        xeger = Xeger(limit=30)
-        while True:
-            generated_tag = xeger.xeger(r'^[a-zA-Z]+[0-9a-zA-Z_]*$')[:32]
-            if len(generated_tag) > 2:  # Ensure minimum length
-                break
+        with opentracing.tracer.start_active_span('parseAndValidate'):
+            if 'file' not in request.files:
+                return abort(400, "no file found")
+            else:
+                soundfile = request.files['file']
+
+            if 'description' not in request.form:
+                return abort(400, "no description found")
+            else:
+                description = request.form.get("description")
+
+            if soundfile.content_type != 'audio/wave':
+                current_app.logger.info(f"Input file was not WAV.")
+                return abort(415, 'Input file not a wave file.')
+
+            try:
+                desc = self.sampleschema.loads(description)
+            except Exception as e:
+                current_app.logger.exception(e)
+                return abort(417, 'Input JSON schema invalid')
+
+        with opentracing.tracer.start_active_span('generateTag'):
+            xeger = Xeger(limit=30)
+            while True:
+                generated_tag = xeger.xeger(r'^[a-zA-Z]+[0-9a-zA-Z_]*$')[:32]
+                if len(generated_tag) > 2:  # Ensure minimum length
+                    break

        # Handle mega-autismo-cliento
        soundfile_content_length = soundfile.content_length
        if soundfile_content_length <= 0:  # BRUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUUH
-            current_app.logger.debug(
-                "The uploader did not provide content-length for the sound file... Calculating manually..."
-            )
-            # So, this is a seekable stream, so we just seek to the end
-            old_ptr = soundfile.tell()
-            soundfile.seek(0, 2)
-            # Check where is the end (= content length)
-            soundfile_content_length = soundfile.tell()
-            # Seek back to where the stream was
-            soundfile.seek(old_ptr, 0)
-            # It's insane, that you can not set this field in curl
+            with opentracing.tracer.start_active_span(
+                    'calculateContentLength'):  # In an ideal scenario this span is missing
+                current_app.logger.debug(
+                    "The uploader did not provide content-length for the sound file... Calculating manually..."
+                )
+                # So, this is a seekable stream, so we just seek to the end
+                old_ptr = soundfile.tell()
+                soundfile.seek(0, 2)
+                # Check where is the end (= content length)
+                soundfile_content_length = soundfile.tell()
+                # Seek back to where the stream was
+                soundfile.seek(old_ptr, 0)
+                # It's insane, that you can not set this field in curl

-        record = SampleMetadata(
-            device_id=desc['device_id'],
-            device_date=desc['date'],
-            tag=generated_tag)
-        try:
-            db.session.add(record)
-            requests.post(
-                f"http://{STORAGE_HOSTNAME}/object",
-                files={
-                    'description': (None, json.dumps({'tag': generated_tag}), 'application/json'),
-                    'soundFile': (
-                        'wave.wav',
-                        soundfile,
-                        soundfile.content_type,
-                        {'Content-Length': soundfile_content_length})}).raise_for_status()  # Anyádat curl am
-            credentials = pika.PlainCredentials(current_app.config['FLASK_PIKA_PARAMS']['username'],
-                                                current_app.config['FLASK_PIKA_PARAMS']['password'])
-            connection = pika.BlockingConnection(
-                pika.ConnectionParameters(host=current_app.config['FLASK_PIKA_PARAMS']['host'],
-                                          credentials=credentials,
-                                          heartbeat=0,
-                                          socket_timeout=5))
-            channel = connection.channel()
-            channel.exchange_declare(exchange=current_app.config['EXCHANGE_NAME'],
-                                     exchange_type='direct')
-            channel.basic_publish(exchange=current_app.config['EXCHANGE_NAME'],
-                                  routing_key='feature',
-                                  body=json.dumps({'tag': generated_tag}).encode('UTF-8'))
-            connection.close()
-            influx_db.write_points(
-                [
-                    {
-                        'time': datetime.now(tz=tzlocal.get_localzone()),
-                        'measurement': 'cloudinput',
-                        'tags': {
-                            'device': desc['device_id']
-                        },
-                        'fields': {
-                            'bruh': 1.0
-                        }
-                    }
-                ]
-            )
-        except Exception as e:
-            current_app.logger.exception(e)
-            db.session.rollback()
-            return {"err_msg": str(
-                e), "hint": "DB or downstream service error"}, 569
-        db.session.commit()
+        with opentracing.tracer.start_active_span('sqlalchemy.create'):
+            record = SampleMetadata(
+                device_id=desc['device_id'],
+                device_date=desc['date'],
+                tag=generated_tag
+            )
+            db.session.add(record)
+
+        with opentracing.tracer.start_active_span('uploadToStorageService'):
+            files = {
+                'description': (None, json.dumps({'tag': generated_tag}), 'application/json'),
+                'soundFile': (
+                    'wave.wav',
+                    soundfile,
+                    soundfile.content_type,
+                    {'Content-Length': soundfile_content_length})}
+
+            upload_started = time.time()
+            r = SessionTracing(propagate=True).post(
+                f"http://{current_app.config.get('STORAGE_HOSTNAME')}/object",
+                files=files
+            )
+            upload_time = time.time() - upload_started
+            if upload_time > 0.8:
+                current_app.logger.warning(f"Uploading to storage-service took {upload_time:5} sec")
+
+            if r.status_code not in [200, 201]:
+                return abort(500,
+                             f"Failed to upload sample to storage service. Upstream status: {r.status_code}: {r.text}")
+
+        with opentracing.tracer.start_active_span('sqlalchemy.commit'):
+            db.session.commit()
+
+        # Announce only after the data is successfully committed
+        with opentracing.tracer.start_active_span('publishMessage'):
+            try:
+                magic_amqp.publish({'tag': generated_tag})
+            except Exception as e:
+                current_app.logger.exception(e)
+                return abort(500, f"AMQP Publish error: {str(e)}")
+
+        # metrics
+        if current_app.config['ENABLE_INFLUXDB']:
+            with opentracing.tracer.start_active_span('influxdb.write_points'):
+                influx_db.write_points(
+                    [
+                        {
+                            'time': datetime.now(tz=tzlocal.get_localzone()),
+                            'measurement': 'cloudinput',
+                            'tags': {
+                                'device': desc['device_id']
+                            },
+                            'fields': {
+                                'bruh': 1.0
+                            }
+                        }
+                    ]
+                )

        return {"tag": generated_tag}, 200
@@ -137,8 +151,68 @@ class SampleResource(Resource):
    def get(self):
        """
        Get all stored items
        :return:
        """
-        samples = SampleMetadata.query.all()
-        return self.samplemetadataschema.dump(list(samples)), 200
+        with opentracing.tracer.start_active_span('compileQuery'):
+            query = SampleMetadata.query
+
+            ## Compile filters ##
+            filters = []
+
+            try:
+                first = int(request.args.get('first'))
+            except (ValueError, TypeError):
+                first = None
+            else:
+                filters.append(
+                    SampleMetadata.id >= first
+                )
+
+            try:
+                after = datetime.fromisoformat(request.args.get('after'))
+            except (ValueError, TypeError):
+                after = None
+            else:
+                filters.append(
+                    SampleMetadata.timestamp > after
+                )
+
+            try:
+                before = datetime.fromisoformat(request.args.get('before'))
+            except (ValueError, TypeError):
+                before = None
+            else:
+                filters.append(
+                    SampleMetadata.timestamp < before
+                )
+
+            if filters:
+                query = query.filter(db.and_(*filters))
+
+            try:
+                limit = int(request.args.get('limit'))
+            except (ValueError, TypeError):
+                limit = None
+            else:
+                query = query.limit(limit)
+
+            ## Run query ##
+            count = "count" in request.args
+            tags = {
+                "first": first,
+                "limit": limit,
+                "after": after,
+                "before": before
+            }
+
+        if count:
+            with opentracing.tracer.start_active_span('sqlalchemy.count', tags=tags):
+                rows = query.count()
+            return {"count": rows}, 200
+        else:
+            with opentracing.tracer.start_active_span('sqlalchemy.select', tags=tags):
+                samples = query.all()
+            return self.samplemetadataschema.dump(list(samples)), 200
@@ -154,5 +228,7 @@ class SampleParameterResource(Resource):
        :param tag:
        :return:
        """
-        sample = SampleMetadata.query.filter_by(tag=tag).first_or_404()
+        with opentracing.tracer.start_active_span('sqlalchemy.select', tags={"tag": tag}):
+            sample = SampleMetadata.query.filter_by(tag=tag).first_or_404()

        return self.samplemetadataschema.dump(sample), 200
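
Example calls against the new listing filters (host and port assumed; the service defaults to port 8080, and the resource moved from /sample to /input in this change):

    import requests

    base = "http://localhost:8080/input"

    # samples with id >= 42, at most 10 returned
    print(requests.get(base, params={"first": 42, "limit": 10}).json())

    # only count the samples in a time window; after/before are ISO 8601,
    # and the mere presence of "count" in the query string triggers counting
    print(requests.get(base, params={
        "after": "2021-08-01T00:00:00",
        "before": "2021-08-11T00:00:00",
        "count": "",
    }).json())  # e.g. {"count": 3}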

src/schemas.py

@@ -34,5 +34,5 @@ class SampleMetadataSchema(ma.SQLAlchemyAutoSchema):
    """

    class Meta:
        model = SampleMetadata
-        exclude = ('timestamp', 'id', 'device_date',)
+        exclude = ('timestamp', 'id', 'device_date')

    date = auto_field("device_date", dump_only=False)