consumer-scheduler/app.py
#!/usr/bin/env python
import sentry_sdk
import time
import requests
import requests.exceptions
import os
import redis
import json
import logging
import socket
"""
2020-03-29 17:00:35 +02:00
Scheduler
2020-03-29 16:48:34 +02:00
"""
__author__ = "@kocsisr"
__copyright__ = "Copyright 2020, GoldenPogácsa Team"
__module_name__ = "app"
__version__text__ = "1"
sentry_sdk.init("https://0a106e104e114bc9a3fa47f9cb0db2f4@sentry.kmlabz.com/10")
class RedisSuperStorage:
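    """Thin wrapper around the Redis connection that holds the consumer registry
    (consumer_* keys), the producer registry (producer_* keys) and this
    instance's last known IP address (current_ip)."""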
def __init__(self, redis_url: str):
self.r = redis.from_url(redis_url)
def get_consumer_list(self) -> dict:
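        """Return the currently known consumers as a dict keyed by their uuid."""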
keys = self.r.keys('consumer_*')
list_of_customers = {}
for key in keys:
info = json.loads((self.r.get(key) or b"{}").decode('utf-8'))
if info:
list_of_customers[info['uuid']] = info
        return list_of_customers

    def get_producer_list(self) -> list:
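        """Return the list of currently known producer IP addresses."""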
keys = self.r.keys('producer_*')
list_of_producer_ip = []
for key in keys:
ip = (self.r.get(key) or b"").decode('utf-8')
if ip:
list_of_producer_ip.append(ip)
        return list_of_producer_ip

    def is_ip_changed(self) -> bool:
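        """Check whether the host's IP differs from the one stored in Redis; if so, store the new address and return True."""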
old_ip = self.r.get('current_ip')
if old_ip:
old_ip = old_ip.decode('utf-8')
host_name = socket.gethostname()
current_ip = socket.gethostbyname(host_name)
if current_ip != old_ip:
self.r.set('current_ip', current_ip.encode('utf-8'))
        return current_ip != old_ip

    def update_consumer(self, uuid: str, ip: str):
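        """Store (or refresh) a consumer entry with its uuid, IP and last-seen timestamp, expiring after CUSTOMER_TIMEOUT seconds."""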
cust_key = f"consumer_{uuid}"
info = {
"uuid": uuid,
"ip": ip,
"last_seen": time.time()
}
self.r.set(cust_key, json.dumps(info).encode('utf-8'))
        self.r.expire(cust_key, int(os.environ["CUSTOMER_TIMEOUT"]))
class Scheduler:
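    """Drives the periodic /sync requests towards the other consumers and pushes /ip updates to the producers whenever the local address changes."""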
    def __init__(self, redis_storage: RedisSuperStorage):
        # reuse the Redis connection owned by RedisSuperStorage; the methods below read and write it directly
        self.r = redis_storage.r
def request_first_sync(self, ip_list, consumer_list_redis):
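        """Ask every initially configured consumer to synchronize and record the ones that answered in Redis."""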
temp_dict = {}
for ip in ip_list:
try:
# request synchronization
response = requests.post(f"http://{ip}/sync", json={'uuid': os.environ['LOCAL_UUID']}, timeout=5)
except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
logging.error(f"Error while syncing to {ip}: {str(e)}")
continue
if response.status_code == 200:
temp_dict[response.json()['uuid']] = {'ip': ip}
consumer_list_redis.update(temp_dict)
self.r.set('consumer_list', json.dumps(consumer_list_redis).encode('utf-8'))
        logging.debug('Update redis consumers ip list from first answers: Done')

    def request_sync(self, consumer_list_redis):
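        """Ask every consumer already known from Redis to synchronize and update the stored registry with the answers."""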
temp_dict = {}
for uuid, info in consumer_list_redis.items():
ip = info['ip']
try:
# request synchronization
response = requests.post(f"http://{ip}/sync", json={'uuid': os.environ['LOCAL_UUID']}, timeout=5)
except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
logging.error(f"Error while syncing to {ip}: {str(e)}")
continue
if response.status_code == 200:
temp_dict[response.json()['uuid']] = {'ip': ip}
# update the dictionary of the currently available consumers
consumer_list_redis.update(temp_dict)
self.r.set('consumer_list', json.dumps(consumer_list_redis).encode('utf-8'))
        logging.debug('Update redis consumer ip list from answers: Done')

    def test_ip_change(self, host_name):
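        """Detect a change of the local IP address and, if it changed, push the new address to every known producer; then sleep until the next run."""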
old_ip = self.r.get('current_ip')
if old_ip:
old_ip = old_ip.decode('utf-8')
current_ip = socket.gethostbyname(host_name)
        if not old_ip:  # Not set yet. In this case no update is required
self.r.set('current_ip', current_ip.encode('utf-8'))
logging.debug(f"Previous info about the ip address could not be found! Current: {current_ip}")
elif old_ip != current_ip:
logging.info(f'IP changed: {old_ip} -> {current_ip} Pushing updates...')
# pushing updates...
keys = self.r.keys('producer_*')
            logging.debug('Pushing update to the following producers: ' + ', '.join(k.decode('utf-8') for k in keys))
for key in keys:
ip = self.r.get(key)
if ip:
ip = ip.decode('utf-8')
else:
continue
try:
response = requests.post(
f"http://{ip}/ip",
json={'uuid': os.environ['LOCAL_UUID'], 'ip': current_ip},
timeout=5
)
logging.debug(f"Pushed update to {key.decode('utf-8')} at {ip}. Response: {response.status_code}")
except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
logging.warning(f"Could not push update to {key.decode('utf-8')}: {str(e)}")
continue
else:
logging.debug(f'IP unchanged: {current_ip}')
logging.debug('Waiting for next turn')
        time.sleep(int(os.environ.get("RUN_INTERVAL", 30)))

    def get_ip_list(self):
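        """Return the initial consumer addresses from the INITIAL_SERVERS environment variable."""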
# set initial consumer addresses
ip_list = os.environ['INITIAL_SERVERS'].split(',')
logging.debug('Get consumer list from environ at first: Done')
        return ip_list


def main():
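    """Entry point: set up logging, Redis storage and the scheduler, then run the synchronization loop."""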
# set logging preferences
    logging.basicConfig(level=logging.DEBUG)
redis_storage = RedisSuperStorage(os.environ.get('REDIS_URL', "redis://localhost:6379/0"))
    scheduler = Scheduler(redis_storage)
    # get the dictionary of the currently available consumers
    consumer_list_redis = redis_storage.get_consumer_list()
    logging.debug('Get consumer list from redis at first: Done')

    # contact the initially configured consumers once to seed the registry
    ip_list = scheduler.get_ip_list()
    scheduler.request_first_sync(ip_list, consumer_list_redis)

    host_name = socket.gethostname()

    while True:
        logging.debug('Infinite cycle start')
        # get the dictionary of the currently available consumers
        consumer_list_redis = redis_storage.get_consumer_list()
        logging.debug('Get consumer list from redis: Done')
        scheduler.request_sync(consumer_list_redis)
        # check for a local ip change and push updates to the producers if needed
        scheduler.test_ip_change(host_name)
if __name__ == "__main__":
try:
main()
except KeyboardInterrupt:
pass