consumer-scheduler/app.py

#!/usr/bin/env python
import sentry_sdk
import time
import requests
import requests.exceptions
import os
import redis
import json
import logging
import socket

"""
Scheduler
"""

__author__ = "@kocsisr"
__copyright__ = "Copyright 2020, GoldenPogácsa Team"
__module_name__ = "app"
__version__text__ = "1"

sentry_sdk.init("https://0a106e104e114bc9a3fa47f9cb0db2f4@sentry.kmlabz.com/10")

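# Expected environment variables (as read below):
#   REDIS_URL        - Redis connection URL (optional, defaults to redis://localhost:6379/0)
#   INITIAL_SERVERS  - comma-separated list of consumer addresses, e.g. host or host:port (required)
#   LOCAL_UUID       - UUID identifying this instance towards consumers and producers (required)
#   RUN_INTERVAL     - seconds to sleep between synchronization rounds (optional, defaults to 30)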
def main():
    # set logging preferences (log to stderr; an empty filename would make basicConfig fail)
    logging.basicConfig(level=logging.DEBUG)

    # connect to redis
    r = redis.from_url(os.environ.get('REDIS_URL', "redis://localhost:6379/0"))

    # set initial consumer addresses
    ip_list = os.environ['INITIAL_SERVERS'].split(',')
    logging.debug('Get consumer list from environ at first: Done')

    # get the dictionary of the currently available consumers
    consumer_list_redis = json.loads((r.get('consumer_list') or b'{}').decode('utf-8'))
    logging.debug('Get consumer list from redis at first: Done')

    temp_dict = {}
    host_name = socket.gethostname()

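    # Assumed /sync contract (inferred from the response handling below): the consumer
    # replies with HTTP 200 and a JSON body containing its own 'uuid', which is then
    # mapped to the address it was reached at.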
    for ip in ip_list:
        try:
            # request synchronization
            response = requests.post(f"http://{ip}/sync", json={'uuid': os.environ['LOCAL_UUID']}, timeout=5)
        except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
            logging.error(f"Error while syncing to {ip}: {str(e)}")
            continue

        if response.status_code == 200:
            temp_dict[response.json()['uuid']] = {'ip': ip}

    consumer_list_redis.update(temp_dict)
    r.set('consumer_list', json.dumps(consumer_list_redis).encode('utf-8'))
    logging.debug('Update redis consumers ip list from first answers: Done')

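    # Redis keys used by this scheduler (as read and written in this file):
    #   consumer_list - JSON object mapping consumer uuid -> {'ip': '<address>'}
    #   current_ip    - last known IP address of this host
    #   producer_*    - one key per known producer, holding that producer's address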
    while True:
        logging.debug('Infinite Cycle start : Done')
        # get the dictionary of the currently available consumers
        consumer_list_redis = json.loads((r.get('consumer_list') or b'{}').decode('utf-8'))
        logging.debug('Get consumer list from redis: Done')

        temp_dict = {}
        for uuid, info in consumer_list_redis.items():
            ip = info['ip']
            try:
                # request synchronization
                response = requests.post(f"http://{ip}/sync", json={'uuid': os.environ['LOCAL_UUID']}, timeout=5)
            except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
                logging.error(f"Error while syncing to {ip}: {str(e)}")
                continue

            if response.status_code == 200:
                temp_dict[response.json()['uuid']] = {'ip': ip}

        # update the dictionary of the currently available consumers
        consumer_list_redis.update(temp_dict)
        r.set('consumer_list', json.dumps(consumer_list_redis).encode('utf-8'))
        logging.debug('Update redis consumer ip list from answers: Done')

        # check whether this host's IP address changed since the previous round
        old_ip = r.get('current_ip')

        if old_ip:
            old_ip = old_ip.decode('utf-8')

        current_ip = socket.gethostbyname(host_name)
        if not old_ip:  # not set yet; in this case no update is required
            r.set('current_ip', current_ip.encode('utf-8'))
            logging.debug(f"Previous info about the ip address could not be found! Current: {current_ip}")

        elif old_ip != current_ip:
            logging.info(f'IP changed: {old_ip} -> {current_ip} Pushing updates...')
            r.set('current_ip', current_ip.encode('utf-8'))

            # push the new address to every known producer
            keys = r.keys('producer_*')
            logging.debug('Pushing update to the following producers: ' + ', '.join(k.decode('utf-8') for k in keys))
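            # Assumed /ip contract (inferred from the request below): the producer accepts a
            # JSON body {'uuid': <this instance's LOCAL_UUID>, 'ip': <new address>} and updates
            # the address it has stored for that uuid.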
            for key in keys:
                ip = r.get(key)
                if ip:
                    ip = ip.decode('utf-8')
                else:
                    continue
                try:
                    response = requests.post(
                        f"http://{ip}/ip",
                        json={'uuid': os.environ['LOCAL_UUID'], 'ip': current_ip},
                        timeout=5
                    )
                    logging.debug(f"Pushed update to {key.decode('utf-8')} at {ip}. Response: {response.status_code}")
                except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
                    logging.warning(f"Could not push update to {key.decode('utf-8')}: {str(e)}")
                    continue
        else:
            logging.debug(f'IP unchanged: {current_ip}')

        logging.debug('Waiting for next turn')
        # RUN_INTERVAL arrives from the environment as a string, so convert before sleeping
        time.sleep(int(os.environ.get("RUN_INTERVAL", 30)))

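# Example invocation (illustrative values; INITIAL_SERVERS and LOCAL_UUID must be set):
#   REDIS_URL=redis://localhost:6379/0 \
#   INITIAL_SERVERS=consumer1:8080,consumer2:8080 \
#   LOCAL_UUID=$(uuidgen) \
#   RUN_INTERVAL=30 \
#   python app.py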
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        pass
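
# For reference only: a hypothetical consumer-side /sync handler compatible with the calls
# above (a minimal sketch, assuming Flask; the actual consumer lives elsewhere in this
# repository and may differ):
#
#   from flask import Flask, jsonify, request
#
#   app = Flask(__name__)
#   CONSUMER_UUID = '...'  # placeholder for the consumer's own uuid
#
#   @app.route('/sync', methods=['POST'])
#   def sync():
#       scheduler_uuid = request.json['uuid']    # uuid of the calling scheduler
#       return jsonify({'uuid': CONSUMER_UUID})  # answer with this consumer's uuid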