From 77e915c63e5ea31870f3753c4cf9adfadb31795b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?F=C3=BCleki=20F=C3=A1bi=C3=A1n?=
Date: Fri, 8 May 2020 21:48:32 +0200
Subject: [PATCH] Added communicators

---
 app.py                 | 55 +--------------------------------------
 communicators.py       | 59 ++++++++++++++++++++++++++++++++++++++++++
 redis_super_storage.py |  6 ++---
 3 files changed, 63 insertions(+), 57 deletions(-)
 create mode 100644 communicators.py

diff --git a/app.py b/app.py
index 4d2a6c1..3d6793c 100644
--- a/app.py
+++ b/app.py
@@ -45,23 +45,7 @@ class Scheduler:
 
         logging.debug('Update redis consumers ip list from first answers: Done')
 
     def request_sync(self, consumer_list_redis):
-        temp_dict = {}
-        for uuid, info in consumer_list_redis.items():
-            ip = info['ip']
-            try:
-                # request synchronization
-                response = requests.post(f"http://{ip}/sync", json={'uuid': os.environ['LOCAL_UUID']}, timeout=5)
-            except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
-                logging.error(f"Error while syncing to {ip}: {str(e)}")
-                continue
-            if response.status_code == 200:
-                temp_dict[response.json()['uuid']] = {'ip': ip}
-
-        # update the dictionary of the currently available consumers
-        consumer_list_redis.update(temp_dict)
-        self.r.set('consumer_list', json.dumps(consumer_list_redis).encode('utf-8'))
-        logging.debug('Update redis consumer ip list from answers: Done')
 
 
     def test_ip_change(self, host_name):
@@ -91,16 +75,7 @@ class Scheduler:
             else:
                 continue
 
-            try:
-                response = requests.post(
-                    f"http://{ip}/ip",
-                    json={'uuid': os.environ['LOCAL_UUID'], 'ip': current_ip},
-                    timeout=5
-                )
-                logging.debug(f"Pushed update to {key.decode('utf-8')} at {ip}. Response: {response.status_code}")
-            except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
-                logging.warning(f"Could not push update to {key.decode('utf-8')}: {str(e)}")
-                continue
+
         else:
             logging.debug(f'IP unchanged: {current_ip}')
 
@@ -108,13 +83,6 @@ class Scheduler:
 
             time.sleep(os.environ.get("RUN_INTERVAL", 30))
 
-def get_ip_list(self):
-    # set initial consumer addresses
-    ip_list = os.environ['INITIAL_SERVERS'].split(',')
-    logging.debug('Get consumer list from environ at first: Done')
-    return ip_list
-
-
 def main():
     # set logging preferences
     logging.basicConfig(filename='', level=logging.DEBUG)
@@ -122,27 +90,6 @@ def main():
     redis_storage = RedisSuperStorage(os.environ.get('REDIS_URL', "redis://localhost:6379/0"))
     scheduler = Scheduler()
 
-    # get the dictionary of the currently available consumers
-
-    consumer_list_redis = redis_storage.get_consumer_dictionary()
-    logging.debug('Get consumer list from redis at first: Done')
-
-    ip_list = redis.get_ip_list()
-
-    scheduler.request_list(ip_list)
-    scheduler.request_first_sync(ip_list, consumer_list_redis)
-
-    while True:
-        logging.debug('Infinite Cycle start : Done')
-        # get the dictionary of the currently available consumers
-        consumer_list_redis = redis.get_consumer_dictionary()
-        logging.debug('Get consumer list from redis: Done')
-
-        temp_dict = {}
-
-        scheduler.request_sync(consumer_list_redis)
-        # Test ip change stuff
-        scheduler.test_ip_change(host_name)
 
 
 if __name__ == "__main__":
diff --git a/communicators.py b/communicators.py
new file mode 100644
index 0000000..c3850ea
--- /dev/null
+++ b/communicators.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python3
+import os
+import logging
+import requests
+from redis_super_storage import RedisSuperStorage
+
+
+class ProducerCommunicator:
+
+    def __init__(self, redis_store: RedisSuperStorage):
+        self._redis_store = redis_store
+        self._session = requests.Session()
+
+    def push_ip_update(self, newip: str):
+
+        for key, ip in self._redis_store.get_producer_list().items():
+
+            try:
+                response = self._session.post(
+                    f"http://{ip}/ip",
+                    json={'uuid': os.environ['LOCAL_UUID'], 'ip': newip},
+                    timeout=5
+                )
+                logging.debug(f"Pushed update to {key} at {ip}. Response: {response.status_code}")
+            except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
+                logging.warning(f"Could not push update to {key}: {str(e)}")
+
+
+class ConsumerCommunicator:
+
+    def __init__(self, redis_store: RedisSuperStorage):
+        self._redis_store = redis_store
+        self._session = requests.Session()
+
+    def targeted_sync(self, ip: str):
+        try:
+            # request synchronization
+            response = self._session.post(f"http://{ip}/sync", json={'uuid': os.environ['LOCAL_UUID']}, timeout=5)
+
+        except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
+            logging.error(f"Error while syncing to {ip}: {str(e)}")
+            return
+
+        if response.status_code == 200:
+            self._redis_store.update_consumer(response.json()['uuid'], ip)
+
+    def sync_all(self):
+        for uuid, info in self._redis_store.get_consumer_list().items():
+            ip = info['ip']
+            try:
+                # request synchronization
+                response = self._session.post(f"http://{ip}/sync", json={'uuid': os.environ['LOCAL_UUID']}, timeout=5)
+
+            except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
+                logging.error(f"Error while syncing to {ip}: {str(e)}")
+                continue
+
+            if response.status_code == 200:
+                self._redis_store.update_consumer(response.json()['uuid'], ip)
diff --git a/redis_super_storage.py b/redis_super_storage.py
index f5f22d1..68bd0c3 100644
--- a/redis_super_storage.py
+++ b/redis_super_storage.py
@@ -24,16 +24,16 @@ class RedisSuperStorage:
 
         return list_of_customers
 
-    def get_producer_list(self) -> list:
+    def get_producer_list(self) -> dict:
         keys = self.r.keys('producer_*')
 
-        list_of_producer_ip = []
+        list_of_producer_ip = {}
 
         for key in keys:
             ip = (self.r.get(key) or b"").decode('utf-8')
 
             if ip:
-                list_of_producer_ip.append(ip)
+                list_of_producer_ip[key.decode('utf-8')] = ip
 
         return list_of_producer_ip
 
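
Note: this patch moves the sync and IP-push HTTP calls into communicators.py but does not yet call the new classes from app.py, so Scheduler.request_sync is left with an empty body. The snippet below is only a sketch of how main() could be wired to the new communicators; the loop structure, the hostname-based IP detection and the variable names are assumptions, not part of this patch.

    #!/usr/bin/env python3
    # Sketch only: possible wiring of app.py to the new communicator classes.
    # Everything except the constructor and method names taken from
    # communicators.py and redis_super_storage.py is an assumption.
    import os
    import time
    import socket
    import logging

    from redis_super_storage import RedisSuperStorage
    from communicators import ProducerCommunicator, ConsumerCommunicator


    def main():
        logging.basicConfig(filename='', level=logging.DEBUG)

        redis_storage = RedisSuperStorage(os.environ.get('REDIS_URL', "redis://localhost:6379/0"))
        consumers = ConsumerCommunicator(redis_storage)
        producers = ProducerCommunicator(redis_storage)

        last_ip = None
        while True:
            # ask every known consumer to sync and refresh its redis entry
            consumers.sync_all()

            # announce our address to the producers whenever it changes
            # (hostname-based detection is assumed, the patch does not define it)
            current_ip = socket.gethostbyname(socket.gethostname())
            if current_ip != last_ip:
                producers.push_ip_update(current_ip)
                last_ip = current_ip

            time.sleep(int(os.environ.get("RUN_INTERVAL", 30)))


    if __name__ == "__main__":
        main()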