23 Commits

Author SHA1 Message Date
ffabi1997 850598d519 small fixes
continuous-integration/drone/push Build is passing
2020-05-08 22:32:18 +02:00
ffabi1997 0713cabc6b Implemented ip change checking
continuous-integration/drone/push Build is passing
2020-05-08 22:21:15 +02:00
ffabi1997 7ce88f1a74 updated app.py 2020-05-08 22:05:49 +02:00
ffabi1997 77e915c63e Added communicators 2020-05-08 21:48:32 +02:00
ffabi1997 8668550a3f moved stuff around 2020-05-08 21:29:08 +02:00
ffabi1997 d8bdd717c7 Fixed up Redis super storage 2020-05-08 21:24:37 +02:00
ffabi1997 dbeb35785e moved files around 2020-05-08 20:47:02 +02:00
ricsik52 d84564ff95 Add 'javitas.py'
continuous-integration/drone/push Build is passing
2020-05-08 20:25:22 +02:00
tormakris 59ae404a19 use custom dind to build doc container image
continuous-integration/drone/push Build is passing
2020-04-28 20:54:00 +02:00
tormakris 989b646d45 dockerfile keyword is still needed
continuous-integration/drone/push Build is failing
2020-04-28 20:31:21 +02:00
tormakris fe7fcb3714 change context of document building
continuous-integration/drone/push Build was killed
2020-04-28 20:27:31 +02:00
tormakris 2d326d79f4 change directory before kaniko step
continuous-integration/drone/push Build is failing
2020-04-28 20:14:59 +02:00
tormakris 6825efca87 use kaniko to build container images
continuous-integration/drone/push Build is failing
2020-04-28 19:36:12 +02:00
marcsello 7702cb2c7e fixed confusing logging
continuous-integration/drone/push Build is passing
2020-04-22 05:01:17 +02:00
marcsello d32db92bab fixed ip change detection
continuous-integration/drone/push Build is passing
2020-04-22 04:49:45 +02:00
marcsello 5f2894e727 IP is now saved to redis
continuous-integration/drone/push Build is passing
2020-04-22 01:35:29 +02:00
marcsello fdac9446af Fixed redis not using config variable 2020-04-22 01:20:24 +02:00
marcsello d0d6267c74 Fixed stuff
continuous-integration/drone/push Build is passing
2020-04-17 16:51:27 +02:00
ricsik52 aed27cba95 Update 'app.py'
continuous-integration/drone/push Build is passing
2020-04-17 16:36:53 +02:00
ricsik52 9d94dbd6d6 Update 'app.py'
continuous-integration/drone/push Build is passing
2020-04-17 16:27:13 +02:00
ricsik52 2a77c552bf Update 'app.py'
continuous-integration/drone/push Build is passing
2020-04-17 16:25:51 +02:00
ricsik52 a1ca3285ab Update 'app.py'
continuous-integration/drone/push Build is passing
2020-04-17 16:25:00 +02:00
marcsello a60ee01ae1 Merge pull request 'documentation' (#1) from documentation into master
continuous-integration/drone/push Build is passing
2020-04-17 15:35:03 +02:00
5 changed files with 193 additions and 73 deletions
+15 -19
View File
@@ -13,22 +13,18 @@ steps:
- find . -name "*.py" -exec python3 -m mccabe --min 3 '{}' + || if [ $? -eq 1 ]; then echo "you fail"; fi - find . -name "*.py" -exec python3 -m mccabe --min 3 '{}' + || if [ $? -eq 1 ]; then echo "you fail"; fi
- bandit -r . + || if [ $? -eq 1 ]; then echo "you fail"; fi - bandit -r . + || if [ $? -eq 1 ]; then echo "you fail"; fi
- name: build - name: build-app
image: docker:stable-dind image: banzaicloud/drone-kaniko
volumes: settings:
- name: dockersock registry: registry.kmlabz.com
path: /var/run repo: goldenpogacsa/${DRONE_REPO_NAME}
environment: username:
DOCKER_USERNAME:
from_secret: DOCKER_USERNAME from_secret: DOCKER_USERNAME
DOCKER_PASSWORD: password:
from_secret: DOCKER_PASSWORD from_secret: DOCKER_PASSWORD
commands: tags:
- echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin - latest
- docker build -t="$DOCKER_USERNAME/consumer-scheduler" . - ${DRONE_BUILD_NUMBER}
- docker build -t="$DOCKER_USERNAME/consumer-scheduler:$DRONE_BUILD_NUMBER" .
- docker push "$DOCKER_USERNAME/consumer-scheduler"
- docker push "$DOCKER_USERNAME/consumer-scheduler:$DRONE_BUILD_NUMBER"
- name: make_docs - name: make_docs
image: python:3.8 image: python:3.8
@@ -50,11 +46,11 @@ steps:
from_secret: DOCKER_PASSWORD from_secret: DOCKER_PASSWORD
commands: commands:
- cd docs - cd docs
- echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin - echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin registry.kmlabz.com
- docker build -t="$DOCKER_USERNAME/consumer-scheduler-docs" . - docker build -t="registry.kmlabz.com/goldenpogacsa/consumer-scheduler-docs" .
- docker build -t="$DOCKER_USERNAME/consumer-scheduler-docs:$DRONE_BUILD_NUMBER" . - docker build -t="registry.kmlabz.com/goldenpogacsa/consumer-scheduler-docs:$DRONE_BUILD_NUMBER" .
- docker push "$DOCKER_USERNAME/consumer-scheduler-docs" - docker push "registry.kmlabz.com/goldenpogacsa/consumer-scheduler-docs"
- docker push "$DOCKER_USERNAME/consumer-scheduler-docs:$DRONE_BUILD_NUMBER" - docker push "registry.kmlabz.com/goldenpogacsa/consumer-scheduler-docs:$DRONE_BUILD_NUMBER"
- name: slack - name: slack
image: plugins/slack image: plugins/slack
+26 -50
View File
@@ -1,13 +1,13 @@
#!/usr/bin/env python #!/usr/bin/env python3
import sentry_sdk import sentry_sdk
import time import time
import requests
import requests.exceptions
import os import os
import redis
import json
import logging import logging
from redis_super_storage import RedisSuperStorage
from communicators import ConsumerCommunicator, ProducerCommunicator
from ip_watchdog import IPWatchdog
""" """
Scheduler Scheduler
""" """
@@ -20,64 +20,40 @@ __version__text__ = "1"
sentry_sdk.init("https://0a106e104e114bc9a3fa47f9cb0db2f4@sentry.kmlabz.com/10") sentry_sdk.init("https://0a106e104e114bc9a3fa47f9cb0db2f4@sentry.kmlabz.com/10")
def main(): def get_initial_ip_list():
ip_list = os.environ['INITIAL_SERVERS'].split(',')
logging.debug('Initial ip list ' + ", ".join(ip_list))
return ip_list
def main():
# set logging preferences # set logging preferences
logging.basicConfig(filename='', level=logging.DEBUG) logging.basicConfig(filename='', level=logging.DEBUG)
# connect to redis redis_storage = RedisSuperStorage(os.environ.get('REDIS_URL', "redis://localhost:6379/0"), 5)
r = redis.Redis(host = 'localhost', port = 6379, db = 0) consumer_communicator = ConsumerCommunicator(redis_storage)
producer_communicator = ProducerCommunicator(redis_storage)
ip_watchdog = IPWatchdog(redis_storage)
# set initial consumer addresses logging.info("Syncing to initial consumer list")
ip_list = os.environ['INITIAL_SERVERS'].split(',') for ip in get_initial_ip_list():
logging.debug('Get consumer list from environ at first: Done') logging.debug(f"Syncing to {ip}")
# get the dictionary of the currently available consumers consumer_communicator.targeted_snyc(ip)
consumer_list_redis = json.loads((r.get('consumer_list') or b'{}').decode('utf-8'))
logging.debug('Get consumer list from redis at first: Done')
temp_dict = { }
for ip in ip_list:
try:
# request synchronization
response = requests.post(f"http://{ip}/sync", json = { 'uuid': os.environ['LOCAL_UUID'] })
except requests.exceptions.ConnectionError:
continue
if response.status_code == 200:
temp_dict[response.json()['uuid']] = { 'ip': ip }
consumer_list_redis.update(temp_dict)
r.set('consumer_list', json.dumps(consumer_list_redis).encode('utf-8'))
logging.debug('Update redis consumers ip list from first answers: Done')
while True: while True:
logging.debug('Infinite Cycle start : Done') logging.debug("Doing a sync")
# get the dictionary of the currently available consumers consumer_communicator.sync_all()
consumer_list_redis = json.loads((r.get('consumer_list') or b'{}').decode('utf-8'))
logging.debug('Get consumer list from redis: Done')
for uuid, info in consumer_list_redis.items():
ip = info['ip']
try:
# request synchronization
response = requests.post(f"http://{ip}/sync", json = { 'uuid': os.environ['LOCAL_UUID'] })
except requests.exceptions.ConnectionError:
continue
if response.status_code == 200: ip_changed, ipaddr = ip_watchdog.ip_changed()
temp_dict[response.json()['uuid']] = { 'ip': ip } if ip_changed:
producer_communicator.push_ip_update(ipaddr)
time.sleep(int(os.environ.get("RUN_INTERVAL", 10)))
# update the dictionary of the currently available consumers
consumer_list_redis.update(temp_dict)
r.set('consumer_list', json.dumps(consumer_list_redis).encode('utf-8'))
logging.debug('Update redis consumer ip list from answers: Done')
logging.debug('Waiting for next turn')
# wait for the next update time
time.sleep(30)
if __name__ == "__main__": if __name__ == "__main__":
try: try:
main() main()
except KeyboardInterrupt: except KeyboardInterrupt:
pass pass
+60
View File
@@ -0,0 +1,60 @@
#!/usr/bin/env python3
import os
import logging
import requests
import requests.exceptions
from redis_super_storage import RedisSuperStorage
class ProducerCommunicator:
    """Pushes IP-change notifications to every producer known to Redis."""

    def __init__(self, redis_store: RedisSuperStorage):
        """
        Args:
            redis_store: storage used to look up the current producer list.
        """
        self._redis_store = redis_store
        # one Session so HTTP connections are pooled across pushes
        self._session = requests.Session()

    def push_ip_update(self, newip: str):
        """POST our new IP to each producer; failures are logged, never raised."""
        for key, ip in self._redis_store.get_producer_list().items():
            try:
                resp = self._session.post(
                    f"http://{ip}/ip",
                    json={'uuid': os.environ['LOCAL_UUID'], 'ip': newip},
                    timeout=5
                )
            except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
                # best-effort delivery: skip unreachable producers
                logging.warning(f"Could not push update to {key}: {str(e)}")
            else:
                logging.debug(f"Pushed update to {key} at {ip}. Response: {resp.status_code}")
class ConsumerCommunicator:
    """Performs /sync handshakes with consumers and records them in Redis."""

    def __init__(self, redis_store: RedisSuperStorage):
        """
        Args:
            redis_store: storage that consumer uuid/ip records are written to.
        """
        self._redis_store = redis_store
        # pooled session, reused by both targeted and bulk syncs
        self._session = requests.Session()

    def targeted_snyc(self, ip: str):
        """Sync with a single consumer at `ip`; on HTTP 200 store its uuid/ip.

        Connection errors and timeouts are logged and swallowed (best effort).
        """
        try:
            # request synchronization
            response = self._session.post(f"http://{ip}/sync", json={'uuid': os.environ['LOCAL_UUID']}, timeout=5)
        except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
            logging.error(f"Error while syncing to {ip}: {str(e)}")
            return
        if response.status_code == 200:
            self._redis_store.update_consumer(response.json()['uuid'], ip)

    # correctly spelled alias; the original (typo'd) name is kept for existing callers
    targeted_sync = targeted_snyc

    def sync_all(self):
        """Re-sync every consumer currently known to the Redis store."""
        for uuid, info in self._redis_store.get_consumer_list().items():
            ip = info['ip']
            try:
                # fix: use the pooled session (was the module-level requests.post,
                # which bypassed connection reuse and was inconsistent with targeted_snyc)
                response = self._session.post(f"http://{ip}/sync", json={'uuid': os.environ['LOCAL_UUID']}, timeout=5)
            except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
                logging.error(f"Error while syncing to {ip}: {str(e)}")
                continue
            if response.status_code == 200:
                self._redis_store.update_consumer(response.json()['uuid'], ip)
+24
View File
@@ -0,0 +1,24 @@
#!/usr/bin/env python3
from typing import Tuple
import logging
import socket
from redis_super_storage import RedisSuperStorage
class IPWatchdog:
    """Detects when this host's resolved IP differs from the one stored in Redis."""

    def __init__(self, redis_store: RedisSuperStorage):
        """
        Args:
            redis_store: storage whose `current_ip` property holds the last known IP.
        """
        self._redis_store = redis_store
        self._host_name = socket.gethostname()

    def ip_changed(self) -> Tuple[bool, str]:
        """Return (changed, current_ip); persist the new IP when it has changed."""
        previous = self._redis_store.current_ip
        resolved = socket.gethostbyname(self._host_name)
        if resolved == previous:
            # nothing to do: report the (unchanged) stored address
            return False, previous
        logging.info(f'IP changed: {previous} -> {resolved}')
        self._redis_store.current_ip = resolved
        return True, resolved
+64
View File
@@ -0,0 +1,64 @@
#!/usr/bin/env python3
import redis
import os
import json
import time
class RedisSuperStorage:
    """Thin Redis wrapper holding consumer/producer records and our own IP."""

    def __init__(self, redis_url: str, timeout: int):
        """
        Args:
            redis_url: URL understood by redis.from_url().
            timeout: kept for API compatibility; currently unused — the consumer
                     TTL comes from the CUSTOMER_TIMEOUT env var (see update_consumer).
        """
        self.r = redis.from_url(redis_url)
        self._timeout = timeout

    def get_consumer_list(self) -> dict:
        """Return {uuid: info-dict} for every non-expired consumer_* key."""
        keys = self.r.keys('consumer_*')
        list_of_customers = {}
        for key in keys:
            # missing/expired keys decode to an empty dict and are skipped
            info = json.loads((self.r.get(key) or b"{}").decode('utf-8'))
            if info:
                list_of_customers[info['uuid']] = info
        return list_of_customers

    def get_producer_list(self) -> dict:
        """Return {redis-key: ip} for every producer_* key with a value."""
        keys = self.r.keys('producer_*')
        list_of_producer_ip = {}
        for key in keys:
            ip = (self.r.get(key) or b"").decode('utf-8')
            if ip:
                list_of_producer_ip[key.decode('utf-8')] = ip
        return list_of_producer_ip

    def update_consumer(self, uuid: str, ip: str):
        """Upsert a consumer record keyed by uuid and refresh its TTL."""
        cust_key = f"consumer_{uuid}"
        info = {
            "uuid": uuid,
            "ip": ip,
            "last_seen": time.time()
        }
        self.r.set(cust_key, json.dumps(info).encode('utf-8'))
        # fix: CUSTOMER_TIMEOUT arrives from the environment as a string;
        # convert explicitly so the TTL is always an integer number of seconds
        self.r.expire(cust_key, int(os.environ.get("CUSTOMER_TIMEOUT", 30)))

    def get_current_ip(self) -> str:
        """Return the last IP stored via set_current_ip, or None if never set."""
        ip = self.r.get('current_ip')
        if ip:
            ip = ip.decode('utf-8')
        return ip

    def set_current_ip(self, ip: str):
        """Store this host's current IP (UTF-8 encoded) under 'current_ip'."""
        self.r.set('current_ip', ip.encode('utf-8'))

    # property so callers read/write storage.current_ip naturally
    current_ip = property(get_current_ip, set_current_ip)