Publish site URL map to Redis and pass the Redis client and site map into each scheduler run

This commit is contained in:
Pünkösd Marcell 2021-12-12 02:07:26 +01:00
parent 0765d98c1f
commit 2e8a4d25b4
3 changed files with 50 additions and 6 deletions

View File

@ -25,10 +25,13 @@ def parse_site_list(raw_site_list: str) -> List[SiteDescription]:
class Config:
    """Application configuration, populated from environment variables at import time."""
    # Scheduler tick interval in seconds.
    INTERVAL = int(os.environ.get("INTERVAL", 30))
    # NOTE: was assigned twice; the first assignment was dead and has been removed.
    SITE_LIST = parse_site_list(os.environ.get("SITES"))  # This is in priority order
    SENTRY_DSN = os.environ.get("SENTRY_DSN")
    RELEASE_ID = os.environ.get("RELEASE_ID", "test")
    RELEASEMODE = os.environ.get("RELEASEMODE", "dev")
    DEBUG = ('--debug' in sys.argv) or bool(os.environ.get("DEBUG", "").upper() in ['YES', 'TRUE', '1'])
    # Required settings: a missing variable raises KeyError when this module is imported.
    REDIS_URL = os.environ["REDIS_URL"]
    COLLECTOR_URL = os.environ["COLLECTOR_URL"]  # http://example.com/report/

View File

@ -1,14 +1,31 @@
#!/usr/bin/env python3
import logging
import sys
from typing import Dict
import sentry_sdk
from sentry_sdk.integrations.logging import LoggingIntegration
from config import Config
from apscheduler.schedulers.blocking import BlockingScheduler
from run_scheduler import run
import redis
import json
import run_scheduler
from functools import cache
@cache
def get_site_url_map() -> Dict[str, str]:
    """Return a mapping of site name -> base URL for every configured site.

    The result is memoized (``@cache``): Config.SITE_LIST is only walked once
    per process, so repeated callers share one dict instance.
    """
    return {site.name: site.baseurl for site in Config.SITE_LIST}
def put_site_url_map_into_redis(redis_client: redis.Redis):
    """Publish the site-name -> base-URL map to Redis under the SITEURLMAP key."""
    payload = json.dumps(get_site_url_map()).encode('utf-8')
    redis_client.set("SITEURLMAP", payload)
def run_wrapper(redis_client: redis.Redis):
    """Scheduler job body: refresh the site map in Redis, then run one pass.

    The map is re-published on every tick so Redis stays current even if it
    was flushed between runs.
    """
    site_url_map = get_site_url_map()  # memoized, so this is cheap
    put_site_url_map_into_redis(redis_client)
    run_scheduler.run(redis_client, site_url_map)
def main():
@ -34,8 +51,11 @@ def main():
level=logging.DEBUG if Config.DEBUG else logging.INFO
)
redis_client = redis.from_url(Config.REDIS_URL)
put_site_url_map_into_redis(redis_client)
scheduler = BlockingScheduler()
scheduler.add_job(run, trigger='interval', seconds=Config.INTERVAL)
scheduler.add_job(lambda: run_wrapper(redis_client), trigger='interval', seconds=Config.INTERVAL)
try:
scheduler.start()
except KeyboardInterrupt:

View File

@ -1,5 +1,26 @@
from typing import Dict
import logging
from redis import Redis
import requests
from config import Config
from urllib.parse import urljoin
def run():
    # Placeholder scheduling pass (pre-change version): only emits a log line.
    logging.info("Csirip")
def run(redis_client: Redis, site_url_map: Dict[str, str]):
    """Execute one scheduling pass.

    Currently this only fetches the performance reports from the collector
    service; the actual scheduling decisions are not implemented yet (see the
    TODO list below). ``redis_client`` and ``site_url_map`` are accepted now
    so the signature is stable once that logic lands, but are not used yet.
    """
    # Fetch the performance report from {COLLECTOR_URL}/site; COLLECTOR_URL
    # supplies the first part of the URL (e.g. http://example.com/report/).
    logging.debug("Getting performance reports...")
    response = requests.get(urljoin(Config.COLLECTOR_URL, "site"), timeout=10)
    response.raise_for_status()
    # TODO:
    # - set the default scheduling site
    # - Check if scheduling is required: any derivatives are above 0 for an extended period
    #   - if yes, then decide where to schedule
    #   - any sites with 0 derivative?
    #   - Check the k8s api before scheduling to see if the pod is running
    # - check if there are any pod running where 0 units scheduled for extended time
    #   - delete that pod
    # - write some log, so we can draw nice graphs
    # Optional: check if a higher priority pod have free capacity, and move lower ones up