#!/usr/bin/env python3
import json
import logging
import sys
from functools import cache
from typing import Dict

import redis
import sentry_sdk
from apscheduler.schedulers.blocking import BlockingScheduler
from sentry_sdk.integrations.logging import LoggingIntegration

import run_scheduler
from config import Config


@cache
def get_site_url_map() -> Dict[str, str]:
    """Build the site-name -> base-URL mapping once and memoize it."""
    return {p.name: p.baseurl for p in Config.SITE_LIST}


def put_site_url_map_into_redis(redis_client: redis.Redis):
    """Store the site URL map in Redis as a JSON-encoded string."""
    redis_client.set("SITEURLMAP", json.dumps(get_site_url_map()).encode('utf-8'))


def run_wrapper(redis_client: redis.Redis):
    """Refresh the site URL map in Redis, then run one scheduler pass."""
    put_site_url_map_into_redis(redis_client)
    run_scheduler.run(redis_client, get_site_url_map())


def main():
    # Forward INFO-and-above log records to Sentry as breadcrumbs,
    # and ERROR-and-above as events.
    sentry_logging = LoggingIntegration(
        level=logging.INFO,
        event_level=logging.ERROR
    )
    if Config.SENTRY_DSN:
        sentry_sdk.init(
            dsn=Config.SENTRY_DSN,
            integrations=[sentry_logging],
            traces_sample_rate=0.0,
            send_default_pii=True,
            release=Config.RELEASE_ID,
            environment=Config.RELEASEMODE,
            _experiments={"auto_enabling_integrations": True}
        )

    logging.basicConfig(
        stream=sys.stdout,
        format="%(asctime)s - %(name)s [%(levelname)s]: %(message)s",
        level=logging.DEBUG if Config.DEBUG else logging.INFO
    )

    # Seed Redis once at startup, then refresh it on every scheduled run.
    redis_client = redis.from_url(Config.REDIS_URL)
    put_site_url_map_into_redis(redis_client)

    scheduler = BlockingScheduler()
    scheduler.add_job(lambda: run_wrapper(redis_client),
                      trigger='interval', seconds=Config.INTERVAL)
    try:
        scheduler.start()
    except KeyboardInterrupt:
        pass
    scheduler.shutdown()


if __name__ == '__main__':
    main()