import json
import logging
import os
from logging.handlers import RotatingFileHandler
from concurrent.futures import ThreadPoolExecutor

from datalayer.item_monitor import ItemMonitor
from datalayer.general_monitor import GeneralMonitor
from managers.worker import Worker
from managers.queue_manager import QueueManager


def configure_logger():
    # Silence noisy HTTP client logs
    logging.getLogger("httpx").setLevel(logging.WARNING)

    # Console: INFO and above
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)
    console_handler.setFormatter(logging.Formatter('%(levelname)s [%(asctime)s] %(name)s - %(message)s'))

    # File: DEBUG and above, rotated at ~10 MB.
    # backupCount must be > 0, otherwise RotatingFileHandler never rolls over.
    file_handler = RotatingFileHandler('monitor.log', maxBytes=10_000_000, backupCount=5)
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))

    # Configure the root logger with both handlers; NOTSET lets every record
    # through to the handlers, which apply their own level filtering.
    logging.basicConfig(level=logging.NOTSET, handlers=[console_handler, file_handler])


def parse_items_to_monitor():
    # Resolve workers.json relative to this file so the script works
    # regardless of the current working directory.
    base_dir = os.path.dirname(os.path.abspath(__file__))
    workers_path = os.path.join(base_dir, "workers.json")
    with open(workers_path) as f:
        args = json.load(f)

    if 'items' not in args:
        raise ValueError("Missing mandatory field: items")
    if 'general' not in args:
        raise ValueError("Missing mandatory field: general")

    items = [ItemMonitor.load_from_json(item) for item in args['items']]
    general_args = GeneralMonitor.load_from_json(args['general'])
    return items, general_args


if __name__ == "__main__":
    configure_logger()
    items, general_args = parse_items_to_monitor()
    queue_manager = QueueManager()

    # Run each worker in its own thread
    with ThreadPoolExecutor(max_workers=1000) as executor:
        for item in items:
            worker = Worker(item, general_args, queue_manager)
            executor.submit(worker.run)