# services/core.py
"""Monitor-task entry point: runs all crawlers and merges their output.

A module-level lock ensures only one monitor task runs at a time; a call
that arrives while a task is in flight is skipped (returns None), not
queued.
"""

import logging
import threading

from .crawler_106 import run_106_logic
from .crawler_82 import run_82_logic

logger = logging.getLogger(__name__)

# Guards against overlapping runs, e.g. a scheduler firing again while the
# previous task is still crawling.
task_lock = threading.Lock()


def execute_monitor_task():
    """Run every crawler and return the merged result payload.

    Returns:
        dict | None: ``{'device_list': [...], 'target_time': None,
        'temp_file_path': None}`` on success, or ``None`` when another
        run currently holds the lock (this call is skipped).
    """
    # BUGFIX: the original checked `task_lock.locked()` and then entered
    # `with task_lock:` — a check-then-act race. Two threads could both see
    # the lock free; the loser would then BLOCK and run a second task
    # back-to-back instead of being skipped. acquire(blocking=False) makes
    # the check-and-acquire a single atomic step.
    if not task_lock.acquire(blocking=False):
        logger.warning(">>> 任务正在运行中,跳过")
        return None
    try:
        logger.info(">>> 开始执行监控任务...")

        # 1. Fetch the item list from the "106" crawler.
        list_106 = run_106_logic()

        # 2. Fetch the item list from the "82" crawler.
        list_82 = run_82_logic()

        # 3. Merge both sources into one flat list.
        combined_list = list_106 + list_82

        # Lazy %-formatting: the message is only rendered if INFO is enabled.
        logger.info(">>> 任务完成,共获取 %d 条数据", len(combined_list))

        return {
            'device_list': combined_list,
            'target_time': None,      # per-item timestamps live on each item
            'temp_file_path': None,   # legacy field; file paths live on each item
        }
    finally:
        # Always release, even if a crawler raises, so the next scheduled
        # run is not permanently locked out.
        task_lock.release()