Refactor: switch storage to SQLite, enable INI config and API key validation
@@ -13,13 +13,15 @@ from io import BytesIO
 
 
 from ..app import process_data_async
-from ..shared import _format_response, log_performance, logger, ALLOWED_DATA_EXTENSIONS, ALLOWED_CONFIG_EXTENSIONS, allowed_file,update_task_status, TASK_STATUS_PENDING, TASK_STATUS_FAILED
+from ..shared import _format_response, log_performance, logger, ALLOWED_DATA_EXTENSIONS, ALLOWED_CONFIG_EXTENSIONS, allowed_file, update_task_status, TASK_STATUS_PENDING, TASK_STATUS_FAILED
+from ..auth import require_api_key
 
 # Create blueprint
 upload_bp = Blueprint('upload', __name__, url_prefix='/upload')
 
 
 @upload_bp.route('', methods=['POST'])
+@require_api_key
 @log_performance
 def upload_file():
     logger.info("Received upload request")
@@ -49,8 +51,8 @@ def upload_file():
         return _format_response(400, "无效的数据文件类型。只允许 .xlsx 和 .xls 格式。")
 
     # Generate unique job ID
-    job_id = str(uuid.uuid4())
-    logger.info(f"Generated job ID: {job_id}")
+    task_id = str(uuid.uuid4())
+    logger.info(f"Generated job ID: {task_id}")
 
     # 1) Parse config content (parse in memory without saving first)
     if config_file and config_file.filename != '':
@@ -69,13 +71,14 @@ def upload_file():
         with open(default_config_path, 'r', encoding='utf-8') as f:
             active_config = yaml.safe_load(f)
 
-    # 2) Create job directories based on config['output_dir']
-    output_base = Path(active_config['output_dir']).expanduser()
-    job_upload_dir = output_base / "uploads" / job_id
-    job_output_dir = output_base / "outputs" / job_id
+    # 2) Create job directories based on INI configuration
+    upload_base = Path(current_app.config['UPLOAD_FOLDER'])
+    output_base = Path(current_app.config['OUTPUT_FOLDER'])
+    job_upload_dir = upload_base / task_id
+    job_output_dir = output_base / task_id
     job_upload_dir.mkdir(parents=True, exist_ok=True)
     job_output_dir.mkdir(parents=True, exist_ok=True)
-    logger.info(f"Job {job_id}: Created directories - Upload: {job_upload_dir}, Output: {job_output_dir}")
+    logger.info(f"Job {task_id}: Created directories - Upload: {job_upload_dir}, Output: {job_output_dir}")
 
     # 3) Save data file to job_upload_dir
     data_filename = secure_filename(data_file.filename)
@@ -83,9 +86,9 @@ def upload_file():
     try:
         data_file.seek(0)
         data_file.save(str(data_path))
-        logger.info(f"Job {job_id}: Data file saved successfully - Path: {data_path}")
+        logger.info(f"Job {task_id}: Data file saved successfully - Path: {data_path}")
     except Exception as e:
-        logger.error(f"Job {job_id}: Failed to save data file {data_filename}: {str(e)}")
+        logger.error(f"Job {task_id}: Failed to save data file {data_filename}: {str(e)}")
         return _format_response(500, "保存数据文件失败")
 
     # 4) Save config file to job_upload_dir
@@ -96,9 +99,9 @@ def upload_file():
             config_file.seek(0)
             config_file.save(str(config_path))
             active_config_path = config_path
-            logger.info(f"Job {job_id}: Custom config saved successfully - Path: {config_path}")
+            logger.info(f"Job {task_id}: Custom config saved successfully - Path: {config_path}")
         except Exception as e:
-            logger.error(f"Job {job_id}: Failed to save config file {config_filename}: {str(e)}")
+            logger.error(f"Job {task_id}: Failed to save config file {config_filename}: {str(e)}")
             return _format_response(500, "保存配置文件失败")
     else:
         # Copy default config for record keeping
@@ -106,29 +109,29 @@ def upload_file():
         with open(config_path, 'w', encoding='utf-8') as f:
             yaml.safe_dump(active_config, f, allow_unicode=True)
         active_config_path = config_path
-        logger.info(f"Job {job_id}: Default config saved for record - Path: {config_path}")
+        logger.info(f"Job {task_id}: Default config saved for record - Path: {config_path}")
 
     # Initialize task status
-    update_task_status(job_id, TASK_STATUS_PENDING, "Task queued for processing")
-    logger.info(f"Job {job_id}: Task status initialized as PENDING")
+    update_task_status(task_id, TASK_STATUS_PENDING, "任务已加入处理队列", output_dir=str(job_output_dir))
+    logger.info(f"Job {task_id}: Task status initialized as PENDING")
 
     # Start background processing
     try:
         thread = threading.Thread(
             target=process_data_async,
-            args=(job_id, data_path, active_config_path, job_output_dir)
+            args=(task_id, data_path, active_config_path, job_output_dir)
         )
         thread.daemon = True
         thread.start()
-        logger.info(f"Job {job_id}: Background processing thread started successfully")
+        logger.info(f"Job {task_id}: Background processing thread started successfully")
     except Exception as e:
-        logger.error(f"Job {job_id}: Failed to start background processing thread: {str(e)}")
-        update_task_status(job_id, TASK_STATUS_FAILED, error=str(e))
+        logger.error(f"Job {task_id}: Failed to start background processing thread: {str(e)}")
+        update_task_status(task_id, TASK_STATUS_FAILED, error=str(e))
         return _format_response(500, "启动处理失败")
 
-    logger.info(f"Job {job_id}: Upload process completed successfully, returning job ID to client")
+    logger.info(f"Job {task_id}: Upload process completed successfully, returning job ID to client")
     return _format_response(202, "任务已接受并加入处理队列", {
         "status": "accepted",
-        "job_id": job_id,
-        "task_status_url": f"/task/{job_id}"
+        "task_id": task_id,
+        "task_status_url": f"/task/{task_id}"
     })
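
The require_api_key decorator imported from ..auth is not part of this diff. A minimal sketch of what it might look like, assuming the key arrives in an X-API-Key header and the expected value sits in app config under API_KEY (both names are assumptions):

from functools import wraps

from flask import current_app, jsonify, request


def require_api_key(view_func):
    """Reject the request unless the caller supplies the expected API key."""
    @wraps(view_func)
    def wrapper(*args, **kwargs):
        provided = request.headers.get("X-API-Key")     # header name is an assumption
        expected = current_app.config.get("API_KEY")    # config key is an assumption
        if not expected or provided != expected:
            return jsonify({"code": 401, "message": "Invalid or missing API key"}), 401
        return view_func(*args, **kwargs)
    return wrapper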
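
The commit title says configuration now comes from an INI file, but the loading code is also outside this diff. A sketch of how configparser could populate the UPLOAD_FOLDER and OUTPUT_FOLDER keys read above, with hypothetical file, section, and option names:

import configparser

from flask import Flask


def create_app(ini_path="config.ini"):    # file name is an assumption
    parser = configparser.ConfigParser()
    parser.read(ini_path, encoding="utf-8")
    app = Flask(__name__)
    # Section and option names below are guesses; only the config keys appear in the diff.
    app.config["UPLOAD_FOLDER"] = parser.get("paths", "upload_folder", fallback="uploads")
    app.config["OUTPUT_FOLDER"] = parser.get("paths", "output_folder", fallback="outputs")
    app.config["API_KEY"] = parser.get("auth", "api_key", fallback=None)
    return app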
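
Likewise, the title says task state moved to SQLite, but the shared update_task_status helper is not shown here. A rough guess at its shape, assuming a tasks table keyed by a unique task_id column; the database path and column names are invented for illustration:

import sqlite3
import time


def update_task_status(task_id, status, message=None, output_dir=None, error=None):
    # Upsert one row per task; assumes tasks(task_id) carries a UNIQUE constraint.
    conn = sqlite3.connect("tasks.db")    # database path is an assumption
    try:
        conn.execute(
            "INSERT INTO tasks (task_id, status, message, output_dir, error, updated_at) "
            "VALUES (?, ?, ?, ?, ?, ?) "
            "ON CONFLICT(task_id) DO UPDATE SET "
            "status=excluded.status, message=excluded.message, "
            "output_dir=excluded.output_dir, error=excluded.error, "
            "updated_at=excluded.updated_at",
            (task_id, status, message, output_dir, error, time.time()),
        )
        conn.commit()
    finally:
        conn.close()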