mirror of
https://git.oceanpay.cc/danial/kami_apple_exchage.git
synced 2025-12-18 22:29:09 +00:00
本次提交将后端的任务队列系统从Celery迁移到了Arq,以支持基于协程的任务处理。主要改动包括: - 更新文档和配置文件,反映架构变化。 - 修改健康检查和服务初始化逻辑,以适应Arq的使用。 - 移除与Celery相关的代码,并添加Arq任务定义和调度器。 - 更新Dockerfile和相关脚本,确保Arq worker能够正确运行。 - 调整API和业务服务中的任务处理逻辑,移除对Celery的依赖。 这些改动旨在提高系统的异步处理能力和整体性能。
276 lines
9.0 KiB
Python
276 lines
9.0 KiB
Python
"""
|
||
任务管理API
|
||
提供任务暂停/恢复控制功能
|
||
"""
|
||
|
||
import traceback
|
||
from typing import Any
|
||
|
||
from fastapi import APIRouter, Depends, HTTPException, status
|
||
from sqlalchemy.ext.asyncio import AsyncSession
|
||
|
||
# Celery has been replaced with Arq for coroutine-based task processing
|
||
from app.core.database import get_async_db
|
||
from app.core.log import get_logger
|
||
from app.core.redis_manager import redis_manager
|
||
from app.core.state_manager import task_state_manager
|
||
from app.schemas.task import (
|
||
DeleteAllDataResponse,
|
||
GiftCardSubmissionRequest,
|
||
GiftCardSubmissionResponse,
|
||
QueueStatsResponse,
|
||
TaskControlRequest,
|
||
TaskControlResponse,
|
||
TaskListResponse,
|
||
TaskStateResponse,
|
||
)
|
||
from app.services.task_service import TaskService
|
||
from app.models import Orders, UserData, GiftCards, Links
|
||
from sqlalchemy import text
|
||
|
||
logger = get_logger(__name__)
|
||
router = APIRouter()
|
||
|
||
|
||
@router.post("/toggle", response_model=TaskControlResponse)
async def toggle_task_state(request: TaskControlRequest) -> TaskControlResponse:
    """
    Toggle the global task state (pause / resume).

    Args:
        request: Toggle request carrying the desired pause flag and a
            human-readable reason.

    Returns:
        TaskControlResponse: Operation result echoing the new state.

    Raises:
        HTTPException: 500 when persisting the state to Redis fails.
    """
    try:
        success = await redis_manager.set_task_pause_state(
            is_paused=request.is_paused, reason=request.reason
        )

        if success:
            action = "暂停" if request.is_paused else "恢复"
            logger.info(f"任务{action}操作成功: {request.reason}")
            return TaskControlResponse(
                success=True,
                is_paused=request.is_paused,
                reason=request.reason,
                message=f"任务已成功{action}",
            )

        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="设置任务状态失败",
        )

    except HTTPException:
        # Bug fix: HTTPException is a subclass of Exception, so without this
        # clause the 500 raised above was swallowed by the generic handler
        # below and re-wrapped, mangling its detail message.
        raise
    except Exception as e:
        logger.error(f"设置任务状态异常: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"设置任务状态失败: {str(e)}",
        ) from e
|
||
|
||
|
||
@router.get("/status", response_model=TaskStateResponse)
async def get_task_status() -> TaskStateResponse:
    """
    Get the current global task state.

    Returns:
        TaskStateResponse: Current pause flag, reason, and a status message.

    Raises:
        HTTPException: 500 when reading the state from Redis fails.
    """
    # Fix: the original docstring referenced a non-existent
    # ``TaskStatusResponse``; the actual response model is TaskStateResponse.
    try:
        paused, reason = await redis_manager.get_task_pause_state()

        status_text = "已暂停" if paused else "运行中"
        return TaskStateResponse(
            success=True,
            is_paused=paused,
            reason=reason,
            message=f"任务当前状态: {status_text}",
        )

    except Exception as e:
        # Full traceback in the log; only the message goes to the client.
        logger.error(f"获取任务状态异常: {traceback.format_exc()}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"获取任务状态失败: {str(e)}",
        ) from e
|
||
|
||
|
||
@router.get("/list", response_model=TaskListResponse)
async def get_task_list(db: AsyncSession = Depends(get_async_db)) -> TaskListResponse:
    """
    Get the task list, most recently updated first.

    Returns:
        TaskListResponse: Tasks sorted by ``updated_at`` descending.

    Raises:
        HTTPException: 500 when fetching the list fails.
    """
    try:
        service = TaskService(db)
        response = await service.get_task_list()
        # Present the most recently updated tasks first.
        response.tasks.sort(key=lambda task: task.updated_at, reverse=True)
        return response

    except Exception as e:
        logger.error(f"获取任务列表失败: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"获取任务列表失败: {str(e)}",
        )
|
||
|
||
|
||
@router.get("/queue/stats", summary="获取队列统计", response_model=QueueStatsResponse)
async def get_queue_stats() -> QueueStatsResponse:
    """Return Arq queue statistics.

    NOTE(review): the counters below are static placeholders kept for API
    compatibility after the Celery -> Arq migration; real Arq queue
    introspection still needs to be wired in.
    """
    try:
        from datetime import datetime

        # Per-category task maps are empty; totals are zeroed.
        totals = dict.fromkeys(
            ("total_active", "total_scheduled", "total_reserved"), 0
        )
        stats = {
            "active_tasks": {},
            "scheduled_tasks": {},
            "reserved_tasks": {},
            **totals,
        }

        logger.info("获取队列统计成功")
        return QueueStatsResponse(
            success=True, stats=stats, timestamp=datetime.now().isoformat()
        )

    except Exception as e:
        logger.error(f"获取队列统计失败: {e}", exc_info=True)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"获取队列统计失败: {str(e)}",
        )
|
||
|
||
|
||
@router.post("/submit", response_model=GiftCardSubmissionResponse)
async def submit_gift_card(
    request: GiftCardSubmissionRequest, db: AsyncSession = Depends(get_async_db)
) -> GiftCardSubmissionResponse:
    """
    Submit gift card info and update the task state.

    Args:
        request: Gift card submission request.
        db: Async database session.

    Returns:
        GiftCardSubmissionResponse: Operation result.

    Raises:
        HTTPException: 404 when the service signals a missing target
            (ValueError), 500 on runtime or unexpected failures.
    """
    try:
        return await TaskService(db).submit_gift_card(request)

    except ValueError as e:
        # Service raises ValueError when the referenced task is not found.
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e))
    except RuntimeError as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)
        )
    except Exception as e:
        logger.error(f"提交礼品卡失败: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"提交礼品卡失败: {str(e)}",
        )
|
||
|
||
|
||
@router.delete("/clear-all-data", response_model=DeleteAllDataResponse)
async def clear_all_data(
    db: AsyncSession = Depends(get_async_db),
) -> DeleteAllDataResponse:
    """
    Delete all data from the database and clear Redis.

    Tables are wiped child-first so foreign-key constraints are not
    violated, and the whole database wipe is committed as one transaction.
    Clearing Redis afterwards is best-effort: a Redis failure is logged but
    does not undo the already-committed database wipe.

    Returns:
        DeleteAllDataResponse: Per-table deletion counts and a summary message.

    Raises:
        HTTPException: 500 when any table wipe or the commit fails.
    """
    try:
        deleted_tables: dict[str, int] = {}

        # Child tables first, then the tables they reference. Table names
        # come from this fixed internal list, so the f-string SQL below is
        # not an injection risk.
        tables_to_clear = [
            ("gift_cards", GiftCards),  # depends on orders
            ("orders", Orders),  # depends on user_data and links
            ("links", Links),  # referenced by orders
            ("user_data", UserData),  # referenced by orders
        ]

        # Model classes are kept in the list for documentation; only the
        # table name is used (raw SQL), hence the `_model` placeholder.
        for table_name, _model in tables_to_clear:
            try:
                # Count rows first so we can report how many were removed.
                count_query = await db.execute(
                    text(f"SELECT COUNT(*) FROM {table_name}")
                )
                count_before = count_query.scalar() or 0

                if count_before > 0:
                    # An unfiltered DELETE removes every row, so the
                    # pre-count equals the number of rows deleted.
                    await db.execute(text(f"DELETE FROM {table_name}"))
                    deleted_count = count_before
                    deleted_tables[table_name] = deleted_count
                    logger.info(f"已删除 {table_name} 表中的 {deleted_count} 条记录")
                else:
                    deleted_tables[table_name] = 0
                    logger.info(f"{table_name} 表已为空,无需删除")

            except Exception as table_error:
                logger.error(f"删除 {table_name} 表数据失败: {table_error}")
                # Undo everything wiped so far before reporting failure.
                await db.rollback()
                raise HTTPException(
                    status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                    detail=f"删除 {table_name} 表数据失败: {str(table_error)}",
                ) from table_error

        # Commit the whole wipe atomically.
        await db.commit()

        # Best-effort Redis cleanup after the commit.
        redis_deleted = 0
        try:
            redis_deleted = await redis_manager.clear_all_data()
            logger.info("Redis数据清空完成")
        except Exception as redis_error:
            logger.error(f"清空Redis数据失败: {redis_error}")
            # Deliberate: Redis failure must not affect the committed wipe.

        total_deleted = sum(deleted_tables.values())

        if total_deleted > 0 and redis_deleted > 0:
            message = f"成功删除数据库 {total_deleted} 条记录,清空 Redis {redis_deleted} 个键"
        elif total_deleted > 0:
            message = f"成功删除数据库 {total_deleted} 条记录,Redis 无需清空"
        elif redis_deleted > 0:
            message = f"数据库已为空,成功清空 Redis {redis_deleted} 个键"
        else:
            message = "数据库和Redis均为空,无需删除"

        return DeleteAllDataResponse(
            success=True,
            deleted_tables=deleted_tables,
            message=message,
        )

    except HTTPException:
        # Re-raise the per-table 500 above without re-wrapping it.
        raise
    except Exception as e:
        await db.rollback()
        logger.error(f"删除所有数据异常: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"删除所有数据失败: {str(e)}",
        ) from e
|