mirror of
https://git.oceanpay.cc/danial/kami_apple_exchage.git
synced 2025-12-18 22:29:09 +00:00
- 新增链接权重字段,支持1-100范围设置 - 修改轮询算法为基于权重的选择机制 - 更新链接API接口返回统一使用LinkInfo模型 - 添加更新链接权重的PATCH端点 - 调整链接仓库查询逻辑,只包含激活状态链接 - 迁移链接相关Pydantic模型到task模块统一管理 - 修改分页响应格式为通用PaginatedResponse包装 - 禁用OpenTelemetry监控配置
324 lines
9.5 KiB
Python
324 lines
9.5 KiB
Python
"""
|
||
用户数据服务层
|
||
处理用户数据相关的业务逻辑
|
||
"""
|
||
|
||
from typing import Any
|
||
|
||
# Celery has been replaced with Arq for coroutine-based task processing
|
||
from sqlalchemy.ext.asyncio import AsyncSession
|
||
|
||
from app.core.log import get_logger
|
||
from app.core.redis_manager import redis_manager
|
||
from app.models.orders import OrderStatus
|
||
from app.models.user_data import UserData
|
||
from app.repositories.repository_factory import RepositoryFactory
|
||
from app.schemas.user_data import (
|
||
UserDataBase,
|
||
UserDataCreate,
|
||
UserDataResponse,
|
||
UserDataStatsResponse,
|
||
UserDataUpdate,
|
||
UserDataUploadResponse,
|
||
)
|
||
from app.schemas.task import (
|
||
PaginatedResponse,
|
||
UserInfo,
|
||
)
|
||
|
||
|
||
# 延迟导入避免循环依赖
|
||
def get_create_order_task():
    """Legacy shim kept for callers that still look up the Celery task.

    Celery has been replaced with Arq for coroutine-based task processing;
    the callable returned here always raises to surface stale call sites.

    Returns:
        A placeholder callable that raises ``NotImplementedError`` when invoked.
    """

    def _unsupported(*args, **kwargs):
        raise NotImplementedError(
            "Celery tasks have been replaced with Arq. Please use Arq tasks instead."
        )

    return _unsupported
|
||
|
||
|
||
# Module-level logger bound to this module's import path.
logger = get_logger(__name__)
|
||
|
||
|
||
class UserDataService:
    """Service layer for user-data business logic.

    Wraps repository access (via ``RepositoryFactory``) and converts ORM
    ``UserData`` models into Pydantic response schemas.
    """

    def __init__(self, db: AsyncSession):
        self.db = db
        self.repo_factory = RepositoryFactory(db)

    async def upload_user_data(
        self, user_data: UserDataCreate
    ) -> UserDataUploadResponse:
        """Persist a new user-data record and publish its id to Redis.

        Args:
            user_data: Validated creation payload.

        Returns:
            Upload response wrapping the created record.
        """
        user = await self.repo_factory.user_data.create(
            first_name=user_data.first_name,
            last_name=user_data.last_name,
            email=user_data.email,
            phone=user_data.phone,
            street_address=user_data.street_address,
            city=user_data.city,
            state=user_data.state,
            zip_code=user_data.zip_code,
        )

        logger.info(f"用户数据创建成功: {user.id}")
        # Make the new id visible to downstream consumers via Redis.
        await redis_manager.save_user_data_id(user.id)

        return UserDataUploadResponse(
            user_data=self._convert_to_response(user),
            message="创建成功",
        )

    async def get_user_data(self, user_id: str) -> UserDataResponse | None:
        """Return a single user-data record, or ``None`` when absent.

        Args:
            user_id: Primary key of the record.
        """
        user = await self.repo_factory.user_data.get_by_id(user_id)
        return self._convert_to_response(user) if user else None

    async def get_user_info(self, user_id: str) -> UserInfo | None:
        """Return the full user info (all DB fields), or ``None`` when absent.

        Args:
            user_id: Primary key of the record.
        """
        user = await self.repo_factory.user_data.get_by_id(user_id)
        return self._convert_to_info_response(user) if user else None

    async def update_user_data(
        self, user_id: str, user_data: UserDataUpdate
    ) -> UserDataResponse | None:
        """Update a user-data record.

        Args:
            user_id: Primary key of the record.
            user_data: Partial update payload; unset/None fields are ignored.

        Returns:
            The updated record, or ``None`` when the user does not exist
            or the repository update fails.

        Raises:
            ValueError: If the new email already belongs to another user.
        """
        existing_user = await self.repo_factory.user_data.get_by_id(user_id)
        if not existing_user:
            return None

        # Guard against taking over another user's email address.
        if user_data.email and user_data.email != existing_user.email:
            email_conflict = await self.repo_factory.user_data.get_user_by_email(
                user_data.email
            )
            if email_conflict and email_conflict.id != user_id:
                raise ValueError(f"邮箱已存在: {user_data.email}")

        # Only persist fields that were explicitly set and are non-None.
        # NOTE(review): `.dict()` is the Pydantic v1 API — if the project is
        # on Pydantic v2 this should become `.model_dump()`; confirm version.
        update_data = {
            k: v for k, v in user_data.dict(exclude_unset=True).items() if v is not None
        }
        # FIX: avoid issuing an empty UPDATE when the payload carries no
        # effective changes; return the current state unchanged instead.
        if not update_data:
            return self._convert_to_response(existing_user)

        updated_user = await self.repo_factory.user_data.update_by_id(
            user_id, **update_data
        )
        if updated_user:
            logger.info(f"用户数据更新成功: {user_id}")
            return self._convert_to_response(updated_user)
        return None

    async def delete_user_data(self, user_id: str) -> bool:
        """Delete a user-data record.

        Args:
            user_id: Primary key of the record.

        Returns:
            True when a record was deleted.

        Raises:
            ValueError: If the user still has associated orders.
        """
        # Refuse to delete users that still own orders.
        user_with_orders = await self.repo_factory.user_data.get_user_with_orders(
            user_id
        )
        if user_with_orders and user_with_orders.orders:
            raise ValueError("无法删除有关联订单的用户数据")

        success = await self.repo_factory.user_data.delete_by_id(user_id)
        if success:
            logger.info(f"用户数据删除成功: {user_id}")
        return success

    async def get_user_data_list(
        self,
        page: int = 1,
        size: int = 20,
        city: str | None = None,
        state: str | None = None,
        country: str | None = None,
        name_pattern: str | None = None,
    ) -> PaginatedResponse[UserDataResponse]:
        """List user-data records with optional filtering and pagination.

        Filter precedence is city > state > name_pattern; only when none of
        those is given does the query go through the repository's native
        pagination (where `country` is applied).

        NOTE(review): `country` is silently ignored whenever city/state/
        name_pattern is supplied — confirm this matches the API contract.

        Args:
            page: 1-based page number.
            size: Page size.
            city: Exact-match city filter.
            state: Exact-match state/province filter.
            country: Country filter (paginated branch only).
            name_pattern: Fuzzy name search.

        Returns:
            A paginated response of user-data records.
        """
        repo = self.repo_factory.user_data

        if city:
            users = await repo.get_users_by_city(city)
        elif state:
            users = await repo.get_users_by_state(state)
        elif name_pattern:
            users = await repo.search_users_by_name(name_pattern)
        else:
            # Repository-side pagination for the unfiltered/country case.
            filters = {"country": country} if country else {}
            result = await repo.get_paginated(page=page, page_size=size, **filters)
            return PaginatedResponse[UserDataResponse](
                items=[self._convert_to_response(u) for u in result.items],
                total=result.total,
                page=page,
                size=size,
                pages=result.pages,
            )

        # Filtered branches return full result sets; paginate in memory.
        total = len(users)
        start_idx = (page - 1) * size
        page_users = users[start_idx : start_idx + size]
        pages = (total + size - 1) // size

        return PaginatedResponse[UserDataResponse](
            items=[self._convert_to_response(u) for u in page_users],
            total=total,
            page=page,
            size=size,
            pages=pages,
        )

    async def get_task_status(self, task_id: str) -> dict[str, Any]:
        """Return the status of a background task.

        Arq task status would need to be read from Redis (or another state
        store); this is a stub response until that lookup is implemented.

        Args:
            task_id: Identifier of the task.

        Returns:
            A status dict with `task_id`, `status`, and `message` keys.
        """
        # FIX: the original wrapped this constant dict in try/except, but a
        # dict literal cannot raise — the except branch was dead code.
        return {
            "task_id": task_id,
            "status": "UNKNOWN",
            "message": "Arq任务状态查询需要实现具体的状态检查逻辑",
        }

    @staticmethod
    def _base_fields(user: UserData) -> dict[str, Any]:
        """Field mapping shared by both response converters (deduplicated)."""
        return {
            "id": user.id,
            "first_name": user.first_name,
            "last_name": user.last_name,
            "email": user.email,
            "phone": user.phone,
            "street_address": user.street_address,
            "city": user.city,
            "state": user.state,
            "zip_code": user.zip_code,
            # Timestamps are serialized as ISO-8601 strings.
            "created_at": user.created_at.isoformat(),
            "updated_at": user.updated_at.isoformat(),
        }

    def _convert_to_response(self, user: UserData) -> UserDataResponse:
        """Convert an ORM user model into the standard response schema."""
        return UserDataResponse(**self._base_fields(user))

    def _convert_to_info_response(self, user: UserData) -> UserInfo:
        """Convert an ORM user model into the full-info response schema.

        Includes the computed `full_name` / `full_address` properties on top
        of the base fields.
        """
        return UserInfo(
            **self._base_fields(user),
            full_name=user.full_name,
            full_address=user.full_address,
        )

    async def delete_all_user_data(self, skip_orders: bool = False) -> dict[str, Any]:
        """Soft-delete all user-data records.

        Args:
            skip_orders: When True, skip users that have associated orders.

        Returns:
            Deletion statistics as produced by the repository.
        """
        return await self.repo_factory.user_data.bulk_soft_delete(skip_orders=skip_orders)
|