123
This commit is contained in:
566
.kiro/specs/multi-user-signin/design.md
Normal file
566
.kiro/specs/multi-user-signin/design.md
Normal file
@@ -0,0 +1,566 @@
|
|||||||
|
# 设计文档:Weibo-HotSign 多用户签到系统
|
||||||
|
|
||||||
|
## 概述
|
||||||
|
|
||||||
|
本设计文档描述 Weibo-HotSign 系统的架构重构与核心功能实现方案。核心目标是:
|
||||||
|
|
||||||
|
1. 引入 `backend/shared/` 共享模块,统一 ORM 模型、数据库连接和加密工具,消除各服务间的代码重复
|
||||||
|
2. 完善 `auth_service`,实现 Refresh Token 机制
|
||||||
|
3. 从零实现 `api_service`,提供微博账号 CRUD、任务配置和签到日志查询 API
|
||||||
|
4. 将 `signin_executor` 和 `task_scheduler` 中的 Mock 实现替换为真实数据库交互
|
||||||
|
5. 所有 API 遵循统一响应格式
|
||||||
|
|
||||||
|
技术栈:Python 3.11 + FastAPI + SQLAlchemy (async) + Celery + MySQL (aiomysql) + Redis
|
||||||
|
|
||||||
|
## 架构
|
||||||
|
|
||||||
|
### 重构后的服务架构
|
||||||
|
|
||||||
|
```mermaid
|
||||||
|
graph TD
|
||||||
|
subgraph "客户端"
|
||||||
|
FE[Web Frontend / API Client]
|
||||||
|
end
|
||||||
|
|
||||||
|
subgraph "后端服务层"
|
||||||
|
API[API_Service :8000<br/>账号/任务/日志管理]
|
||||||
|
AUTH[Auth_Service :8001<br/>注册/登录/Token刷新]
|
||||||
|
SCHED[Task_Scheduler<br/>Celery Beat]
|
||||||
|
EXEC[Signin_Executor<br/>Celery Worker]
|
||||||
|
end
|
||||||
|
|
||||||
|
subgraph "共享层"
|
||||||
|
SHARED[shared/<br/>ORM Models + DB Session<br/>+ Crypto Utils + Response Format]
|
||||||
|
end
|
||||||
|
|
||||||
|
subgraph "基础设施"
|
||||||
|
MYSQL[(MySQL)]
|
||||||
|
REDIS[(Redis<br/>Cache + Message Queue)]
|
||||||
|
PROXY[Proxy Pool]
|
||||||
|
end
|
||||||
|
|
||||||
|
FE -->|REST API| API
|
||||||
|
FE -->|REST API| AUTH
|
||||||
|
API -->|导入| SHARED
|
||||||
|
AUTH -->|导入| SHARED
|
||||||
|
SCHED -->|导入| SHARED
|
||||||
|
EXEC -->|导入| SHARED
|
||||||
|
SHARED -->|aiomysql| MYSQL
|
||||||
|
SCHED -->|发布任务| REDIS
|
||||||
|
EXEC -->|消费任务| REDIS
|
||||||
|
EXEC -->|获取代理| PROXY
|
||||||
|
EXEC -->|签到请求| WEIBO[Weibo.com]
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
### 关键架构决策
|
||||||
|
|
||||||
|
1. **共享模块而非微服务间 RPC**:各服务通过 Python 包导入 `shared/` 模块访问数据库,而非通过 HTTP 调用其他服务查询数据。这简化了部署,减少了网络延迟,适合当前规模。
|
||||||
|
2. **API_Service 作为唯一对外网关**:所有账号管理、任务配置、日志查询 API 集中在 `api_service` 中,`auth_service` 仅负责认证。
|
||||||
|
3. **Celery 同时承担调度和执行**:`task_scheduler` 运行 Celery Beat(调度),`signin_executor` 运行 Celery Worker(执行),通过 Redis 消息队列解耦。
|
||||||
|
4. **Dockerfile 多阶段构建**:保持现有的多阶段 Dockerfile 结构,新增 `shared/` 目录的 COPY 步骤。
|
||||||
|
|
||||||
|
### 目录结构(重构后)
|
||||||
|
|
||||||
|
```
|
||||||
|
backend/
|
||||||
|
├── shared/ # 新增:共享模块
|
||||||
|
│ ├── __init__.py
|
||||||
|
│ ├── models/
|
||||||
|
│ │ ├── __init__.py
|
||||||
|
│ │ ├── base.py # SQLAlchemy Base + engine + session
|
||||||
|
│ │ ├── user.py # User ORM model
|
||||||
|
│ │ ├── account.py # Account ORM model
|
||||||
|
│ │ ├── task.py # Task ORM model
|
||||||
|
│ │ └── signin_log.py # SigninLog ORM model
|
||||||
|
│ ├── crypto.py # AES-256-GCM 加密/解密工具
|
||||||
|
│ ├── response.py # 统一响应格式工具
|
||||||
|
│ └── config.py # 共享配置(DB URL, Redis URL 等)
|
||||||
|
├── auth_service/
|
||||||
|
│ └── app/
|
||||||
|
│ ├── main.py # 重构:使用 shared models
|
||||||
|
│ ├── config.py
|
||||||
|
│ ├── schemas/
|
||||||
|
│ │ └── user.py # 增加 RefreshToken schema
|
||||||
|
│ ├── services/
|
||||||
|
│ │ └── auth_service.py # 增加 refresh token 逻辑
|
||||||
|
│ └── utils/
|
||||||
|
│ └── security.py # 增加 refresh token 生成/验证
|
||||||
|
├── api_service/
|
||||||
|
│ └── app/
|
||||||
|
│ ├── __init__.py
|
||||||
|
│ ├── main.py # 新增:FastAPI 应用入口
|
||||||
|
│ ├── config.py
|
||||||
|
│ ├── dependencies.py # JWT 认证依赖
|
||||||
|
│ ├── schemas/
|
||||||
|
│ │ ├── __init__.py
|
||||||
|
│ │ ├── account.py # 账号请求/响应 schema
|
||||||
|
│ │ ├── task.py # 任务请求/响应 schema
|
||||||
|
│ │ └── signin_log.py # 签到日志响应 schema
|
||||||
|
│ └── routers/
|
||||||
|
│ ├── __init__.py
|
||||||
|
│ ├── accounts.py # 账号 CRUD 路由
|
||||||
|
│ ├── tasks.py # 任务 CRUD 路由
|
||||||
|
│ └── signin_logs.py # 签到日志查询路由
|
||||||
|
├── signin_executor/
|
||||||
|
│ └── app/
|
||||||
|
│ ├── main.py
|
||||||
|
│ ├── config.py
|
||||||
|
│ ├── services/
|
||||||
|
│ │ ├── signin_service.py # 重构:使用 shared models 查询真实数据
|
||||||
|
│ │ └── weibo_client.py # 重构:实现真实加密/解密
|
||||||
|
│ └── models/
|
||||||
|
│ └── signin_models.py # 保留 Pydantic 请求/响应模型
|
||||||
|
├── task_scheduler/
|
||||||
|
│ └── app/
|
||||||
|
│ ├── celery_app.py # 重构:从 DB 动态加载任务
|
||||||
|
│ ├── config.py
|
||||||
|
│ └── tasks/
|
||||||
|
│ └── signin_tasks.py # 重构:使用真实 DB 查询
|
||||||
|
├── Dockerfile # 更新:各阶段 COPY shared/
|
||||||
|
└── requirements.txt
|
||||||
|
```
|
||||||
|
|
||||||
|
## 组件与接口
|
||||||
|
|
||||||
|
### 1. shared 模块
|
||||||
|
|
||||||
|
#### 1.1 数据库连接管理 (`shared/models/base.py`)
|
||||||
|
|
||||||
|
```python
|
||||||
|
# 提供异步 engine 和 session factory
|
||||||
|
# 所有服务通过 get_db() 获取 AsyncSession
|
||||||
|
async def get_db() -> AsyncGenerator[AsyncSession, None]:
|
||||||
|
async with AsyncSessionLocal() as session:
|
||||||
|
try:
|
||||||
|
yield session
|
||||||
|
finally:
|
||||||
|
await session.close()
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 1.2 加密工具 (`shared/crypto.py`)
|
||||||
|
|
||||||
|
```python
|
||||||
|
def encrypt_cookie(plaintext: str, key: bytes) -> tuple[str, str]:
|
||||||
|
"""AES-256-GCM 加密,返回 (密文base64, iv_base64)"""
|
||||||
|
|
||||||
|
def decrypt_cookie(ciphertext_b64: str, iv_b64: str, key: bytes) -> str:
|
||||||
|
"""AES-256-GCM 解密,返回原始 Cookie 字符串"""
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 1.3 统一响应格式 (`shared/response.py`)
|
||||||
|
|
||||||
|
```python
|
||||||
|
def success_response(data: Any, message: str = "Operation successful") -> dict
|
||||||
|
def error_response(message: str, code: str, details: list | None = None, status_code: int = 400) -> JSONResponse
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Auth_Service 接口
|
||||||
|
|
||||||
|
| 方法 | 路径 | 描述 | 需求 |
|
||||||
|
|------|------|------|------|
|
||||||
|
| POST | `/auth/register` | 用户注册 | 1.1, 1.2, 1.8 |
|
||||||
|
| POST | `/auth/login` | 用户登录,返回 access_token + refresh_token | 1.3, 1.4 |
|
||||||
|
| POST | `/auth/refresh` | 刷新 Token | 1.5, 1.6 |
|
||||||
|
| GET | `/auth/me` | 获取当前用户信息 | 1.7 |
|
||||||
|
|
||||||
|
### 3. API_Service 接口
|
||||||
|
|
||||||
|
| 方法 | 路径 | 描述 | 需求 |
|
||||||
|
|------|------|------|------|
|
||||||
|
| POST | `/api/v1/accounts` | 添加微博账号 | 2.1, 2.7, 2.8 |
|
||||||
|
| GET | `/api/v1/accounts` | 获取账号列表 | 2.2, 2.8 |
|
||||||
|
| GET | `/api/v1/accounts/{id}` | 获取账号详情 | 2.3, 2.6, 2.8 |
|
||||||
|
| PUT | `/api/v1/accounts/{id}` | 更新账号信息 | 2.4, 2.6, 2.8 |
|
||||||
|
| DELETE | `/api/v1/accounts/{id}` | 删除账号 | 2.5, 2.6, 2.8 |
|
||||||
|
| POST | `/api/v1/accounts/{id}/tasks` | 创建签到任务 | 4.1, 4.2, 4.6 |
|
||||||
|
| GET | `/api/v1/accounts/{id}/tasks` | 获取任务列表 | 4.3 |
|
||||||
|
| PUT | `/api/v1/tasks/{id}` | 启用/禁用任务 | 4.4 |
|
||||||
|
| DELETE | `/api/v1/tasks/{id}` | 删除任务 | 4.5 |
|
||||||
|
| GET | `/api/v1/accounts/{id}/signin-logs` | 查询签到日志 | 8.1, 8.2, 8.3, 8.4, 8.5 |
|
||||||
|
|
||||||
|
### 4. Task_Scheduler 内部接口
|
||||||
|
|
||||||
|
Task_Scheduler 不对外暴露 HTTP 接口,通过以下方式工作:
|
||||||
|
|
||||||
|
- **启动时**:从 DB 加载 `is_enabled=True` 的任务,注册到 Celery Beat
|
||||||
|
- **运行时**:根据 Cron 表达式触发 `execute_signin_task` Celery task
|
||||||
|
- **动态更新**:通过 Redis pub/sub 接收任务变更通知,动态更新调度
|
||||||
|
|
||||||
|
### 5. Signin_Executor 内部流程
|
||||||
|
|
||||||
|
```mermaid
|
||||||
|
sequenceDiagram
|
||||||
|
participant Queue as Redis Queue
|
||||||
|
participant Exec as Signin_Executor
|
||||||
|
participant DB as MySQL
|
||||||
|
participant Weibo as Weibo.com
|
||||||
|
participant Proxy as Proxy Pool
|
||||||
|
|
||||||
|
Queue->>Exec: 消费签到任务 (task_id, account_id)
|
||||||
|
Exec->>DB: 查询 Account (by account_id)
|
||||||
|
Exec->>Exec: 解密 Cookie (AES-256-GCM)
|
||||||
|
Exec->>Weibo: 验证 Cookie 有效性
|
||||||
|
alt Cookie 无效
|
||||||
|
Exec->>DB: 更新 account.status = "invalid_cookie"
|
||||||
|
Exec->>DB: 写入失败日志
|
||||||
|
else Cookie 有效
|
||||||
|
Exec->>Weibo: 获取超话列表
|
||||||
|
loop 每个未签到超话
|
||||||
|
Exec->>Proxy: 获取代理 IP
|
||||||
|
Exec->>Exec: 随机延迟 (1-3s)
|
||||||
|
Exec->>Weibo: 执行签到请求
|
||||||
|
Exec->>DB: 写入 signin_log
|
||||||
|
end
|
||||||
|
end
|
||||||
|
```
|
||||||
|
|
||||||
|
## 数据模型
|
||||||
|
|
||||||
|
### ORM 模型定义(shared/models/)
|
||||||
|
|
||||||
|
#### User 模型
|
||||||
|
|
||||||
|
```python
|
||||||
|
class User(Base):
|
||||||
|
__tablename__ = "users"
|
||||||
|
id = Column(String(36), primary_key=True, default=lambda: str(uuid4()))
|
||||||
|
username = Column(String(50), unique=True, nullable=False, index=True)
|
||||||
|
email = Column(String(255), unique=True, nullable=False, index=True)
|
||||||
|
hashed_password = Column(String(255), nullable=False)
|
||||||
|
created_at = Column(DateTime, server_default=func.now())
|
||||||
|
is_active = Column(Boolean, default=True)
|
||||||
|
# Relationships
|
||||||
|
accounts = relationship("Account", back_populates="user", cascade="all, delete-orphan")
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Account 模型
|
||||||
|
|
||||||
|
```python
|
||||||
|
class Account(Base):
|
||||||
|
__tablename__ = "accounts"
|
||||||
|
id = Column(String(36), primary_key=True, default=lambda: str(uuid4()))
|
||||||
|
user_id = Column(String(36), ForeignKey("users.id", ondelete="CASCADE"), nullable=False)
|
||||||
|
weibo_user_id = Column(String(20), nullable=False)
|
||||||
|
remark = Column(String(100))
|
||||||
|
encrypted_cookies = Column(Text, nullable=False)
|
||||||
|
iv = Column(String(32), nullable=False)
|
||||||
|
status = Column(String(20), default="pending") # pending, active, invalid_cookie, banned
|
||||||
|
last_checked_at = Column(DateTime, nullable=True)
|
||||||
|
created_at = Column(DateTime, server_default=func.now())
|
||||||
|
# Relationships
|
||||||
|
user = relationship("User", back_populates="accounts")
|
||||||
|
tasks = relationship("Task", back_populates="account", cascade="all, delete-orphan")
|
||||||
|
signin_logs = relationship("SigninLog", back_populates="account")
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Task 模型
|
||||||
|
|
||||||
|
```python
|
||||||
|
class Task(Base):
|
||||||
|
__tablename__ = "tasks"
|
||||||
|
id = Column(String(36), primary_key=True, default=lambda: str(uuid4()))
|
||||||
|
account_id = Column(String(36), ForeignKey("accounts.id", ondelete="CASCADE"), nullable=False)
|
||||||
|
cron_expression = Column(String(50), nullable=False)
|
||||||
|
is_enabled = Column(Boolean, default=True)
|
||||||
|
created_at = Column(DateTime, server_default=func.now())
|
||||||
|
# Relationships
|
||||||
|
account = relationship("Account", back_populates="tasks")
|
||||||
|
```
|
||||||
|
|
||||||
|
#### SigninLog 模型
|
||||||
|
|
||||||
|
```python
|
||||||
|
class SigninLog(Base):
|
||||||
|
__tablename__ = "signin_logs"
|
||||||
|
id = Column(BigInteger, primary_key=True, autoincrement=True)
|
||||||
|
account_id = Column(String(36), ForeignKey("accounts.id"), nullable=False)
|
||||||
|
topic_title = Column(String(100))
|
||||||
|
status = Column(String(20), nullable=False) # success, failed_already_signed, failed_network, failed_banned
|
||||||
|
reward_info = Column(JSON, nullable=True)
|
||||||
|
error_message = Column(Text, nullable=True)
|
||||||
|
signed_at = Column(DateTime, server_default=func.now())
|
||||||
|
# Relationships
|
||||||
|
account = relationship("Account", back_populates="signin_logs")
|
||||||
|
```
|
||||||
|
|
||||||
|
### Refresh Token 存储
|
||||||
|
|
||||||
|
Refresh Token 使用 Redis 存储,key 格式为 `refresh_token:{token_hash}`,value 为 `user_id`,TTL 为 7 天。这避免了在数据库中增加额外的表,同时利用 Redis 的自动过期机制。
|
||||||
|
|
||||||
|
```python
|
||||||
|
# 存储
|
||||||
|
await redis.setex(f"refresh_token:{token_hash}", 7 * 24 * 3600, user_id)
|
||||||
|
|
||||||
|
# 验证
|
||||||
|
user_id = await redis.get(f"refresh_token:{token_hash}")
|
||||||
|
|
||||||
|
# 刷新时删除旧 token,生成新 token(Token Rotation)
|
||||||
|
await redis.delete(f"refresh_token:{old_token_hash}")
|
||||||
|
await redis.setex(f"refresh_token:{new_token_hash}", 7 * 24 * 3600, user_id)
|
||||||
|
```
|
||||||
|
|
||||||
|
## 正确性属性 (Correctness Properties)
|
||||||
|
|
||||||
|
*属性(Property)是指在系统所有有效执行中都应成立的特征或行为——本质上是对系统应做什么的形式化陈述。属性是人类可读规格说明与机器可验证正确性保证之间的桥梁。*
|
||||||
|
|
||||||
|
### Property 1: Cookie 加密 Round-trip
|
||||||
|
|
||||||
|
*For any* 有效的 Cookie 字符串,使用 AES-256-GCM 加密后再用相同密钥和 IV 解密,应产生与原始字符串完全相同的结果。
|
||||||
|
|
||||||
|
**Validates: Requirements 3.1, 3.2, 3.3**
|
||||||
|
|
||||||
|
### Property 2: 用户注册后可登录获取信息
|
||||||
|
|
||||||
|
*For any* 有效的注册信息(用户名、邮箱、符合强度要求的密码),注册后使用相同邮箱和密码登录应成功返回 Token,使用该 Token 调用 `/auth/me` 应返回与注册时一致的用户名和邮箱。
|
||||||
|
|
||||||
|
**Validates: Requirements 1.1, 1.3, 1.7**
|
||||||
|
|
||||||
|
### Property 3: 用户名/邮箱唯一性约束
|
||||||
|
|
||||||
|
*For any* 已注册的用户,使用相同用户名或相同邮箱再次注册应返回 409 Conflict 错误。
|
||||||
|
|
||||||
|
**Validates: Requirements 1.2**
|
||||||
|
|
||||||
|
### Property 4: 无效凭证登录拒绝
|
||||||
|
|
||||||
|
*For any* 不存在的邮箱或错误的密码,登录请求应返回 401 Unauthorized 错误。
|
||||||
|
|
||||||
|
**Validates: Requirements 1.4**
|
||||||
|
|
||||||
|
### Property 5: Refresh Token 轮换
|
||||||
|
|
||||||
|
*For any* 已登录用户的有效 Refresh Token,刷新操作应返回新的 Access Token 和新的 Refresh Token,且旧的 Refresh Token 应失效(再次使用应返回 401)。
|
||||||
|
|
||||||
|
**Validates: Requirements 1.5, 1.6**
|
||||||
|
|
||||||
|
### Property 6: 弱密码拒绝
|
||||||
|
|
||||||
|
*For any* 不满足强度要求的密码(缺少大写字母、小写字母、数字或特殊字符,或长度不足8位),注册请求应返回 400 Bad Request。
|
||||||
|
|
||||||
|
**Validates: Requirements 1.8**
|
||||||
|
|
||||||
|
### Property 7: 账号创建与列表一致性
|
||||||
|
|
||||||
|
*For any* 用户和任意数量的有效微博账号数据,创建 N 个账号后查询列表应返回恰好 N 条记录,每条记录的状态应为 "pending",且响应中不应包含解密后的 Cookie 明文。
|
||||||
|
|
||||||
|
**Validates: Requirements 2.1, 2.2, 2.7**
|
||||||
|
|
||||||
|
### Property 8: 账号详情 Round-trip
|
||||||
|
|
||||||
|
*For any* 已创建的微博账号,通过详情接口查询应返回与创建时一致的备注和微博用户 ID。
|
||||||
|
|
||||||
|
**Validates: Requirements 2.3**
|
||||||
|
|
||||||
|
### Property 9: 账号更新反映
|
||||||
|
|
||||||
|
*For any* 已创建的微博账号和任意新的备注字符串,更新备注后再次查询应返回更新后的值。
|
||||||
|
|
||||||
|
**Validates: Requirements 2.4**
|
||||||
|
|
||||||
|
### Property 10: 账号删除级联
|
||||||
|
|
||||||
|
*For any* 拥有关联 Task 和 SigninLog 的账号,删除该账号后,查询该账号的 Task 列表和 SigninLog 列表应返回空结果。
|
||||||
|
|
||||||
|
**Validates: Requirements 2.5**
|
||||||
|
|
||||||
|
### Property 11: 跨用户资源隔离
|
||||||
|
|
||||||
|
*For any* 两个不同用户 A 和 B,用户 A 尝试访问、修改或删除用户 B 的账号、任务或签到日志时,应返回 403 Forbidden。
|
||||||
|
|
||||||
|
**Validates: Requirements 2.6, 4.6, 8.5**
|
||||||
|
|
||||||
|
### Property 12: 受保护接口认证要求
|
||||||
|
|
||||||
|
*For any* 受保护的 API 端点(账号管理、任务管理、日志查询),不携带 JWT Token 或携带无效 Token 的请求应返回 401 Unauthorized。
|
||||||
|
|
||||||
|
**Validates: Requirements 2.8, 8.4, 9.4**
|
||||||
|
|
||||||
|
### Property 13: 有效 Cron 表达式创建任务
|
||||||
|
|
||||||
|
*For any* 有效的 Cron 表达式和已存在的账号,创建任务应成功,且查询该账号的任务列表应包含新创建的任务。
|
||||||
|
|
||||||
|
**Validates: Requirements 4.1, 4.3**
|
||||||
|
|
||||||
|
### Property 14: 无效 Cron 表达式拒绝
|
||||||
|
|
||||||
|
*For any* 无效的 Cron 表达式字符串,创建任务请求应返回 400 Bad Request。
|
||||||
|
|
||||||
|
**Validates: Requirements 4.2**
|
||||||
|
|
||||||
|
### Property 15: 任务启用/禁用切换
|
||||||
|
|
||||||
|
*For any* 已创建的任务,切换 `is_enabled` 状态后查询应反映新的状态值。
|
||||||
|
|
||||||
|
**Validates: Requirements 4.4**
|
||||||
|
|
||||||
|
### Property 16: 任务删除
|
||||||
|
|
||||||
|
*For any* 已创建的任务,删除后查询该任务应返回 404 或不在列表中出现。
|
||||||
|
|
||||||
|
**Validates: Requirements 4.5**
|
||||||
|
|
||||||
|
### Property 17: 调度器加载已启用任务
|
||||||
|
|
||||||
|
*For any* 数据库中的任务集合,Task_Scheduler 启动时加载的任务数量应等于 `is_enabled=True` 的任务数量。
|
||||||
|
|
||||||
|
**Validates: Requirements 5.1**
|
||||||
|
|
||||||
|
### Property 18: 分布式锁防重复调度
|
||||||
|
|
||||||
|
*For any* 签到任务,同一时刻并发触发两次应只产生一次实际执行。
|
||||||
|
|
||||||
|
**Validates: Requirements 5.5**
|
||||||
|
|
||||||
|
### Property 19: 签到结果持久化
|
||||||
|
|
||||||
|
*For any* 签到执行结果(成功或失败),`signin_logs` 表中应存在对应的记录,且记录的 `account_id`、`status` 和 `topic_title` 与执行结果一致。
|
||||||
|
|
||||||
|
**Validates: Requirements 6.1, 6.4**
|
||||||
|
|
||||||
|
### Property 20: Cookie 失效时更新账号状态
|
||||||
|
|
||||||
|
*For any* Cookie 已失效的账号,执行签到时应将账号状态更新为 "invalid_cookie"。
|
||||||
|
|
||||||
|
**Validates: Requirements 6.5, 3.4**
|
||||||
|
|
||||||
|
### Property 21: 随机延迟范围
|
||||||
|
|
||||||
|
*For any* 调用反爬虫延迟函数的结果,延迟值应在配置的 `[min, max]` 范围内。
|
||||||
|
|
||||||
|
**Validates: Requirements 7.1**
|
||||||
|
|
||||||
|
### Property 22: User-Agent 来源
|
||||||
|
|
||||||
|
*For any* 调用 User-Agent 选择函数的结果,返回的 UA 字符串应属于预定义列表中的某一个。
|
||||||
|
|
||||||
|
**Validates: Requirements 7.2**
|
||||||
|
|
||||||
|
### Property 23: 签到日志时间倒序
|
||||||
|
|
||||||
|
*For any* 包含多条签到日志的账号,查询返回的日志列表应按 `signed_at` 降序排列。
|
||||||
|
|
||||||
|
**Validates: Requirements 8.1**
|
||||||
|
|
||||||
|
### Property 24: 签到日志分页
|
||||||
|
|
||||||
|
*For any* 包含 N 条日志的账号和分页参数 (page, size),返回的记录数应等于 `max(0, min(size, N - (page-1)*size))`(当 page 超出最后一页时为 0)且总记录数应等于 N。
|
||||||
|
|
||||||
|
**Validates: Requirements 8.2**
|
||||||
|
|
||||||
|
### Property 25: 签到日志状态过滤
|
||||||
|
|
||||||
|
*For any* 状态过滤参数,返回的所有日志记录的 `status` 字段应与过滤参数一致。
|
||||||
|
|
||||||
|
**Validates: Requirements 8.3**
|
||||||
|
|
||||||
|
### Property 26: 统一响应格式
|
||||||
|
|
||||||
|
*For any* API 调用,成功响应应包含 `success=true` 和 `data` 字段;错误响应应包含 `success=false`、`data=null` 和 `error` 字段。
|
||||||
|
|
||||||
|
**Validates: Requirements 9.1, 9.2, 9.3**
|
||||||
|
|
||||||
|
## 错误处理
|
||||||
|
|
||||||
|
### 错误分类与处理策略
|
||||||
|
|
||||||
|
| 错误类型 | HTTP 状态码 | 错误码 | 处理策略 |
|
||||||
|
|----------|------------|--------|----------|
|
||||||
|
| 请求参数校验失败 | 400 | VALIDATION_ERROR | 返回字段级错误详情 |
|
||||||
|
| 未认证 | 401 | UNAUTHORIZED | 返回标准 401 响应 |
|
||||||
|
| 权限不足 | 403 | FORBIDDEN | 返回资源不可访问提示 |
|
||||||
|
| 资源不存在 | 404 | NOT_FOUND | 返回资源未找到提示 |
|
||||||
|
| 资源冲突 | 409 | CONFLICT | 返回冲突字段说明 |
|
||||||
|
| 服务器内部错误 | 500 | INTERNAL_ERROR | 记录详细日志,返回通用错误提示 |
|
||||||
|
|
||||||
|
### 签到执行错误处理
|
||||||
|
|
||||||
|
- **Cookie 解密失败**:标记账号为 `invalid_cookie`,记录错误日志,终止该账号签到
|
||||||
|
- **Cookie 验证失败**(微博返回未登录):同上
|
||||||
|
- **网络超时/连接错误**:记录 `failed_network` 日志,不更改账号状态(可能是临时问题)
|
||||||
|
- **微博返回封禁**:标记账号为 `banned`,记录日志,发送通知
|
||||||
|
- **代理池不可用**:降级为直连,记录警告日志
|
||||||
|
- **Celery 任务失败**:自动重试最多3次,间隔60秒,最终失败记录日志
|
||||||
|
|
||||||
|
### 全局异常处理
|
||||||
|
|
||||||
|
所有 FastAPI 服务注册统一的异常处理器:
|
||||||
|
|
||||||
|
```python
|
||||||
|
@app.exception_handler(HTTPException)
|
||||||
|
async def http_exception_handler(request, exc):
|
||||||
|
return error_response(exc.detail, f"HTTP_{exc.status_code}", status_code=exc.status_code)
|
||||||
|
|
||||||
|
@app.exception_handler(RequestValidationError)
|
||||||
|
async def validation_exception_handler(request, exc):
|
||||||
|
details = [{"field": e["loc"][-1], "message": e["msg"]} for e in exc.errors()]
|
||||||
|
return error_response("Validation failed", "VALIDATION_ERROR", details, 400)
|
||||||
|
```
|
||||||
|
|
||||||
|
## 测试策略
|
||||||
|
|
||||||
|
### 测试框架选择
|
||||||
|
|
||||||
|
- **单元测试**:`pytest` + `pytest-asyncio`(异步测试支持)
|
||||||
|
- **属性测试**:`hypothesis`(Python 属性测试库)
|
||||||
|
- **HTTP 测试**:`httpx` + FastAPI `TestClient`
|
||||||
|
- **数据库测试**:使用 SQLite in-memory 或测试专用 MySQL 实例
|
||||||
|
|
||||||
|
### 测试分层
|
||||||
|
|
||||||
|
#### 1. 单元测试
|
||||||
|
- 加密/解密函数的边界情况
|
||||||
|
- 密码强度验证的各种组合
|
||||||
|
- Cron 表达式验证
|
||||||
|
- 响应格式化函数
|
||||||
|
- 具体的错误场景(网络超时、解密失败等)
|
||||||
|
|
||||||
|
#### 2. 属性测试(Property-Based Testing)
|
||||||
|
- 使用 `hypothesis` 库,每个属性测试至少运行 100 次迭代
|
||||||
|
- 每个测试用注释标注对应的设计文档属性编号
|
||||||
|
- 标注格式:`# Feature: multi-user-signin, Property {N}: {property_text}`(该标注也可写在测试函数的 docstring 中,内容格式相同)
|
||||||
|
- 每个正确性属性对应一个独立的属性测试函数
|
||||||
|
|
||||||
|
#### 3. 集成测试
|
||||||
|
- API 端点的完整请求/响应流程
|
||||||
|
- 数据库 CRUD 操作的正确性
|
||||||
|
- 服务间通过 Redis 消息队列的交互
|
||||||
|
- Celery 任务的调度和执行
|
||||||
|
|
||||||
|
### 属性测试配置
|
||||||
|
|
||||||
|
```python
|
||||||
|
from hypothesis import given, settings, strategies as st
|
||||||
|
|
||||||
|
@settings(max_examples=100)
|
||||||
|
@given(cookie=st.text(min_size=1, max_size=1000))
|
||||||
|
def test_cookie_encryption_roundtrip(cookie):
|
||||||
|
"""Feature: multi-user-signin, Property 1: Cookie 加密 Round-trip"""
|
||||||
|
key = generate_test_key()
|
||||||
|
ciphertext, iv = encrypt_cookie(cookie, key)
|
||||||
|
decrypted = decrypt_cookie(ciphertext, iv, key)
|
||||||
|
assert decrypted == cookie
|
||||||
|
```
|
||||||
|
|
||||||
|
### 测试目录结构
|
||||||
|
|
||||||
|
```
|
||||||
|
backend/
|
||||||
|
├── tests/
|
||||||
|
│ ├── conftest.py # 共享 fixtures(DB session, test client 等)
|
||||||
|
│ ├── unit/
|
||||||
|
│ │ ├── test_crypto.py # 加密/解密单元测试
|
||||||
|
│ │ ├── test_password.py # 密码验证单元测试
|
||||||
|
│ │ └── test_cron.py # Cron 表达式验证单元测试
|
||||||
|
│ ├── property/
|
||||||
|
│ │ ├── test_crypto_props.py # Property 1
|
||||||
|
│ │ ├── test_auth_props.py # Property 2-6
|
||||||
|
│ │ ├── test_account_props.py # Property 7-12
|
||||||
|
│ │ ├── test_task_props.py # Property 13-18
|
||||||
|
│ │ ├── test_signin_props.py # Property 19-22
|
||||||
|
│ │ └── test_log_props.py # Property 23-26
|
||||||
|
│ └── integration/
|
||||||
|
│ ├── test_auth_flow.py # 完整认证流程
|
||||||
|
│ ├── test_account_flow.py # 账号管理流程
|
||||||
|
│ └── test_signin_flow.py # 签到执行流程
|
||||||
|
```
|
||||||
138
.kiro/specs/multi-user-signin/requirements.md
Normal file
138
.kiro/specs/multi-user-signin/requirements.md
Normal file
@@ -0,0 +1,138 @@
|
|||||||
|
# 需求文档:Weibo-HotSign 多用户签到系统
|
||||||
|
|
||||||
|
## 简介
|
||||||
|
|
||||||
|
Weibo-HotSign 是一个分布式微博超话自动签到系统,采用微服务架构(FastAPI + Celery + MySQL + Redis)。本需求文档覆盖系统的五大核心模块:用户认证(含 Token 刷新)、微博账号管理、定时签到任务配置、签到执行引擎、以及整体架构重构。目标是将当前分散的、含大量 Mock 实现的代码库重构为一个真正可运行的、模块间紧密集成的生产级系统。
|
||||||
|
|
||||||
|
## 术语表
|
||||||
|
|
||||||
|
- **System**: 指 Weibo-HotSign 后端系统整体
|
||||||
|
- **Auth_Service**: 用户认证与授权服务(`backend/auth_service`)
|
||||||
|
- **API_Service**: API 网关与账号/任务管理服务(`backend/api_service`)
|
||||||
|
- **Task_Scheduler**: 基于 Celery Beat 的定时任务调度服务(`backend/task_scheduler`)
|
||||||
|
- **Signin_Executor**: 签到执行 Worker 服务(`backend/signin_executor`)
|
||||||
|
- **User**: 使用本系统的注册用户
|
||||||
|
- **Weibo_Account**: 用户绑定到系统中的微博账号,以 Cookie 形式存储凭证
|
||||||
|
- **Task**: 用户为某个 Weibo_Account 配置的定时签到任务
|
||||||
|
- **Cookie**: 微博网站的登录凭证,用于模拟已登录状态
|
||||||
|
- **Cron_Expression**: 标准 Cron 表达式,用于定义任务调度时间
|
||||||
|
- **Signin_Log**: 每次签到执行的结果记录
|
||||||
|
- **JWT**: JSON Web Token,用于用户身份认证
|
||||||
|
- **Refresh_Token**: 用于在 Access Token 过期后获取新 Token 的长期凭证
|
||||||
|
- **AES-256-GCM**: 对称加密算法,用于加密存储 Cookie
|
||||||
|
|
||||||
|
## 需求
|
||||||
|
|
||||||
|
### 需求 1:用户认证与 Token 管理
|
||||||
|
|
||||||
|
**用户故事:** 作为用户,我希望能够注册、登录并安全地维持会话,以便长期使用系统而无需频繁重新登录。
|
||||||
|
|
||||||
|
#### 验收标准
|
||||||
|
|
||||||
|
1. WHEN 用户提交有效的注册信息(用户名、邮箱、密码),THE Auth_Service SHALL 创建用户账户并返回用户信息
|
||||||
|
2. WHEN 用户提交的用户名或邮箱已存在,THE Auth_Service SHALL 返回 409 Conflict 错误并指明冲突字段
|
||||||
|
3. WHEN 用户提交有效的邮箱和密码进行登录,THE Auth_Service SHALL 返回包含 Access Token 和 Refresh Token 的认证响应
|
||||||
|
4. WHEN 用户提交无效的邮箱或密码进行登录,THE Auth_Service SHALL 返回 401 Unauthorized 错误
|
||||||
|
5. WHEN 用户携带有效的 Refresh Token 请求刷新,THE Auth_Service SHALL 返回新的 Access Token 和新的 Refresh Token
|
||||||
|
6. WHEN 用户携带过期或无效的 Refresh Token 请求刷新,THE Auth_Service SHALL 返回 401 Unauthorized 错误
|
||||||
|
7. WHEN 用户携带有效的 Access Token 请求 `/auth/me`,THE Auth_Service SHALL 返回当前用户的完整信息
|
||||||
|
8. IF 用户密码不满足强度要求(至少8位,含大小写字母、数字和特殊字符),THEN THE Auth_Service SHALL 返回 400 Bad Request 并说明密码强度不足
|
||||||
|
|
||||||
|
### 需求 2:微博账号管理
|
||||||
|
|
||||||
|
**用户故事:** 作为用户,我希望能够添加、查看、更新和删除我的微博账号,以便集中管理多个微博账号的签到。
|
||||||
|
|
||||||
|
#### 验收标准
|
||||||
|
|
||||||
|
1. WHEN 用户提交微博 Cookie 和备注信息,THE API_Service SHALL 使用 AES-256-GCM 加密 Cookie 后存储,并返回新创建的账号信息
|
||||||
|
2. WHEN 用户请求获取账号列表,THE API_Service SHALL 返回该用户拥有的所有 Weibo_Account(不包含解密后的 Cookie)
|
||||||
|
3. WHEN 用户请求获取单个账号详情,THE API_Service SHALL 返回该账号的状态、备注和最近签到信息
|
||||||
|
4. WHEN 用户请求更新账号的备注或 Cookie,THE API_Service SHALL 更新对应字段并返回更新后的账号信息
|
||||||
|
5. WHEN 用户请求删除一个账号,THE API_Service SHALL 级联删除该账号关联的所有 Task 和 Signin_Log,并返回成功响应
|
||||||
|
6. IF 用户尝试操作不属于自己的账号,THEN THE API_Service SHALL 返回 403 Forbidden 错误
|
||||||
|
7. WHEN 账号被创建时,THE API_Service SHALL 将账号状态初始化为 "pending"
|
||||||
|
8. THE API_Service SHALL 对所有账号管理接口要求有效的 JWT Access Token 认证
|
||||||
|
|
||||||
|
### 需求 3:Cookie 加密与验证
|
||||||
|
|
||||||
|
**用户故事:** 作为用户,我希望我的微博 Cookie 被安全存储,并且系统能自动检测 Cookie 是否失效,以便我及时更新。
|
||||||
|
|
||||||
|
#### 验收标准
|
||||||
|
|
||||||
|
1. WHEN 存储 Cookie 时,THE API_Service SHALL 使用 AES-256-GCM 算法加密,并将密文和 IV 分别存储到 `encrypted_cookies` 和 `iv` 字段
|
||||||
|
2. WHEN 读取 Cookie 用于签到时,THE Signin_Executor SHALL 使用对应的 IV 解密 Cookie 并还原为原始字符串
|
||||||
|
3. FOR ALL 有效的 Cookie 字符串,加密后再解密 SHALL 产生与原始字符串完全相同的结果(Round-trip 属性)
|
||||||
|
4. IF 解密过程中发生错误(密钥不匹配、数据损坏),THEN THE System SHALL 将账号状态标记为 "invalid_cookie" 并记录错误日志
|
||||||
|
|
||||||
|
### 需求 4:定时签到任务配置
|
||||||
|
|
||||||
|
**用户故事:** 作为用户,我希望能够为每个微博账号配置独立的签到时间计划,以便灵活控制签到频率和时间。
|
||||||
|
|
||||||
|
#### 验收标准
|
||||||
|
|
||||||
|
1. WHEN 用户为某个账号创建签到任务并提供有效的 Cron_Expression,THE API_Service SHALL 创建任务记录并将任务注册到 Task_Scheduler
|
||||||
|
2. WHEN 用户提交无效的 Cron_Expression,THE API_Service SHALL 返回 400 Bad Request 并说明表达式格式错误
|
||||||
|
3. WHEN 用户请求获取某个账号的任务列表,THE API_Service SHALL 返回该账号关联的所有 Task 及其启用状态
|
||||||
|
4. WHEN 用户启用或禁用一个任务,THE API_Service SHALL 更新数据库中的 `is_enabled` 字段,并同步更新 Task_Scheduler 中的调度状态
|
||||||
|
5. WHEN 用户删除一个任务,THE API_Service SHALL 从数据库删除任务记录,并从 Task_Scheduler 中移除对应的调度
|
||||||
|
6. IF 用户尝试为不属于自己的账号创建任务,THEN THE API_Service SHALL 返回 403 Forbidden 错误
|
||||||
|
|
||||||
|
### 需求 5:任务调度引擎
|
||||||
|
|
||||||
|
**用户故事:** 作为系统,我需要根据用户配置的 Cron 表达式准时触发签到任务,以确保签到按时执行。
|
||||||
|
|
||||||
|
#### 验收标准
|
||||||
|
|
||||||
|
1. WHEN Task_Scheduler 启动时,THE Task_Scheduler SHALL 从数据库加载所有 `is_enabled=True` 的任务并注册到 Celery Beat 调度器
|
||||||
|
2. WHEN Celery Beat 根据 Cron_Expression 触发一个任务,THE Task_Scheduler SHALL 向消息队列发送包含 `task_id` 和 `account_id` 的签到消息
|
||||||
|
3. WHEN 新任务被创建或现有任务被更新,THE Task_Scheduler SHALL 动态更新 Celery Beat 的调度配置而无需重启服务
|
||||||
|
4. IF 任务执行失败,THEN THE Task_Scheduler SHALL 按照配置的重试策略(最多3次,间隔60秒)进行重试
|
||||||
|
5. WHILE Task_Scheduler 运行中,THE Task_Scheduler SHALL 使用 Redis 分布式锁确保同一任务不会被重复调度
|
||||||
|
|
||||||
|
### 需求 6:签到执行引擎
|
||||||
|
|
||||||
|
**用户故事:** 作为系统,我需要真正执行微博超话签到操作,并将结果持久化到数据库,以替代当前的 Mock 实现。
|
||||||
|
|
||||||
|
#### 验收标准
|
||||||
|
|
||||||
|
1. WHEN Signin_Executor 从消息队列接收到签到任务,THE Signin_Executor SHALL 从数据库查询对应的 Weibo_Account 信息(替代 Mock 数据)
|
||||||
|
2. WHEN 执行签到前,THE Signin_Executor SHALL 解密 Cookie 并验证其有效性
|
||||||
|
3. WHEN Cookie 有效时,THE Signin_Executor SHALL 获取该账号关注的超话列表并逐一执行签到
|
||||||
|
4. WHEN 单个超话签到完成后,THE Signin_Executor SHALL 将结果(成功/失败/已签到、奖励信息、错误信息)写入 `signin_logs` 表
|
||||||
|
5. IF Cookie 已失效,THEN THE Signin_Executor SHALL 将账号状态更新为 "invalid_cookie" 并终止该账号的签到流程
|
||||||
|
6. IF 签到过程中遇到网络错误,THEN THE Signin_Executor SHALL 记录错误日志并将该超话的签到状态标记为 "failed_network"
|
||||||
|
|
||||||
|
### 需求 7:反爬虫防护
|
||||||
|
|
||||||
|
**用户故事:** 作为系统,我需要在执行签到时采取反爬虫措施,以降低被微博风控系统检测和封禁的风险。
|
||||||
|
|
||||||
|
#### 验收标准
|
||||||
|
|
||||||
|
1. WHEN 执行签到请求时,THE Signin_Executor SHALL 在每次请求之间插入随机延迟(1-3秒可配置)
|
||||||
|
2. WHEN 构造 HTTP 请求时,THE Signin_Executor SHALL 从预定义的 User-Agent 列表中随机选择一个
|
||||||
|
3. WHEN 代理池服务可用时,THE Signin_Executor SHALL 为每次签到请求分配一个代理 IP
|
||||||
|
4. IF 代理池服务不可用,THEN THE Signin_Executor SHALL 使用直连方式继续执行签到并记录警告日志
|
||||||
|
|
||||||
|
### 需求 8:签到日志与查询
|
||||||
|
|
||||||
|
**用户故事:** 作为用户,我希望能够查看每个微博账号的签到历史记录,以便了解签到执行情况。
|
||||||
|
|
||||||
|
#### 验收标准
|
||||||
|
|
||||||
|
1. WHEN 用户请求查看某个账号的签到日志,THE API_Service SHALL 返回按时间倒序排列的 Signin_Log 列表
|
||||||
|
2. WHEN 用户请求签到日志时提供分页参数,THE API_Service SHALL 返回对应页码的日志数据和总记录数
|
||||||
|
3. WHEN 用户请求签到日志时提供状态过滤参数,THE API_Service SHALL 仅返回匹配该状态的日志记录
|
||||||
|
4. THE API_Service SHALL 对签到日志查询接口要求有效的 JWT Access Token 认证
|
||||||
|
5. IF 用户尝试查看不属于自己账号的签到日志,THEN THE API_Service SHALL 返回 403 Forbidden 错误
|
||||||
|
|
||||||
|
### 需求 9:统一 API 响应格式与错误处理
|
||||||
|
|
||||||
|
**用户故事:** 作为 API 消费者,我希望所有接口返回统一格式的响应,以便前端能够一致地处理成功和错误情况。
|
||||||
|
|
||||||
|
#### 验收标准
|
||||||
|
|
||||||
|
1. THE API_Service SHALL 对所有成功响应返回 `{"success": true, "data": ..., "message": ...}` 格式
|
||||||
|
2. THE API_Service SHALL 对所有错误响应返回 `{"success": false, "data": null, "message": ..., "error": {"code": ..., "details": [...]}}` 格式
|
||||||
|
3. WHEN 请求参数校验失败,THE API_Service SHALL 返回 400 状态码,并在 `error.details` 中列出每个字段的具体错误
|
||||||
|
4. WHEN 未认证用户访问受保护接口,THE API_Service SHALL 返回 401 状态码和标准错误响应
|
||||||
|
|
||||||
185
.kiro/specs/multi-user-signin/tasks.md
Normal file
185
.kiro/specs/multi-user-signin/tasks.md
Normal file
@@ -0,0 +1,185 @@
|
|||||||
|
# 实现计划:Weibo-HotSign 多用户签到系统
|
||||||
|
|
||||||
|
## 概述
|
||||||
|
|
||||||
|
按照自底向上的顺序实现:先构建共享基础层,再逐步实现各微服务。每个阶段包含核心实现和对应的测试任务。
|
||||||
|
|
||||||
|
## Tasks
|
||||||
|
|
||||||
|
- [x] 1. 创建共享模块 (shared/)
|
||||||
|
- [x] 1.1 创建 `backend/shared/` 包结构和共享配置
|
||||||
|
- 创建 `shared/__init__.py`、`shared/config.py`
|
||||||
|
- 配置项包括 DATABASE_URL、REDIS_URL、JWT_SECRET_KEY、COOKIE_ENCRYPTION_KEY
|
||||||
|
- 使用 pydantic-settings 从环境变量加载
|
||||||
|
- _Requirements: 10.1, 10.2_
|
||||||
|
- [x] 1.2 创建共享 ORM 模型和数据库连接管理
|
||||||
|
- 创建 `shared/models/base.py`:AsyncEngine、AsyncSessionLocal、Base、get_db()
|
||||||
|
- 创建 `shared/models/user.py`:User 模型(含 accounts relationship)
|
||||||
|
- 创建 `shared/models/account.py`:Account 模型(含 tasks、signin_logs relationship)
|
||||||
|
- 创建 `shared/models/task.py`:Task 模型
|
||||||
|
- 创建 `shared/models/signin_log.py`:SigninLog 模型
|
||||||
|
- 所有模型与 `init-db.sql` 中的表结构对齐
|
||||||
|
- _Requirements: 10.1, 10.2, 10.3_
|
||||||
|
- [x] 1.3 实现 Cookie 加密/解密工具 (`shared/crypto.py`)
|
||||||
|
- 使用 pycryptodome 实现 AES-256-GCM 加密/解密
|
||||||
|
- `encrypt_cookie(plaintext, key) -> (ciphertext_b64, iv_b64)`
|
||||||
|
- `decrypt_cookie(ciphertext_b64, iv_b64, key) -> plaintext`
|
||||||
|
- 密钥从环境变量 COOKIE_ENCRYPTION_KEY 派生(使用 SHA-256 哈希为32字节)
|
||||||
|
- _Requirements: 3.1, 3.2, 10.4_
|
||||||
|
- [ ]* 1.4 编写 Cookie 加密 Round-trip 属性测试
|
||||||
|
- **Property 1: Cookie 加密 Round-trip**
|
||||||
|
- 使用 hypothesis 生成随机字符串,验证 encrypt 后 decrypt 还原
|
||||||
|
- **Validates: Requirements 3.1, 3.2, 3.3**
|
||||||
|
- [x] 1.5 实现统一响应格式工具 (`shared/response.py`)
|
||||||
|
- `success_response(data, message)` 返回标准成功格式
|
||||||
|
- `error_response(message, code, details, status_code)` 返回标准错误格式
|
||||||
|
- _Requirements: 9.1, 9.2_
|
||||||
|
|
||||||
|
- [x] 2. 重构 Auth_Service(Token 刷新机制)
|
||||||
|
- [x] 2.1 重构 Auth_Service 使用 shared 模块
|
||||||
|
- 修改 `auth_service/app/main.py` 导入 shared models 和 get_db
|
||||||
|
- 删除 `auth_service/app/models/database.py` 中的重复 User 模型定义
|
||||||
|
- 更新 `auth_service/app/services/auth_service.py` 使用 shared User 模型
|
||||||
|
- _Requirements: 10.3_
|
||||||
|
- [x] 2.2 实现 Refresh Token 机制
|
||||||
|
- 在 `auth_service/app/utils/security.py` 中添加 `create_refresh_token()` 和 `verify_refresh_token()`
|
||||||
|
- Refresh Token 使用 Redis 存储(key: `refresh_token:{hash}`, value: `user_id`, TTL: 7天)
|
||||||
|
- 登录接口返回 access_token + refresh_token
|
||||||
|
- 实现 `/auth/refresh` 端点:验证旧 token → 删除旧 token → 生成新 token 对(Token Rotation)
|
||||||
|
- 更新 `auth_service/app/schemas/user.py` 添加 RefreshToken 相关 schema
|
||||||
|
- _Requirements: 1.3, 1.5, 1.6_
|
||||||
|
- [x] 2.3 为 Auth_Service 所有响应应用统一格式
|
||||||
|
- 注册、登录、刷新、获取用户信息接口使用 `shared/response.py` 格式化响应
|
||||||
|
- 注册全局异常处理器(HTTPException、RequestValidationError)
|
||||||
|
- _Requirements: 9.1, 9.2, 9.3, 9.4_
|
||||||
|
- [ ]* 2.4 编写认证流程属性测试
|
||||||
|
- **Property 2: 用户注册后可登录获取信息**
|
||||||
|
- **Property 3: 用户名/邮箱唯一性约束**
|
||||||
|
- **Property 4: 无效凭证登录拒绝**
|
||||||
|
- **Property 5: Refresh Token 轮换**
|
||||||
|
- **Property 6: 弱密码拒绝**
|
||||||
|
- **Validates: Requirements 1.1-1.8**
|
||||||
|
|
||||||
|
- [x] 3. Checkpoint - 确保共享模块和认证服务测试通过
|
||||||
|
- 运行所有测试,确认 shared 模块和 auth_service 工作正常
|
||||||
|
- 如有问题请向用户确认
|
||||||
|
|
||||||
|
- [x] 4. 实现 API_Service(账号管理)
|
||||||
|
- [x] 4.1 创建 API_Service 基础结构
|
||||||
|
- 创建 `api_service/app/__init__.py`、`main.py`、`config.py`、`dependencies.py`
|
||||||
|
- `main.py`:FastAPI 应用,注册 CORS、全局异常处理器、路由
|
||||||
|
- `dependencies.py`:JWT 认证依赖(`get_current_user`),复用 shared 的 JWT 验证逻辑
|
||||||
|
- _Requirements: 2.8, 9.1, 9.2, 9.3, 9.4_
|
||||||
|
- [x] 4.2 实现微博账号 CRUD 路由
|
||||||
|
- 创建 `api_service/app/schemas/account.py`:AccountCreate、AccountUpdate、AccountResponse
|
||||||
|
- 创建 `api_service/app/routers/accounts.py`:
|
||||||
|
- `POST /api/v1/accounts`:加密 Cookie 后存储,状态初始化为 "pending"
|
||||||
|
- `GET /api/v1/accounts`:返回当前用户的账号列表(不含 Cookie 明文)
|
||||||
|
- `GET /api/v1/accounts/{id}`:返回账号详情
|
||||||
|
- `PUT /api/v1/accounts/{id}`:更新备注或 Cookie(更新 Cookie 时重新加密)
|
||||||
|
- `DELETE /api/v1/accounts/{id}`:删除账号(级联删除 tasks 和 logs)
|
||||||
|
- 所有接口验证资源归属(user_id 匹配)
|
||||||
|
- _Requirements: 2.1-2.8_
|
||||||
|
- [ ]* 4.3 编写账号管理属性测试
|
||||||
|
- **Property 7: 账号创建与列表一致性**
|
||||||
|
- **Property 8: 账号详情 Round-trip**
|
||||||
|
- **Property 9: 账号更新反映**
|
||||||
|
- **Property 10: 账号删除级联**
|
||||||
|
- **Property 11: 跨用户资源隔离**
|
||||||
|
- **Property 12: 受保护接口认证要求**
|
||||||
|
- **Validates: Requirements 2.1-2.8, 4.6, 8.5, 8.4, 9.4**
|
||||||
|
|
||||||
|
- [-] 5. 实现 API_Service(任务配置)
|
||||||
|
- [x] 5.1 实现签到任务 CRUD 路由
|
||||||
|
- 创建 `api_service/app/schemas/task.py`:TaskCreate、TaskUpdate、TaskResponse
|
||||||
|
- 创建 `api_service/app/routers/tasks.py`:
|
||||||
|
- `POST /api/v1/accounts/{id}/tasks`:验证 Cron 表达式有效性,创建任务
|
||||||
|
- `GET /api/v1/accounts/{id}/tasks`:获取账号的任务列表
|
||||||
|
- `PUT /api/v1/tasks/{id}`:更新任务(启用/禁用)
|
||||||
|
- `DELETE /api/v1/tasks/{id}`:删除任务
|
||||||
|
- 使用 `croniter` 库验证 Cron 表达式
|
||||||
|
- 任务创建/更新/删除时通过 Redis pub/sub 通知 Task_Scheduler
|
||||||
|
- _Requirements: 4.1-4.6_
|
||||||
|
- [ ]* 5.2 编写任务配置属性测试
|
||||||
|
- **Property 13: 有效 Cron 表达式创建任务**
|
||||||
|
- **Property 14: 无效 Cron 表达式拒绝**
|
||||||
|
- **Property 15: 任务启用/禁用切换**
|
||||||
|
- **Property 16: 任务删除**
|
||||||
|
- **Validates: Requirements 4.1-4.6**
|
||||||
|
|
||||||
|
- [x] 6. 实现 API_Service(签到日志查询)
|
||||||
|
- [x] 6.1 实现签到日志查询路由
|
||||||
|
- 创建 `api_service/app/schemas/signin_log.py`:SigninLogResponse、PaginatedResponse
|
||||||
|
- 创建 `api_service/app/routers/signin_logs.py`:
|
||||||
|
- `GET /api/v1/accounts/{id}/signin-logs`:支持分页(page, size)和状态过滤(status)
|
||||||
|
- 返回按 `signed_at` 降序排列的日志
|
||||||
|
- 返回总记录数用于前端分页
|
||||||
|
- 验证账号归属权限
|
||||||
|
- _Requirements: 8.1-8.5_
|
||||||
|
- [ ]* 6.2 编写签到日志查询属性测试
|
||||||
|
- **Property 23: 签到日志时间倒序**
|
||||||
|
- **Property 24: 签到日志分页**
|
||||||
|
- **Property 25: 签到日志状态过滤**
|
||||||
|
- **Validates: Requirements 8.1-8.3**
|
||||||
|
|
||||||
|
- [ ] 7. Checkpoint - 确保 API_Service 所有测试通过
|
||||||
|
- 运行所有测试,确认账号管理、任务配置、日志查询功能正常
|
||||||
|
- 如有问题请向用户确认
|
||||||
|
|
||||||
|
- [ ] 8. 重构 Task_Scheduler(真实数据库交互)
|
||||||
|
- [ ] 8.1 重构 Task_Scheduler 使用 shared 模块
|
||||||
|
- 修改 `task_scheduler/app/celery_app.py` 导入 shared models
|
||||||
|
- 实现 `load_scheduled_tasks()`:从 DB 查询 `is_enabled=True` 的 Task,动态注册到 Celery Beat
|
||||||
|
- 实现 Redis pub/sub 监听:接收任务变更通知,动态更新调度
|
||||||
|
- 替换 `signin_tasks.py` 中的 mock 账号列表为真实 DB 查询
|
||||||
|
- _Requirements: 5.1, 5.2, 5.3_
|
||||||
|
- [ ] 8.2 实现分布式锁和重试机制
|
||||||
|
- 使用 Redis SETNX 实现分布式锁,防止同一任务重复调度
|
||||||
|
- 配置 Celery 任务重试:`max_retries=3`、`default_retry_delay=60`
|
||||||
|
- _Requirements: 5.4, 5.5_
|
||||||
|
- [ ]* 8.3 编写调度器属性测试
|
||||||
|
- **Property 17: 调度器加载已启用任务**
|
||||||
|
- **Property 18: 分布式锁防重复调度**
|
||||||
|
- **Validates: Requirements 5.1, 5.5**
|
||||||
|
|
||||||
|
- [ ] 9. 重构 Signin_Executor(真实数据库交互)
|
||||||
|
- [ ] 9.1 重构 Signin_Executor 使用 shared 模块
|
||||||
|
- 修改 `signin_service.py` 中的 `_get_account_info()` 从 DB 查询真实 Account 数据
|
||||||
|
- 修改 `weibo_client.py` 中的 `_decrypt_cookies()` 使用 `shared/crypto.py`
|
||||||
|
- 实现签到结果写入 `signin_logs` 表(替代 mock)
|
||||||
|
- 实现 Cookie 失效时更新 `account.status = "invalid_cookie"`
|
||||||
|
- _Requirements: 6.1, 6.2, 6.4, 6.5_
|
||||||
|
- [ ] 9.2 实现反爬虫防护模块
|
||||||
|
- 实现随机延迟函数:返回 `[min, max]` 范围内的随机值
|
||||||
|
- 实现 User-Agent 轮换:从预定义列表中随机选择
|
||||||
|
- 实现代理池集成:调用 proxy pool 服务获取代理,不可用时降级为直连
|
||||||
|
- _Requirements: 7.1, 7.2, 7.3, 7.4_
|
||||||
|
- [ ]* 9.3 编写签到执行属性测试
|
||||||
|
- **Property 19: 签到结果持久化**
|
||||||
|
- **Property 20: Cookie 失效时更新账号状态**
|
||||||
|
- **Property 21: 随机延迟范围**
|
||||||
|
- **Property 22: User-Agent 来源**
|
||||||
|
- **Validates: Requirements 6.1, 6.4, 6.5, 7.1, 7.2**
|
||||||
|
|
||||||
|
- [ ] 10. 更新 Dockerfile 和集成配置
|
||||||
|
- [ ] 10.1 更新 `backend/Dockerfile`
|
||||||
|
- 在每个构建阶段添加 `COPY shared/ ./shared/`
|
||||||
|
- 确保 shared 模块在所有服务容器中可用
|
||||||
|
- _Requirements: 10.1, 10.3_
|
||||||
|
- [ ] 10.2 更新 `backend/requirements.txt`
|
||||||
|
- 添加 `croniter`(Cron 表达式解析)
|
||||||
|
- 添加 `hypothesis`(属性测试)
|
||||||
|
- 添加 `pytest`、`pytest-asyncio`(测试框架)
|
||||||
|
- 确认 `pycryptodome`、`redis`、`celery` 等已存在
|
||||||
|
|
||||||
|
- [ ] 11. 最终 Checkpoint - 全量测试
|
||||||
|
- 运行所有单元测试和属性测试
|
||||||
|
- 验证各服务可独立启动
|
||||||
|
- 如有问题请向用户确认
|
||||||
|
|
||||||
|
## 备注
|
||||||
|
|
||||||
|
- 标记 `*` 的任务为可选测试任务,可跳过以加快 MVP 进度
|
||||||
|
- 每个任务引用了具体的需求编号以确保可追溯性
|
||||||
|
- 属性测试使用 `hypothesis` 库,每个测试至少运行 100 次迭代
|
||||||
|
- Checkpoint 任务用于阶段性验证,确保增量开发的正确性
|
||||||
187
README.md
Normal file
187
README.md
Normal file
@@ -0,0 +1,187 @@
|
|||||||
|
# Weibo-HotSign - 微博超话智能签到系统
|
||||||
|
|
||||||
|
基于开发文档实现的分布式微博超话智能签到系统,具备多账户管理、高稳定性反爬虫、Web可视化管理等核心功能。
|
||||||
|
|
||||||
|
## 🏗️ 项目架构
|
||||||
|
|
||||||
|
本项目采用微服务架构,包含以下核心服务:
|
||||||
|
|
||||||
|
- **认证服务** (auth_service) - 用户注册、登录、JWT认证
|
||||||
|
- **API网关** (api_service) - 统一API入口和路由
|
||||||
|
- **任务调度** (task_scheduler) - 基于Celery Beat的定时任务
|
||||||
|
- **签到执行** (signin_executor) - 核心签到业务逻辑
|
||||||
|
- **浏览器自动化** (browser_automation_service) - 处理复杂JS加密
|
||||||
|
- **通知中心** (notification_hub) - 多渠道通知分发
|
||||||
|
- **前端应用** (frontend) - React可视化界面
|
||||||
|
|
||||||
|
## 🚀 快速启动
|
||||||
|
|
||||||
|
### 环境要求
|
||||||
|
- Docker & Docker Compose
|
||||||
|
- Python 3.11+
|
||||||
|
- Node.js 18+
|
||||||
|
|
||||||
|
### 启动步骤
|
||||||
|
|
||||||
|
1. **克隆项目**
|
||||||
|
```bash
|
||||||
|
cd d:/code/weibo
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **启动所有服务**
|
||||||
|
```bash
|
||||||
|
docker-compose up -d
|
||||||
|
```
|
||||||
|
|
||||||
|
3. **查看服务状态**
|
||||||
|
```bash
|
||||||
|
docker-compose ps
|
||||||
|
```
|
||||||
|
|
||||||
|
4. **访问服务**
|
||||||
|
- 前端界面: http://localhost:3000
|
||||||
|
- API文档: http://localhost:8000/docs
|
||||||
|
- 认证服务: http://localhost:8001/docs
|
||||||
|
- 健康检查: http://localhost:8000/health
|
||||||
|
|
||||||
|
## 📋 已实现功能
|
||||||
|
|
||||||
|
### ✅ 认证服务 (auth_service)
|
||||||
|
- [x] 用户注册 (`POST /auth/register`)
|
||||||
|
- [x] 用户登录 (`POST /auth/login`)
|
||||||
|
- [x] JWT Token生成和验证
|
||||||
|
- [x] 密码强度验证和bcrypt哈希
|
||||||
|
- [x] CORS跨域支持
|
||||||
|
- [x] 数据库连接管理
|
||||||
|
- [x] 完整的错误处理和日志记录
|
||||||
|
|
||||||
|
### ✅ 任务调度服务 (task_scheduler)
|
||||||
|
- [x] Celery Beat定时任务调度
|
||||||
|
- [x] Cron表达式解析和动态任务加载
|
||||||
|
- [x] 任务队列管理 (Redis)
|
||||||
|
- [x] 任务重试和错误处理
|
||||||
|
- [x] 调用签到执行服务
|
||||||
|
|
||||||
|
### ✅ 签到执行服务 (signin_executor)
|
||||||
|
- [x] 微博超话签到核心逻辑
|
||||||
|
- [x] 动态IP代理池集成 (模拟)
|
||||||
|
- [x] 浏览器指纹模拟 (模拟)
|
||||||
|
- [x] Cookie管理和验证 (模拟)
|
||||||
|
- [x] 完整的签到工作流和状态管理
|
||||||
|
- [x] 反爬虫保护机制 (随机延迟)
|
||||||
|
|
||||||
|
### ✅ 基础设施
|
||||||
|
- [x] Docker容器化配置
|
||||||
|
- [x] MySQL数据库初始化
|
||||||
|
- [x] Redis缓存配置
|
||||||
|
- [x] Nginx反向代理配置
|
||||||
|
- [x] 微服务网络通信
|
||||||
|
|
||||||
|
### 🔄 待实现功能
|
||||||
|
|
||||||
|
#### API网关服务 (api_service)
|
||||||
|
- 请求路由和负载均衡
|
||||||
|
- API组合和聚合
|
||||||
|
- 速率限制和熔断
|
||||||
|
|
||||||
|
#### 浏览器自动化服务 (browser_automation_service)
|
||||||
|
- Playwright无头浏览器
|
||||||
|
- JS加密参数逆向
|
||||||
|
- 网络请求拦截和提取
|
||||||
|
|
||||||
|
#### 前端React应用 (frontend)
|
||||||
|
- 用户登录注册界面
|
||||||
|
- 账号管理面板
|
||||||
|
- 任务配置界面
|
||||||
|
- 签到日志查看
|
||||||
|
- 实时状态监控
|
||||||
|
|
||||||
|
#### 通知中心服务 (notification_hub)
|
||||||
|
- 多渠道通知分发 (Server酱, Email等)
|
||||||
|
|
||||||
|
## 🛠️ 技术栈
|
||||||
|
|
||||||
|
### 后端
|
||||||
|
- **Web框架**: FastAPI (Python)
|
||||||
|
- **数据库**: MySQL + SQLAlchemy (async)
|
||||||
|
- **缓存**: Redis
|
||||||
|
- **任务队列**: Celery + Redis
|
||||||
|
- **认证**: JWT + bcrypt
|
||||||
|
- **浏览器自动化**: Playwright
|
||||||
|
|
||||||
|
### 前端
|
||||||
|
- **框架**: React 18 + Vite
|
||||||
|
- **状态管理**: Zustand
|
||||||
|
- **UI库**: Ant Design
|
||||||
|
- **HTTP客户端**: Axios
|
||||||
|
|
||||||
|
### 基础设施
|
||||||
|
- **容器化**: Docker + Docker Compose
|
||||||
|
- **反向代理**: Nginx
|
||||||
|
- **监控**: Prometheus + Grafana
|
||||||
|
- **日志**: ELK Stack
|
||||||
|
|
||||||
|
## 📊 数据库设计
|
||||||
|
|
||||||
|
系统包含以下核心数据表:
|
||||||
|
|
||||||
|
- `users` - 用户信息
|
||||||
|
- `accounts` - 微博账号管理
|
||||||
|
- `tasks` - 签到任务配置
|
||||||
|
- `signin_logs` - 签到历史记录
|
||||||
|
|
||||||
|
详细表结构见 `init-db.sql`
|
||||||
|
|
||||||
|
## 🔧 配置说明
|
||||||
|
|
||||||
|
### 环境变量
|
||||||
|
主要配置通过环境变量设置:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# 数据库
|
||||||
|
DATABASE_URL=mysql+aiomysql://user:pass@mysql:3306/dbname
|
||||||
|
REDIS_URL=redis://redis:6379
|
||||||
|
|
||||||
|
# JWT
|
||||||
|
JWT_SECRET_KEY=your-super-secret-jwt-key
|
||||||
|
JWT_EXPIRATION_HOURS=24
|
||||||
|
|
||||||
|
# 应用
|
||||||
|
DEBUG=true
|
||||||
|
HOST=0.0.0.0
|
||||||
|
PORT=8000
|
||||||
|
```
|
||||||
|
|
||||||
|
## 📝 API规范
|
||||||
|
|
||||||
|
遵循RESTful设计规范:
|
||||||
|
|
||||||
|
- **协议**: HTTPS
|
||||||
|
- **数据格式**: JSON
|
||||||
|
- **认证**: Bearer Token (JWT)
|
||||||
|
- **版本控制**: URL路径 (`/api/v1/`)
|
||||||
|
- **通用响应结构**:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"success": true,
|
||||||
|
"data": {...},
|
||||||
|
"message": "Operation successful",
|
||||||
|
"error": null
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## 🤝 贡献指南
|
||||||
|
|
||||||
|
1. Fork项目
|
||||||
|
2. 创建特性分支
|
||||||
|
3. 提交代码变更
|
||||||
|
4. 推送到分支
|
||||||
|
5. 创建Pull Request
|
||||||
|
|
||||||
|
## 📄 许可证
|
||||||
|
|
||||||
|
MIT License
|
||||||
|
|
||||||
|
## 🙏 致谢
|
||||||
|
|
||||||
|
感谢开发文档提供的详细技术规范和架构指导,本实现严格遵循文档中的各项技术要求。
|
||||||
91
backend/Dockerfile
Normal file
91
backend/Dockerfile
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
# Multi-stage build: one stage per microservice, all sharing `base`.

# --- Base stage for all Python services ---
FROM python:3.11-slim AS base

WORKDIR /app

# Build deps for the MySQL client, plus curl: python:3.11-slim does NOT ship
# curl, so without installing it every HEALTHCHECK below would always fail.
RUN apt-get update && apt-get install -y --no-install-recommends \
    gcc \
    default-libmysqlclient-dev \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Copy and install unified requirements
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Shared module (ORM models, DB session, crypto, response helpers) — copied
# in the base stage so every service stage inherits it; services import
# `shared.*` at startup and would crash without it.
COPY shared/ ./shared/

# Create non-root user for security
RUN groupadd -r appuser && useradd -r -g appuser appuser


# --- API Gateway Service Stage ---
FROM base AS api_gateway

COPY api_service/app/ ./app/
USER appuser
EXPOSE 8000

HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8000/health || exit 1

CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]


# --- Auth Service Stage ---
FROM base AS auth_service

COPY auth_service/app/ ./app/
USER appuser
EXPOSE 8000

HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8000/health || exit 1

CMD ["python", "-m", "app.main"]


# --- Task Scheduler Service Stage ---
FROM base AS task_scheduler

COPY task_scheduler/app/ ./app/
USER appuser

# Celery Beat exposes no HTTP endpoint, so no EXPOSE/HEALTHCHECK here.
CMD ["celery", "-A", "app.celery_app", "beat", "--loglevel=info"]


# --- Sign-in Executor Service Stage ---
FROM base AS signin_executor

COPY signin_executor/app/ ./app/
USER appuser
EXPOSE 8000

HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8000/health || exit 1

CMD ["python", "-m", "app.main"]
|
||||||
0
backend/api_service/__init__.py
Normal file
0
backend/api_service/__init__.py
Normal file
BIN
backend/api_service/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
backend/api_service/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
0
backend/api_service/app/__init__.py
Normal file
0
backend/api_service/app/__init__.py
Normal file
BIN
backend/api_service/app/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
backend/api_service/app/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/api_service/app/__pycache__/dependencies.cpython-311.pyc
Normal file
BIN
backend/api_service/app/__pycache__/dependencies.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/api_service/app/__pycache__/main.cpython-311.pyc
Normal file
BIN
backend/api_service/app/__pycache__/main.cpython-311.pyc
Normal file
Binary file not shown.
9
backend/api_service/app/config.py
Normal file
9
backend/api_service/app/config.py
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
"""
|
||||||
|
Configuration settings for API Service.
|
||||||
|
Re-uses shared settings; add API-specific overrides here if needed.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from shared.config import shared_settings
|
||||||
|
|
||||||
|
APP_NAME = "Weibo-HotSign API Service"
|
||||||
|
APP_VERSION = "1.0.0"
|
||||||
50
backend/api_service/app/dependencies.py
Normal file
50
backend/api_service/app/dependencies.py
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
"""
|
||||||
|
Shared dependencies for API Service routes.
|
||||||
|
Provides JWT-based authentication via get_current_user.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from fastapi import Depends, HTTPException, Security, status
|
||||||
|
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
from sqlalchemy import select
|
||||||
|
|
||||||
|
from shared.models import get_db, User
|
||||||
|
from auth_service.app.utils.security import decode_access_token
|
||||||
|
|
||||||
|
security = HTTPBearer()
|
||||||
|
|
||||||
|
|
||||||
|
async def get_current_user(
    credentials: HTTPAuthorizationCredentials = Security(security),
    db: AsyncSession = Depends(get_db),
) -> User:
    """Validate the Bearer JWT and return the matching active User.

    Raises:
        HTTPException 401: token is missing/invalid/expired, its payload has
            no subject, or the subject no longer maps to an existing user.
        HTTPException 403: the user exists but has been deactivated.
    """
    payload = decode_access_token(credentials.credentials)
    if payload is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid or expired token",
        )

    user_id = payload.get("sub")
    if not user_id:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid token payload",
        )

    result = await db.execute(select(User).where(User.id == user_id))
    user = result.scalar_one_or_none()

    if user is None:
        # 401, not 404: a syntactically valid token whose subject has been
        # deleted is an authentication failure; answering 404 would leak
        # whether a given account exists.
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="User not found",
        )
    if not user.is_active:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="User account is deactivated",
        )

    return user
|
||||||
75
backend/api_service/app/main.py
Normal file
75
backend/api_service/app/main.py
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
"""
|
||||||
|
Weibo-HotSign API Service
|
||||||
|
Main FastAPI application entry point — account management, task config, signin logs.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from fastapi import FastAPI, Request
|
||||||
|
from fastapi.exceptions import RequestValidationError
|
||||||
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
|
from starlette.exceptions import HTTPException as StarletteHTTPException
|
||||||
|
|
||||||
|
from shared.response import success_response, error_response
|
||||||
|
from api_service.app.routers import accounts, tasks, signin_logs
|
||||||
|
|
||||||
|
app = FastAPI(
|
||||||
|
title="Weibo-HotSign API Service",
|
||||||
|
version="1.0.0",
|
||||||
|
docs_url="/docs",
|
||||||
|
redoc_url="/redoc",
|
||||||
|
)
|
||||||
|
|
||||||
|
# CORS
|
||||||
|
app.add_middleware(
|
||||||
|
CORSMiddleware,
|
||||||
|
allow_origins=["http://localhost:3000", "http://localhost:80"],
|
||||||
|
allow_credentials=True,
|
||||||
|
allow_methods=["*"],
|
||||||
|
allow_headers=["*"],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# ---- Global exception handlers (unified response format) ----
|
||||||
|
|
||||||
|
@app.exception_handler(StarletteHTTPException)
async def http_exception_handler(request: Request, exc: StarletteHTTPException):
    """Wrap every HTTPException in the unified error envelope (req 9.2)."""
    error_code = f"HTTP_{exc.status_code}"
    return error_response(exc.detail, error_code, status_code=exc.status_code)
|
||||||
|
|
||||||
|
|
||||||
|
@app.exception_handler(RequestValidationError)
async def validation_exception_handler(request: Request, exc: RequestValidationError):
    """Return 400 with one {field, message} entry per validation error (req 9.3)."""
    details = []
    for err in exc.errors():
        loc = err["loc"]
        field = loc[-1] if loc else "unknown"
        details.append({"field": field, "message": err["msg"]})
    return error_response(
        "Validation failed",
        "VALIDATION_ERROR",
        details=details,
        status_code=400,
    )
|
||||||
|
|
||||||
|
|
||||||
|
# ---- Routers ----
|
||||||
|
|
||||||
|
app.include_router(accounts.router)
|
||||||
|
app.include_router(tasks.router)
|
||||||
|
app.include_router(signin_logs.router)
|
||||||
|
|
||||||
|
|
||||||
|
# ---- Health / root ----
|
||||||
|
|
||||||
|
@app.get("/")
|
||||||
|
async def root():
|
||||||
|
return success_response(
|
||||||
|
{"service": "Weibo-HotSign API Service", "version": "1.0.0"},
|
||||||
|
"Service is running",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/health")
|
||||||
|
async def health_check():
|
||||||
|
return success_response({"status": "healthy"})
|
||||||
0
backend/api_service/app/routers/__init__.py
Normal file
0
backend/api_service/app/routers/__init__.py
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
139
backend/api_service/app/routers/accounts.py
Normal file
139
backend/api_service/app/routers/accounts.py
Normal file
@@ -0,0 +1,139 @@
|
|||||||
|
"""
|
||||||
|
Weibo Account CRUD router.
|
||||||
|
All endpoints require JWT authentication and enforce resource ownership.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, HTTPException, status
|
||||||
|
from sqlalchemy import select
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
from sqlalchemy.orm import selectinload
|
||||||
|
|
||||||
|
from shared.models import get_db, Account, User
|
||||||
|
from shared.crypto import encrypt_cookie, decrypt_cookie, derive_key
|
||||||
|
from shared.config import shared_settings
|
||||||
|
from shared.response import success_response, error_response
|
||||||
|
from api_service.app.dependencies import get_current_user
|
||||||
|
from api_service.app.schemas.account import (
|
||||||
|
AccountCreate,
|
||||||
|
AccountUpdate,
|
||||||
|
AccountResponse,
|
||||||
|
)
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/api/v1/accounts", tags=["accounts"])
|
||||||
|
|
||||||
|
|
||||||
|
def _encryption_key() -> bytes:
    """Derive the AES key from the configured cookie-encryption passphrase."""
    passphrase = shared_settings.COOKIE_ENCRYPTION_KEY
    return derive_key(passphrase)
|
||||||
|
|
||||||
|
|
||||||
|
def _account_to_dict(account: Account) -> dict:
    """Serialize an Account via its response schema (cookie fields excluded)."""
    schema = AccountResponse.model_validate(account)
    return schema.model_dump(mode="json")
|
||||||
|
|
||||||
|
|
||||||
|
async def _get_owned_account(
    account_id: str,
    user: User,
    db: AsyncSession,
) -> Account:
    """Load an account by id; 404 if absent, 403 if not owned by *user*."""
    row = await db.execute(select(Account).where(Account.id == account_id))
    account = row.scalar_one_or_none()
    if account is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Account not found")
    if account.user_id != user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access denied")
    return account
|
||||||
|
|
||||||
|
|
||||||
|
# ---- CREATE ----
|
||||||
|
|
||||||
|
@router.post("", status_code=status.HTTP_201_CREATED)
|
||||||
|
async def create_account(
|
||||||
|
body: AccountCreate,
|
||||||
|
user: User = Depends(get_current_user),
|
||||||
|
db: AsyncSession = Depends(get_db),
|
||||||
|
):
|
||||||
|
key = _encryption_key()
|
||||||
|
ciphertext, iv = encrypt_cookie(body.cookie, key)
|
||||||
|
|
||||||
|
account = Account(
|
||||||
|
user_id=user.id,
|
||||||
|
weibo_user_id=body.weibo_user_id,
|
||||||
|
remark=body.remark,
|
||||||
|
encrypted_cookies=ciphertext,
|
||||||
|
iv=iv,
|
||||||
|
status="pending",
|
||||||
|
)
|
||||||
|
db.add(account)
|
||||||
|
await db.commit()
|
||||||
|
await db.refresh(account)
|
||||||
|
|
||||||
|
return success_response(_account_to_dict(account), "Account created")
|
||||||
|
|
||||||
|
|
||||||
|
# ---- LIST ----
|
||||||
|
|
||||||
|
@router.get("")
|
||||||
|
async def list_accounts(
|
||||||
|
user: User = Depends(get_current_user),
|
||||||
|
db: AsyncSession = Depends(get_db),
|
||||||
|
):
|
||||||
|
result = await db.execute(
|
||||||
|
select(Account).where(Account.user_id == user.id)
|
||||||
|
)
|
||||||
|
accounts = result.scalars().all()
|
||||||
|
return success_response(
|
||||||
|
[_account_to_dict(a) for a in accounts],
|
||||||
|
"Accounts retrieved",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# ---- DETAIL ----
|
||||||
|
|
||||||
|
@router.get("/{account_id}")
|
||||||
|
async def get_account(
|
||||||
|
account_id: str,
|
||||||
|
user: User = Depends(get_current_user),
|
||||||
|
db: AsyncSession = Depends(get_db),
|
||||||
|
):
|
||||||
|
account = await _get_owned_account(account_id, user, db)
|
||||||
|
return success_response(_account_to_dict(account), "Account retrieved")
|
||||||
|
|
||||||
|
|
||||||
|
# ---- UPDATE ----
|
||||||
|
|
||||||
|
@router.put("/{account_id}")
|
||||||
|
async def update_account(
|
||||||
|
account_id: str,
|
||||||
|
body: AccountUpdate,
|
||||||
|
user: User = Depends(get_current_user),
|
||||||
|
db: AsyncSession = Depends(get_db),
|
||||||
|
):
|
||||||
|
account = await _get_owned_account(account_id, user, db)
|
||||||
|
|
||||||
|
if body.remark is not None:
|
||||||
|
account.remark = body.remark
|
||||||
|
|
||||||
|
if body.cookie is not None:
|
||||||
|
key = _encryption_key()
|
||||||
|
ciphertext, iv = encrypt_cookie(body.cookie, key)
|
||||||
|
account.encrypted_cookies = ciphertext
|
||||||
|
account.iv = iv
|
||||||
|
|
||||||
|
await db.commit()
|
||||||
|
await db.refresh(account)
|
||||||
|
return success_response(_account_to_dict(account), "Account updated")
|
||||||
|
|
||||||
|
|
||||||
|
# ---- DELETE ----
|
||||||
|
|
||||||
|
@router.delete("/{account_id}")
|
||||||
|
async def delete_account(
|
||||||
|
account_id: str,
|
||||||
|
user: User = Depends(get_current_user),
|
||||||
|
db: AsyncSession = Depends(get_db),
|
||||||
|
):
|
||||||
|
account = await _get_owned_account(account_id, user, db)
|
||||||
|
await db.delete(account)
|
||||||
|
await db.commit()
|
||||||
|
return success_response(None, "Account deleted")
|
||||||
83
backend/api_service/app/routers/signin_logs.py
Normal file
83
backend/api_service/app/routers/signin_logs.py
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
"""
|
||||||
|
Signin Log query router.
|
||||||
|
All endpoints require JWT authentication and enforce resource ownership.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Optional
|
||||||
|
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||||
|
from sqlalchemy import select, func
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
|
from shared.models import get_db, Account, SigninLog, User
|
||||||
|
from shared.response import success_response
|
||||||
|
from api_service.app.dependencies import get_current_user
|
||||||
|
from api_service.app.schemas.signin_log import SigninLogResponse, PaginatedResponse
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/api/v1/accounts", tags=["signin-logs"])
|
||||||
|
|
||||||
|
|
||||||
|
async def _verify_account_ownership(
    account_id: str,
    user: User,
    db: AsyncSession,
) -> Account:
    """Look up the account and confirm *user* owns it (404 / 403 otherwise)."""
    row = await db.execute(select(Account).where(Account.id == account_id))
    account = row.scalar_one_or_none()
    if account is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Account not found")
    if account.user_id != user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access denied")
    return account
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{account_id}/signin-logs")
|
||||||
|
async def get_signin_logs(
|
||||||
|
account_id: str,
|
||||||
|
page: int = Query(1, ge=1, description="Page number (starts from 1)"),
|
||||||
|
size: int = Query(20, ge=1, le=100, description="Page size (max 100)"),
|
||||||
|
status_filter: Optional[str] = Query(None, alias="status", description="Filter by status"),
|
||||||
|
user: User = Depends(get_current_user),
|
||||||
|
db: AsyncSession = Depends(get_db),
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Query signin logs for a specific account with pagination and status filtering.
|
||||||
|
Returns logs sorted by signed_at in descending order (newest first).
|
||||||
|
"""
|
||||||
|
# Verify account ownership
|
||||||
|
await _verify_account_ownership(account_id, user, db)
|
||||||
|
|
||||||
|
# Build base query
|
||||||
|
query = select(SigninLog).where(SigninLog.account_id == account_id)
|
||||||
|
|
||||||
|
# Apply status filter if provided
|
||||||
|
if status_filter:
|
||||||
|
query = query.where(SigninLog.status == status_filter)
|
||||||
|
|
||||||
|
# Get total count
|
||||||
|
count_query = select(func.count()).select_from(query.subquery())
|
||||||
|
total_result = await db.execute(count_query)
|
||||||
|
total = total_result.scalar()
|
||||||
|
|
||||||
|
# Apply ordering and pagination
|
||||||
|
query = query.order_by(SigninLog.signed_at.desc())
|
||||||
|
offset = (page - 1) * size
|
||||||
|
query = query.offset(offset).limit(size)
|
||||||
|
|
||||||
|
# Execute query
|
||||||
|
result = await db.execute(query)
|
||||||
|
logs = result.scalars().all()
|
||||||
|
|
||||||
|
# Calculate total pages
|
||||||
|
total_pages = (total + size - 1) // size if total > 0 else 0
|
||||||
|
|
||||||
|
# Build response
|
||||||
|
paginated = PaginatedResponse(
|
||||||
|
items=[SigninLogResponse.model_validate(log) for log in logs],
|
||||||
|
total=total,
|
||||||
|
page=page,
|
||||||
|
size=size,
|
||||||
|
total_pages=total_pages,
|
||||||
|
)
|
||||||
|
|
||||||
|
return success_response(paginated.model_dump(mode="json"), "Signin logs retrieved")
|
||||||
196
backend/api_service/app/routers/tasks.py
Normal file
196
backend/api_service/app/routers/tasks.py
Normal file
@@ -0,0 +1,196 @@
|
|||||||
|
"""
|
||||||
|
Signin Task CRUD router.
|
||||||
|
All endpoints require JWT authentication and enforce resource ownership.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, HTTPException, status
|
||||||
|
from sqlalchemy import select
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
from croniter import croniter
|
||||||
|
import redis.asyncio as aioredis
|
||||||
|
import json
|
||||||
|
|
||||||
|
from shared.models import get_db, Account, Task, User
|
||||||
|
from shared.config import shared_settings
|
||||||
|
from shared.response import success_response
|
||||||
|
from api_service.app.dependencies import get_current_user
|
||||||
|
from api_service.app.schemas.task import (
|
||||||
|
TaskCreate,
|
||||||
|
TaskUpdate,
|
||||||
|
TaskResponse,
|
||||||
|
)
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/api/v1", tags=["tasks"])
|
||||||
|
|
||||||
|
|
||||||
|
def _task_to_dict(task: Task) -> dict:
    """Serialize a Task ORM object into a JSON-safe dict via its response schema."""
    validated = TaskResponse.model_validate(task)
    return validated.model_dump(mode="json")
|
||||||
|
|
||||||
|
|
||||||
|
async def _get_owned_account(
    account_id: str,
    user: User,
    db: AsyncSession,
) -> Account:
    """Fetch an account by id and verify it belongs to the current user.

    Raises 404 when the account does not exist, 403 when it is owned by
    a different user.
    """
    row = await db.execute(select(Account).where(Account.id == account_id))
    found = row.scalar_one_or_none()
    if found is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Account not found")
    if found.user_id != user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access denied")
    return found
|
||||||
|
|
||||||
|
|
||||||
|
async def _get_owned_task(
    task_id: str,
    user: User,
    db: AsyncSession,
) -> Task:
    """Fetch a task (with its parent account eagerly loaded) and verify ownership.

    Ownership is established through the parent account's user_id.
    Raises 404 when the task does not exist, 403 on an ownership mismatch.
    """
    from sqlalchemy.orm import selectinload

    stmt = (
        select(Task)
        .options(selectinload(Task.account))
        .where(Task.id == task_id)
    )
    found = (await db.execute(stmt)).scalar_one_or_none()
    if found is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Task not found")
    if found.account.user_id != user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access denied")
    return found
|
||||||
|
|
||||||
|
|
||||||
|
def _validate_cron_expression(cron_expr: str) -> None:
    """Reject the request with 400 when *cron_expr* is not a valid cron string.

    croniter raises ValueError subclasses (and occasionally KeyError) for
    malformed expressions; both are translated into an HTTP 400.
    """
    try:
        croniter(cron_expr)
    except (ValueError, KeyError) as err:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Invalid cron expression: {str(err)}"
        )
|
||||||
|
|
||||||
|
|
||||||
|
async def _notify_scheduler(action: str, task_data: dict) -> None:
    """Notify Task_Scheduler via Redis pub/sub about task changes.

    Publishes {"action": ..., "task": ...} on the "task_updates" channel.
    Best-effort: failures are logged and never propagate to the API caller.

    Args:
        action: One of "create", "update", "delete".
        task_data: Serialized task payload (as produced by _task_to_dict).
    """
    import logging

    logger = logging.getLogger(__name__)
    redis_client = None
    try:
        redis_client = aioredis.from_url(
            shared_settings.REDIS_URL,
            encoding="utf-8",
            decode_responses=True,
        )
        message = {
            "action": action,  # "create", "update", "delete"
            "task": task_data,
        }
        await redis_client.publish("task_updates", json.dumps(message))
    except Exception:
        # Fix: was print() — use structured logging so the failure is visible
        # in service logs; still deliberately non-fatal for the request.
        logger.warning("Failed to notify scheduler", exc_info=True)
    finally:
        # Fix: close() previously ran only on the success path, leaking the
        # connection whenever from_url/publish raised.
        if redis_client is not None:
            try:
                await redis_client.close()
            except Exception:
                logger.debug("Error closing scheduler Redis client", exc_info=True)
|
||||||
|
|
||||||
|
|
||||||
|
# ---- CREATE TASK ----
|
||||||
|
|
||||||
|
@router.post("/accounts/{account_id}/tasks", status_code=status.HTTP_201_CREATED)
async def create_task(
    account_id: str,
    body: TaskCreate,
    user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Create a new signin task for the specified account.

    Raises 404/403 if the account does not exist or is not owned by the
    caller, 400 if the cron expression is invalid. On success the
    Task_Scheduler is notified (best-effort) via Redis pub/sub.
    """
    # Verify account ownership (404/403 raised inside the helper)
    account = await _get_owned_account(account_id, user, db)

    # Validate cron expression before touching the database
    _validate_cron_expression(body.cron_expression)

    # Create task — new tasks start enabled
    task = Task(
        account_id=account.id,
        cron_expression=body.cron_expression,
        is_enabled=True,
    )
    db.add(task)
    await db.commit()
    # refresh() populates server-generated fields (id, created_at)
    await db.refresh(task)

    # Notify scheduler only after the commit so it never sees an
    # uncommitted task
    await _notify_scheduler("create", _task_to_dict(task))

    return success_response(_task_to_dict(task), "Task created")
|
||||||
|
|
||||||
|
|
||||||
|
# ---- LIST TASKS FOR ACCOUNT ----
|
||||||
|
|
||||||
|
@router.get("/accounts/{account_id}/tasks")
async def list_tasks(
    account_id: str,
    user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Get all tasks for the specified account.

    Raises 404/403 (via the ownership helper) if the account is missing
    or belongs to another user. Returns an unpaginated list.
    """
    # Verify account ownership
    account = await _get_owned_account(account_id, user, db)

    # Fetch tasks
    result = await db.execute(
        select(Task).where(Task.account_id == account.id)
    )
    tasks = result.scalars().all()

    return success_response(
        [_task_to_dict(t) for t in tasks],
        "Tasks retrieved",
    )
|
||||||
|
|
||||||
|
|
||||||
|
# ---- UPDATE TASK ----
|
||||||
|
|
||||||
|
@router.put("/tasks/{task_id}")
async def update_task(
    task_id: str,
    body: TaskUpdate,
    user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Update task (enable/disable).

    Only is_enabled is mutable; a body with is_enabled omitted is a no-op
    commit. The scheduler is notified after the commit (best-effort).
    """
    task = await _get_owned_task(task_id, user, db)

    if body.is_enabled is not None:
        task.is_enabled = body.is_enabled

    await db.commit()
    await db.refresh(task)

    # Notify scheduler with the persisted state
    await _notify_scheduler("update", _task_to_dict(task))

    return success_response(_task_to_dict(task), "Task updated")
|
||||||
|
|
||||||
|
|
||||||
|
# ---- DELETE TASK ----
|
||||||
|
|
||||||
|
@router.delete("/tasks/{task_id}")
async def delete_task(
    task_id: str,
    user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Delete a task.

    Raises 404/403 via the ownership helper. The payload is serialized
    BEFORE deletion so the scheduler notification still has the task data.
    """
    task = await _get_owned_task(task_id, user, db)
    task_data = _task_to_dict(task)

    await db.delete(task)
    await db.commit()

    # Notify scheduler after the row is gone, using the pre-delete snapshot
    await _notify_scheduler("delete", task_data)

    return success_response(None, "Task deleted")
|
||||||
0
backend/api_service/app/schemas/__init__.py
Normal file
0
backend/api_service/app/schemas/__init__.py
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
backend/api_service/app/schemas/__pycache__/task.cpython-311.pyc
Normal file
BIN
backend/api_service/app/schemas/__pycache__/task.cpython-311.pyc
Normal file
Binary file not shown.
34
backend/api_service/app/schemas/account.py
Normal file
34
backend/api_service/app/schemas/account.py
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
"""
|
||||||
|
Pydantic schemas for Weibo Account CRUD operations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
|
||||||
|
class AccountCreate(BaseModel):
    """Request body for creating a new Weibo account.

    The raw cookie is accepted here in plaintext; encryption before
    storage is the router/service layer's responsibility.
    """
    weibo_user_id: str = Field(..., min_length=1, max_length=20, description="Weibo user ID")
    cookie: str = Field(..., min_length=1, description="Raw Weibo cookie string")
    remark: Optional[str] = Field(None, max_length=100, description="Optional note")
|
||||||
|
|
||||||
|
|
||||||
|
class AccountUpdate(BaseModel):
    """Request body for updating an existing Weibo account.

    Both fields are optional; omitted fields are left unchanged.
    """
    cookie: Optional[str] = Field(None, min_length=1, description="New cookie (will be re-encrypted)")
    remark: Optional[str] = Field(None, max_length=100, description="Updated note")
|
||||||
|
|
||||||
|
|
||||||
|
class AccountResponse(BaseModel):
    """Public representation of a Weibo account (no cookie plaintext)."""
    id: str
    user_id: str  # owning platform user (User.id)
    weibo_user_id: str
    remark: Optional[str]
    status: str  # account health flag -- values assigned elsewhere, confirm the enum
    last_checked_at: Optional[datetime]
    created_at: Optional[datetime]

    class Config:
        # Allow construction directly from ORM objects
        from_attributes = True
|
||||||
30
backend/api_service/app/schemas/signin_log.py
Normal file
30
backend/api_service/app/schemas/signin_log.py
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
"""
|
||||||
|
Pydantic schemas for Signin Log query operations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, List, Any, Dict
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
|
||||||
|
class SigninLogResponse(BaseModel):
    """Public representation of a signin log entry."""
    id: int
    account_id: str  # references Account.id
    topic_title: Optional[str]
    status: str  # signin outcome -- values written by the executor, confirm the set
    reward_info: Optional[Any]  # opaque reward payload from Weibo
    error_message: Optional[str]  # populated only on failures
    signed_at: datetime

    class Config:
        # Allow construction directly from ORM objects
        from_attributes = True
|
||||||
|
|
||||||
|
|
||||||
|
class PaginatedResponse(BaseModel):
    """Paginated response wrapper for signin logs."""
    items: List[SigninLogResponse]
    total: int  # total matching rows across all pages
    page: int  # 1-based page number requested
    size: int  # page size requested
    total_pages: int  # ceil(total / size); 0 when there are no rows
|
||||||
29
backend/api_service/app/schemas/task.py
Normal file
29
backend/api_service/app/schemas/task.py
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
"""
|
||||||
|
Pydantic schemas for Task CRUD operations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
|
||||||
|
class TaskCreate(BaseModel):
    """Request body for creating a new signin task.

    The expression is validated server-side with croniter before use.
    """
    cron_expression: str = Field(..., min_length=1, max_length=50, description="Cron expression for scheduling")
|
||||||
|
|
||||||
|
|
||||||
|
class TaskUpdate(BaseModel):
    """Request body for updating an existing task.

    Only the enabled flag is mutable; None means "leave unchanged".
    """
    is_enabled: Optional[bool] = Field(None, description="Enable or disable the task")
|
||||||
|
|
||||||
|
|
||||||
|
class TaskResponse(BaseModel):
    """Public representation of a signin task."""
    id: str
    account_id: str  # owning Weibo account (Account.id)
    cron_expression: str
    is_enabled: bool
    created_at: Optional[datetime]

    class Config:
        # Allow construction directly from ORM objects
        from_attributes = True
|
||||||
34
backend/auth_service/Dockerfile
Normal file
34
backend/auth_service/Dockerfile
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
# Weibo-HotSign Authentication Service Dockerfile
FROM python:3.11-slim

# Set working directory
WORKDIR /app

# Install system dependencies.
# curl is required by the HEALTHCHECK below (it is not present in -slim images).
RUN apt-get update && apt-get install -y --no-install-recommends \
    gcc \
    default-libmysqlclient-dev \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements first for better layer caching.
# Fix: "COPY ../requirements.txt ." is invalid — Docker cannot COPY from
# outside the build context. Build with the context set to backend/, e.g.:
#   docker build -f auth_service/Dockerfile backend/
COPY requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY app/ ./app/

# Create non-root user for security
RUN groupadd -r appuser && useradd -r -g appuser appuser
USER appuser

# Expose port
EXPOSE 8000

# Health check (hits the /health endpoint defined in app.main)
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8000/health || exit 1

# Start application
CMD ["python", "-m", "app.main"]
|
||||||
0
backend/auth_service/__init__.py
Normal file
0
backend/auth_service/__init__.py
Normal file
BIN
backend/auth_service/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
backend/auth_service/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
0
backend/auth_service/app/__init__.py
Normal file
0
backend/auth_service/app/__init__.py
Normal file
BIN
backend/auth_service/app/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
backend/auth_service/app/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/auth_service/app/__pycache__/main.cpython-311.pyc
Normal file
BIN
backend/auth_service/app/__pycache__/main.cpython-311.pyc
Normal file
Binary file not shown.
50
backend/auth_service/app/config.py
Normal file
50
backend/auth_service/app/config.py
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
"""
|
||||||
|
Configuration settings for Authentication Service
|
||||||
|
Loads environment variables and provides configuration object
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
from pydantic_settings import BaseSettings
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
class Settings(BaseSettings):
    """Application settings loaded from the environment (and optional .env).

    Required variables (no default — missing values fail fast at startup
    with a clear pydantic validation error):
      - DATABASE_URL, e.g. mysql+aiomysql://user:password@host/dbname
      - JWT_SECRET_KEY
    """

    # Database settings — required.
    # Fix: the previous os.getenv("DATABASE_URL") with no default silently
    # produced None (despite comments claiming it would "raise an error");
    # pydantic-settings reads env vars itself, so a bare required field gives
    # the intended fail-fast behavior with a readable message.
    DATABASE_URL: str

    # JWT settings — secret is required, the rest have safe defaults.
    JWT_SECRET_KEY: str
    JWT_ALGORITHM: str = "HS256"
    JWT_EXPIRATION_HOURS: int = 24

    # Security settings
    BCRYPT_ROUNDS: int = 12

    # Application settings — BaseSettings already parses DEBUG/HOST/PORT
    # from the environment (including bool/int coercion), so the manual
    # os.getenv + lower() == "true" dance is unnecessary.
    APP_NAME: str = "Weibo-HotSign Authentication Service"
    DEBUG: bool = False
    HOST: str = "0.0.0.0"
    PORT: int = 8000

    # CORS settings
    ALLOWED_ORIGINS: list = [
        "http://localhost:3000",
        "http://localhost:80",
        "http://127.0.0.1:3000",
    ]

    class Config:
        case_sensitive = True
        env_file = ".env"


# Create global settings instance
settings = Settings()
|
||||||
223
backend/auth_service/app/main.py
Normal file
223
backend/auth_service/app/main.py
Normal file
@@ -0,0 +1,223 @@
|
|||||||
|
"""
|
||||||
|
Weibo-HotSign Authentication Service
|
||||||
|
Main FastAPI application entry point
|
||||||
|
"""
|
||||||
|
|
||||||
|
from fastapi import FastAPI, Depends, HTTPException, status, Security
|
||||||
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
|
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
from sqlalchemy import select
|
||||||
|
import uvicorn
|
||||||
|
import os
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from shared.models import get_db, User
|
||||||
|
from auth_service.app.models.database import create_tables
|
||||||
|
from auth_service.app.schemas.user import (
|
||||||
|
UserCreate, UserLogin, UserResponse, Token, TokenData, RefreshTokenRequest,
|
||||||
|
)
|
||||||
|
from auth_service.app.services.auth_service import AuthService
|
||||||
|
from auth_service.app.utils.security import (
|
||||||
|
verify_password, create_access_token, decode_access_token,
|
||||||
|
create_refresh_token, verify_refresh_token, revoke_refresh_token,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Configure logger
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Initialize FastAPI app
|
||||||
|
app = FastAPI(
|
||||||
|
title="Weibo-HotSign Authentication Service",
|
||||||
|
description="Handles user authentication and authorization for Weibo-HotSign system",
|
||||||
|
version="1.0.0",
|
||||||
|
docs_url="/docs",
|
||||||
|
redoc_url="/redoc"
|
||||||
|
)
|
||||||
|
|
||||||
|
# CORS middleware configuration
|
||||||
|
app.add_middleware(
|
||||||
|
CORSMiddleware,
|
||||||
|
allow_origins=["http://localhost:3000", "http://localhost:80"],
|
||||||
|
allow_credentials=True,
|
||||||
|
allow_methods=["*"],
|
||||||
|
allow_headers=["*"],
|
||||||
|
)
|
||||||
|
|
||||||
|
# Security scheme for JWT
|
||||||
|
security = HTTPBearer()
|
||||||
|
|
||||||
|
async def get_current_user(
    credentials: HTTPAuthorizationCredentials = Security(security),
    db: AsyncSession = Depends(get_db)
) -> UserResponse:
    """
    FastAPI dependency: resolve the Bearer JWT into the current user.

    Raises:
        401 if the token is missing, expired, or has no subject claim,
        404 if the token's subject no longer exists,
        403 if the user account is deactivated.
    """
    token = credentials.credentials
    payload = decode_access_token(token)

    if payload is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid or expired token",
            headers={"WWW-Authenticate": "Bearer"},
        )

    # "sub" carries the user id (set at token creation in login/refresh)
    user_id = payload.get("sub")
    if not user_id:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid token payload",
            headers={"WWW-Authenticate": "Bearer"},
        )

    auth_service = AuthService(db)
    user = await auth_service.get_user_by_id(user_id)

    if user is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="User not found",
        )

    if not user.is_active:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="User account is deactivated",
        )

    # Fix: from_orm() is pydantic v1 API (deprecated in v2); the project is
    # v2 (api_service uses model_validate/model_dump) and the schema already
    # declares from_attributes = True.
    return UserResponse.model_validate(user)
|
||||||
|
|
||||||
|
@app.on_event("startup")
async def startup_event():
    """Initialize database tables on startup"""
    # NOTE(review): @app.on_event is deprecated in recent FastAPI in favor of
    # lifespan handlers; functional for now, worth migrating eventually.
    await create_tables()
|
||||||
|
|
||||||
|
@app.get("/")
async def root():
    """Service banner: name, running status, and version."""
    banner = {
        "service": "Weibo-HotSign Authentication Service",
        "status": "running",
        "version": "1.0.0",
    }
    return banner
|
||||||
|
|
||||||
|
@app.get("/health")
async def health_check():
    """Liveness probe (also targeted by the container HEALTHCHECK)."""
    payload = {"status": "healthy"}
    return payload
|
||||||
|
|
||||||
|
@app.post("/auth/register", response_model=UserResponse, status_code=status.HTTP_201_CREATED)
async def register_user(user_data: UserCreate, db: AsyncSession = Depends(get_db)):
    """
    Register a new user account.

    Returns 409 when the email or username is already taken, 400 when the
    password fails the strength policy (raised inside create_user), and
    500 only for genuinely unexpected failures.
    """
    auth_service = AuthService(db)

    # Check if user already exists - optimized with single query
    email_user, username_user = await auth_service.check_user_exists(user_data.email, user_data.username)

    if email_user:
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail="User with this email already exists"
        )

    if username_user:
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail="Username already taken"
        )

    # Create new user
    try:
        user = await auth_service.create_user(user_data)
        # Fix: from_orm() is pydantic v1 API; project is v2 (model_validate).
        return UserResponse.model_validate(user)
    except HTTPException:
        # Fix: create_user raises HTTPException (400 weak password, 409
        # duplicate); the previous bare "except Exception" swallowed those
        # and re-raised everything as 500. Let them propagate unchanged.
        raise
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to create user: {str(e)}"
        ) from e
|
||||||
|
|
||||||
|
@app.post("/auth/login", response_model=Token)
async def login_user(login_data: UserLogin, db: AsyncSession = Depends(get_db)):
    """
    Authenticate user and return JWT token.

    Missing user and wrong password deliberately return the same 401
    message so the endpoint does not leak which emails are registered;
    a deactivated account returns 403.
    """
    auth_service = AuthService(db)

    # Find user by email
    user = await auth_service.get_user_by_email(login_data.email)
    if not user:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid email or password"
        )

    # Verify password
    if not verify_password(login_data.password, user.hashed_password):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid email or password"
        )

    # Check if user is active
    if not user.is_active:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="User account is deactivated"
        )

    # Create access token
    access_token = create_access_token(data={"sub": str(user.id), "username": user.username})

    # Create refresh token (stored in Redis)
    refresh_token = await create_refresh_token(str(user.id))

    # NOTE(review): expires_in reports 3600s, but config sets
    # JWT_EXPIRATION_HOURS = 24 — confirm which lifetime
    # create_access_token actually applies (not visible here).
    return Token(
        access_token=access_token,
        refresh_token=refresh_token,
        token_type="bearer",
        expires_in=3600  # 1 hour
    )
|
||||||
|
|
||||||
|
@app.post("/auth/refresh", response_model=Token)
async def refresh_token(body: RefreshTokenRequest, db: AsyncSession = Depends(get_db)):
    """
    Exchange a valid refresh token for a new access + refresh token pair (Token Rotation).
    The old refresh token is revoked immediately.
    """
    # Verify the incoming refresh token (looked up in Redis by the helper)
    user_id = await verify_refresh_token(body.refresh_token)
    if user_id is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid or expired refresh token",
        )

    # Ensure the user still exists and is active
    auth_service = AuthService(db)
    user = await auth_service.get_user_by_id(user_id)
    if user is None or not user.is_active:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="User not found or deactivated",
        )

    # Revoke old token, issue new pair.
    # NOTE(review): revoke-then-issue is not atomic — if issuing fails after
    # the revoke, the client must log in again. Acceptable for rotation, but
    # worth confirming this is the intended failure mode.
    await revoke_refresh_token(body.refresh_token)
    new_access = create_access_token(data={"sub": str(user.id), "username": user.username})
    new_refresh = await create_refresh_token(str(user.id))

    return Token(
        access_token=new_access,
        refresh_token=new_refresh,
        token_type="bearer",
        expires_in=3600,
    )
|
||||||
|
|
||||||
|
@app.get("/auth/me", response_model=UserResponse)
async def get_current_user_info(current_user: UserResponse = Depends(get_current_user)):
    """
    Get current user information.

    Token decoding, DB lookup, and active-status checks all happen in the
    get_current_user dependency; this endpoint just echoes its result.
    """
    return current_user
|
||||||
0
backend/auth_service/app/models/__init__.py
Normal file
0
backend/auth_service/app/models/__init__.py
Normal file
Binary file not shown.
Binary file not shown.
15
backend/auth_service/app/models/database.py
Normal file
15
backend/auth_service/app/models/database.py
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
"""
|
||||||
|
Database models and connection management for Authentication Service.
|
||||||
|
Re-exports shared module components for backward compatibility.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Re-export everything from the shared module
|
||||||
|
from shared.models import Base, get_db, engine, AsyncSessionLocal, User
|
||||||
|
|
||||||
|
__all__ = ["Base", "get_db", "engine", "AsyncSessionLocal", "User"]
|
||||||
|
|
||||||
|
|
||||||
|
async def create_tables():
    """Create all tables in the database."""
    # create_all is idempotent (checkfirst is True by default), so this is
    # safe to run on every service startup.
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
|
||||||
0
backend/auth_service/app/schemas/__init__.py
Normal file
0
backend/auth_service/app/schemas/__init__.py
Normal file
Binary file not shown.
Binary file not shown.
57
backend/auth_service/app/schemas/user.py
Normal file
57
backend/auth_service/app/schemas/user.py
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
"""
|
||||||
|
Pydantic schemas for User-related data structures
|
||||||
|
Defines request/response models for authentication endpoints
|
||||||
|
"""
|
||||||
|
|
||||||
|
from pydantic import BaseModel, EmailStr, Field
|
||||||
|
from typing import Optional
|
||||||
|
from datetime import datetime
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
|
class UserBase(BaseModel):
    """Base schema for user data shared by registration and response models."""
    username: str = Field(..., min_length=3, max_length=50, description="Unique username")
    email: EmailStr = Field(..., description="Valid email address")
|
||||||
|
|
||||||
|
class UserCreate(UserBase):
    """Schema for user registration request.

    Strength beyond the length minimum is enforced server-side by
    validate_password_strength in AuthService.create_user.
    """
    password: str = Field(..., min_length=8, description="Password (min 8 characters)")
|
||||||
|
|
||||||
|
class UserLogin(BaseModel):
    """Schema for user login request (email + password credentials)."""
    email: EmailStr = Field(..., description="User's email address")
    password: str = Field(..., description="User's password")
|
||||||
|
|
||||||
|
class UserUpdate(BaseModel):
    """Schema for user profile updates; None fields are left unchanged."""
    username: Optional[str] = Field(None, min_length=3, max_length=50)
    email: Optional[EmailStr] = None
    is_active: Optional[bool] = None
|
||||||
|
|
||||||
|
class UserResponse(UserBase):
    """Schema for user response data (never exposes the password hash)."""
    id: UUID  # NOTE(review): User.id is handled as a string elsewhere (MySQL) — confirm UUID coercion works
    created_at: datetime
    is_active: bool

    class Config:
        from_attributes = True  # Enable ORM mode
|
||||||
|
|
||||||
|
class Token(BaseModel):
    """Schema for JWT token response (login / refresh)"""
    access_token: str = Field(..., description="JWT access token")
    refresh_token: str = Field(..., description="Opaque refresh token")
    token_type: str = Field(default="bearer", description="Token type")
    expires_in: int = Field(..., description="Access token expiration time in seconds")
|
||||||
|
|
||||||
|
|
||||||
|
class RefreshTokenRequest(BaseModel):
    """Schema for token refresh request (body of POST /auth/refresh)."""
    refresh_token: str = Field(..., description="The refresh token to exchange")
|
||||||
|
|
||||||
|
|
||||||
|
class TokenData(BaseModel):
    """Schema for decoded token payload."""
    sub: str = Field(..., description="Subject (user ID)")
    username: str = Field(..., description="Username")
    exp: Optional[int] = None  # expiry as a Unix timestamp, when present
|
||||||
0
backend/auth_service/app/services/__init__.py
Normal file
0
backend/auth_service/app/services/__init__.py
Normal file
Binary file not shown.
Binary file not shown.
191
backend/auth_service/app/services/auth_service.py
Normal file
191
backend/auth_service/app/services/auth_service.py
Normal file
@@ -0,0 +1,191 @@
|
|||||||
|
"""
|
||||||
|
Authentication service business logic
|
||||||
|
Handles user registration, login, and user management operations
|
||||||
|
"""
|
||||||
|
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
from sqlalchemy import select, or_
|
||||||
|
from sqlalchemy.exc import IntegrityError
|
||||||
|
from fastapi import HTTPException, status
|
||||||
|
import logging
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from shared.models import User
|
||||||
|
from ..schemas.user import UserCreate, UserLogin
|
||||||
|
from ..utils.security import hash_password, validate_password_strength, verify_password
|
||||||
|
|
||||||
|
# Configure logger
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
class AuthService:
|
||||||
|
"""Service class for authentication and user management"""
|
||||||
|
|
||||||
|
    def __init__(self, db: AsyncSession):
        # Request-scoped service: borrows the caller's AsyncSession; it never
        # creates or closes the session itself.
        self.db = db
|
||||||
|
|
||||||
|
async def get_user_by_email(self, email: str) -> Optional[User]:
|
||||||
|
"""Find user by email address"""
|
||||||
|
try:
|
||||||
|
stmt = select(User).where(User.email == email)
|
||||||
|
result = await self.db.execute(stmt)
|
||||||
|
return result.scalar_one_or_none()
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error fetching user by email {email}: {e}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
async def get_user_by_username(self, username: str) -> Optional[User]:
|
||||||
|
"""Find user by username"""
|
||||||
|
try:
|
||||||
|
stmt = select(User).where(User.username == username)
|
||||||
|
result = await self.db.execute(stmt)
|
||||||
|
return result.scalar_one_or_none()
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error fetching user by username {username}: {e}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
async def get_user_by_id(self, user_id: str) -> Optional[User]:
|
||||||
|
"""Find user by UUID"""
|
||||||
|
try:
|
||||||
|
# For MySQL, user_id is already a string, no need to convert to UUID
|
||||||
|
stmt = select(User).where(User.id == user_id)
|
||||||
|
result = await self.db.execute(stmt)
|
||||||
|
return result.scalar_one_or_none()
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error fetching user by ID {user_id}: {e}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
async def create_user(self, user_data: UserCreate) -> User:
|
||||||
|
"""Create a new user account with validation"""
|
||||||
|
|
||||||
|
# Validate password strength
|
||||||
|
is_strong, message = validate_password_strength(user_data.password)
|
||||||
|
if not is_strong:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_400_BAD_REQUEST,
|
||||||
|
detail=f"Password too weak: {message}"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Hash password
|
||||||
|
hashed_password = hash_password(user_data.password)
|
||||||
|
|
||||||
|
# Create user instance
|
||||||
|
user = User(
|
||||||
|
username=user_data.username,
|
||||||
|
email=user_data.email,
|
||||||
|
hashed_password=hashed_password,
|
||||||
|
is_active=True
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.db.add(user)
|
||||||
|
await self.db.commit()
|
||||||
|
await self.db.refresh(user)
|
||||||
|
|
||||||
|
logger.info(f"Successfully created user: {user.username} ({user.email})")
|
||||||
|
return user
|
||||||
|
|
||||||
|
except IntegrityError as e:
|
||||||
|
await self.db.rollback()
|
||||||
|
logger.error(f"Integrity error creating user {user_data.username}: {e}")
|
||||||
|
|
||||||
|
# Check which constraint was violated
|
||||||
|
if "users_username_key" in str(e.orig):
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_409_CONFLICT,
|
||||||
|
detail="Username already exists"
|
||||||
|
)
|
||||||
|
elif "users_email_key" in str(e.orig):
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_409_CONFLICT,
|
||||||
|
detail="Email already registered"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
detail="Failed to create user due to database constraint"
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
await self.db.rollback()
|
||||||
|
logger.error(f"Unexpected error creating user {user_data.username}: {e}")
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
detail="Internal server error during user creation"
|
||||||
|
)
|
||||||
|
|
||||||
|
async def check_user_exists(self, email: str, username: str) -> tuple[Optional[User], Optional[User]]:
    """Look up users matching the given email or username with one SELECT.

    Returns a (user_with_email, user_with_username) pair; either element is
    None when no matching row exists.  On query failure the error is logged
    and (None, None) is returned, i.e. a DB outage reads as "not found".
    """
    try:
        query = select(User).where(or_(User.email == email, User.username == username))
        rows = (await self.db.execute(query)).scalars().all()
        # At most two rows can match; pick each one out by the field it hit.
        matched_by_email = next((u for u in rows if u.email == email), None)
        matched_by_username = next((u for u in rows if u.username == username), None)
        return matched_by_email, matched_by_username
    except Exception as exc:
        logger.error(f"Error checking user existence: {exc}")
        return None, None
|
||||||
|
|
||||||
|
async def authenticate_user(self, login_data: UserLogin) -> Optional[User]:
    """Validate login credentials.

    Returns the User on success; None when the email is unknown, the
    password does not verify, or the account has been deactivated.
    """
    candidate = await self.get_user_by_email(login_data.email)
    if candidate is None:
        return None
    if not verify_password(login_data.password, candidate.hashed_password):
        return None
    if not candidate.is_active:
        # Correct password but disabled account — worth an audit trail entry.
        logger.warning(f"Login attempt for deactivated user: {candidate.email}")
        return None
    logger.info(f"Successful authentication for user: {candidate.username}")
    return candidate
|
||||||
|
|
||||||
|
async def update_user_status(self, user_id: str, is_active: bool) -> Optional[User]:
    """Set a user's is_active flag.

    Returns the refreshed User on success, or None when the user does not
    exist or the commit fails (the transaction is rolled back).
    """
    target = await self.get_user_by_id(user_id)
    if target is None:
        return None

    target.is_active = is_active
    try:
        await self.db.commit()
        await self.db.refresh(target)
        logger.info(f"Updated user {target.username} status to: {is_active}")
        return target
    except Exception as exc:
        await self.db.rollback()
        logger.error(f"Error updating user status: {exc}")
        return None
|
||||||
|
|
||||||
|
async def get_all_users(self, skip: int = 0, limit: int = 100) -> list[User]:
    """Return one page of users (admin function).

    Pagination is offset/limit based; an empty list is returned both when
    there are no users and when the query fails (failure is logged).
    """
    try:
        page_query = select(User).offset(skip).limit(limit)
        return (await self.db.execute(page_query)).scalars().all()
    except Exception as exc:
        logger.error(f"Error fetching users list: {exc}")
        return []
|
||||||
|
|
||||||
|
async def check_database_health(self) -> bool:
    """Probe the database with a trivial one-row SELECT.

    Returns True when the query executes, False (with a logged error)
    otherwise — suitable for a /health endpoint.
    """
    try:
        await self.db.execute(select(User).limit(1))
    except Exception as exc:
        logger.error(f"Database health check failed: {exc}")
        return False
    return True
|
||||||
0
backend/auth_service/app/utils/__init__.py
Normal file
0
backend/auth_service/app/utils/__init__.py
Normal file
Binary file not shown.
Binary file not shown.
148
backend/auth_service/app/utils/security.py
Normal file
148
backend/auth_service/app/utils/security.py
Normal file
@@ -0,0 +1,148 @@
|
|||||||
|
"""
|
||||||
|
Security utilities for password hashing and JWT token management
|
||||||
|
"""
|
||||||
|
|
||||||
|
import bcrypt
|
||||||
|
import hashlib
|
||||||
|
import jwt
|
||||||
|
import secrets
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
import redis.asyncio as aioredis
|
||||||
|
|
||||||
|
from shared.config import shared_settings
|
||||||
|
|
||||||
|
# Auth-specific defaults
|
||||||
|
BCRYPT_ROUNDS = 12
|
||||||
|
REFRESH_TOKEN_TTL = 7 * 24 * 3600 # 7 days in seconds
|
||||||
|
|
||||||
|
# Lazy-initialised async Redis client
|
||||||
|
_redis_client: Optional[aioredis.Redis] = None
|
||||||
|
|
||||||
|
|
||||||
|
async def get_redis() -> aioredis.Redis:
    """Return the process-wide async Redis client, creating it on first use.

    The client is cached in the module-level `_redis_client` so every caller
    shares one connection pool; decode_responses=True yields `str` values.
    """
    global _redis_client
    if _redis_client is None:
        _redis_client = aioredis.from_url(shared_settings.REDIS_URL, decode_responses=True)
    return _redis_client
|
||||||
|
|
||||||
|
def hash_password(password: str) -> str:
    """Hash a password with bcrypt (cost factor BCRYPT_ROUNDS).

    Returns the salted hash as a UTF-8 string suitable for DB storage.
    """
    salted_digest = bcrypt.hashpw(
        password.encode("utf-8"),
        bcrypt.gensalt(rounds=BCRYPT_ROUNDS),
    )
    return salted_digest.decode("utf-8")
|
||||||
|
|
||||||
|
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Check a plaintext password against a stored bcrypt hash.

    Returns True on a match; False on mismatch or on any error
    (e.g. a malformed stored hash), so callers never see an exception.
    """
    try:
        candidate = plain_password.encode("utf-8")
        stored = hashed_password.encode("utf-8")
        return bcrypt.checkpw(candidate, stored)
    except Exception:
        return False
|
||||||
|
|
||||||
|
def create_access_token(data: dict, expires_delta: Optional[timedelta] = None) -> str:
    """
    Create a signed JWT access token.

    Args:
        data: Claims to embed (copied, the caller's dict is not mutated).
        expires_delta: Optional explicit lifetime.  Any non-None value is
            honoured — including timedelta(0); previously a zero/falsy delta
            silently fell back to the default, which was a footgun.  When
            None, shared_settings.JWT_EXPIRATION_HOURS is used.

    Returns:
        The encoded JWT string.
    """
    # datetime.utcnow() is deprecated (Python 3.12+) and returns a naive
    # datetime; use an explicit UTC-aware timestamp.  PyJWT accepts aware
    # datetimes for the "exp" claim and converts them to epoch seconds.
    from datetime import timezone  # local import keeps the module header unchanged

    to_encode = data.copy()
    if expires_delta is not None:
        lifetime = expires_delta
    else:
        lifetime = timedelta(hours=shared_settings.JWT_EXPIRATION_HOURS)
    to_encode.update({"exp": datetime.now(timezone.utc) + lifetime})
    return jwt.encode(to_encode, shared_settings.JWT_SECRET_KEY, algorithm=shared_settings.JWT_ALGORITHM)
|
||||||
|
|
||||||
|
def decode_access_token(token: str) -> Optional[dict]:
    """Decode and validate a JWT access token.

    Returns the claim payload when the signature and expiry check out,
    otherwise None.  (ExpiredSignatureError is a subclass of
    InvalidTokenError, so one combined handler covers both cases.)
    """
    try:
        return jwt.decode(
            token,
            shared_settings.JWT_SECRET_KEY,
            algorithms=[shared_settings.JWT_ALGORITHM],
        )
    except (jwt.ExpiredSignatureError, jwt.InvalidTokenError):
        return None
|
||||||
|
|
||||||
|
def generate_password_reset_token(email: str) -> str:
    """Create a one-hour JWT that authorises a password reset for *email*.

    The "type" claim lets the verifier distinguish reset tokens from
    ordinary access tokens.
    """
    reset_claims = {"email": email, "type": "password_reset"}
    return create_access_token(reset_claims, timedelta(hours=1))
|
||||||
|
|
||||||
|
# Password strength validation
|
||||||
|
def validate_password_strength(password: str) -> tuple[bool, str]:
    """Check a password against the policy: length >= 8, plus at least one
    uppercase letter, lowercase letter, digit, and special character.

    Returns (is_valid, message); the message names the first failed rule,
    checked in a fixed order so callers get deterministic feedback.
    """
    special_characters = "!@#$%^&*()_+-=[]{}|;:,.<>?"
    rules = (
        (len(password) >= 8,
         "Password must be at least 8 characters long"),
        (any(ch.isupper() for ch in password),
         "Password must contain at least one uppercase letter"),
        (any(ch.islower() for ch in password),
         "Password must contain at least one lowercase letter"),
        (any(ch.isdigit() for ch in password),
         "Password must contain at least one digit"),
        (any(ch in special_characters for ch in password),
         "Password must contain at least one special character"),
    )
    for satisfied, failure_message in rules:
        if not satisfied:
            return False, failure_message
    return True, "Password is strong"
|
||||||
|
|
||||||
|
|
||||||
|
# --------------- Refresh Token helpers ---------------
|
||||||
|
|
||||||
|
def _hash_token(token: str) -> str:
|
||||||
|
"""SHA-256 hash of a refresh token for safe Redis key storage."""
|
||||||
|
return hashlib.sha256(token.encode("utf-8")).hexdigest()
|
||||||
|
|
||||||
|
|
||||||
|
async def create_refresh_token(user_id: str) -> str:
    """
    Mint a cryptographically random refresh token.

    Only the token's SHA-256 hash is written to Redis (TTL =
    REFRESH_TOKEN_TTL, 7 days), mapped to the owning user_id; the raw
    token is returned to the caller and never persisted.
    """
    raw_token = secrets.token_urlsafe(48)
    redis_conn = await get_redis()
    await redis_conn.setex(
        f"refresh_token:{_hash_token(raw_token)}",
        REFRESH_TOKEN_TTL,
        user_id,
    )
    return raw_token
|
||||||
|
|
||||||
|
|
||||||
|
async def verify_refresh_token(token: str) -> Optional[str]:
    """
    Verify a refresh token by looking up its SHA-256 hash in Redis.

    Returns the associated user_id, or None when the token is unknown,
    revoked, or expired (Redis TTL elapsed).
    """
    redis_conn = await get_redis()
    return await redis_conn.get(f"refresh_token:{_hash_token(token)}")
|
||||||
|
|
||||||
|
|
||||||
|
async def revoke_refresh_token(token: str) -> None:
    """Delete a refresh token's Redis entry (used during rotation).

    Deleting a key that does not exist is a no-op, so revoking an
    already-expired token is harmless.
    """
    redis_conn = await get_redis()
    await redis_conn.delete(f"refresh_token:{_hash_token(token)}")
|
||||||
31
backend/auth_service/requirements.txt
Normal file
31
backend/auth_service/requirements.txt
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
# Weibo-HotSign Authentication Service Requirements
|
||||||
|
# Web Framework
|
||||||
|
fastapi==0.104.1
|
||||||
|
uvicorn[standard]==0.24.0
|
||||||
|
pydantic-settings==2.0.3
|
||||||
|
|
||||||
|
# Database
|
||||||
|
sqlalchemy==2.0.23
|
||||||
|
aiomysql==0.2.0
|
||||||
|
PyMySQL==1.1.0
|
||||||
|
|
||||||
|
# Security
|
||||||
|
bcrypt==4.1.2
|
||||||
|
PyJWT[crypto]==2.8.0
|
||||||
|
|
||||||
|
# Validation and Serialization
|
||||||
|
pydantic==2.5.0
|
||||||
|
python-multipart==0.0.6
|
||||||
|
|
||||||
|
# Utilities
|
||||||
|
python-dotenv==1.0.0
|
||||||
|
requests==2.31.0
|
||||||
|
|
||||||
|
# Logging and Monitoring
|
||||||
|
structlog==23.2.0
|
||||||
|
|
||||||
|
# Development tools (optional)
|
||||||
|
# pytest==7.4.3
|
||||||
|
# pytest-asyncio==0.21.1
|
||||||
|
# black==23.11.0
|
||||||
|
# flake8==6.1.0
|
||||||
33
backend/requirements.txt
Normal file
33
backend/requirements.txt
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
# Weibo-HotSign Unified Backend Requirements
|
||||||
|
|
||||||
|
# Web Framework & Server
|
||||||
|
fastapi==0.104.1
|
||||||
|
uvicorn[standard]==0.24.0
|
||||||
|
|
||||||
|
# Task Queue
|
||||||
|
celery==5.3.6
|
||||||
|
redis==5.0.1
|
||||||
|
|
||||||
|
# Database
|
||||||
|
sqlalchemy==2.0.23
|
||||||
|
aiomysql==0.2.0
|
||||||
|
PyMySQL==1.1.0
|
||||||
|
|
||||||
|
# Configuration, Validation, and Serialization
|
||||||
|
pydantic-settings==2.0.3
|
||||||
|
pydantic==2.5.0
|
||||||
|
python-multipart==0.0.6
|
||||||
|
|
||||||
|
# Security
|
||||||
|
bcrypt==4.1.2
|
||||||
|
PyJWT[crypto]==2.8.0
|
||||||
|
pycryptodome==3.19.0
|
||||||
|
|
||||||
|
# HTTP & Utilities
|
||||||
|
httpx==0.25.2
|
||||||
|
requests==2.31.0
|
||||||
|
python-dotenv==1.0.0
|
||||||
|
croniter==2.0.1
|
||||||
|
|
||||||
|
# Logging and Monitoring
|
||||||
|
structlog==23.2.0
|
||||||
1
backend/shared/__init__.py
Normal file
1
backend/shared/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
"""Shared module for Weibo-HotSign backend services."""
|
||||||
BIN
backend/shared/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
backend/shared/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/shared/__pycache__/config.cpython-311.pyc
Normal file
BIN
backend/shared/__pycache__/config.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/shared/__pycache__/crypto.cpython-311.pyc
Normal file
BIN
backend/shared/__pycache__/crypto.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/shared/__pycache__/response.cpython-311.pyc
Normal file
BIN
backend/shared/__pycache__/response.cpython-311.pyc
Normal file
Binary file not shown.
31
backend/shared/config.py
Normal file
31
backend/shared/config.py
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
"""
|
||||||
|
Shared configuration for all Weibo-HotSign backend services.
|
||||||
|
Loads settings from environment variables using pydantic-settings.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from pydantic_settings import BaseSettings
|
||||||
|
|
||||||
|
|
||||||
|
class SharedSettings(BaseSettings):
    """Shared settings across all backend services.

    Every field can be overridden via an environment variable of the same
    (case-sensitive) name, or via a `.env` file in the working directory.
    """

    # Database
    # Async MySQL DSN consumed by shared.models.base.create_async_engine
    DATABASE_URL: str = "mysql+aiomysql://root:password@localhost/weibo_hotsign"

    # Redis
    # Used both as cache and Celery message broker
    REDIS_URL: str = "redis://localhost:6379/0"

    # JWT
    # SECURITY: the placeholder secret MUST be overridden in production
    JWT_SECRET_KEY: str = "change-me-in-production"
    JWT_ALGORITHM: str = "HS256"
    # Access-token lifetime used by create_access_token's default path
    JWT_EXPIRATION_HOURS: int = 24

    # Cookie encryption
    # SECURITY: passphrase fed through shared.crypto.derive_key (SHA-256 -> 32B AES key);
    # override the placeholder in production
    COOKIE_ENCRYPTION_KEY: str = "change-me-in-production"

    class Config:
        # Env var names must match field names exactly
        case_sensitive = True
        env_file = ".env"


# Module-level singleton imported by all services
shared_settings = SharedSettings()
|
||||||
44
backend/shared/crypto.py
Normal file
44
backend/shared/crypto.py
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
"""
|
||||||
|
AES-256-GCM Cookie encryption / decryption utilities.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import hashlib
|
||||||
|
|
||||||
|
from Crypto.Cipher import AES
|
||||||
|
|
||||||
|
|
||||||
|
def derive_key(raw_key: str) -> bytes:
    """Derive a fixed 32-byte key from an arbitrary passphrase via SHA-256.

    The digest length (32 bytes) is exactly what AES-256 requires.
    """
    key_material = raw_key.encode("utf-8")
    return hashlib.sha256(key_material).digest()
|
||||||
|
|
||||||
|
|
||||||
|
def encrypt_cookie(plaintext: str, key: bytes) -> tuple[str, str]:
    """
    Encrypt a cookie string with AES-256-GCM.

    The 16-byte GCM auth tag is appended to the ciphertext before base64
    encoding so decrypt_cookie can strip and verify it.

    Returns:
        (ciphertext_b64, iv_b64) — both base64-encoded strings; iv_b64 is
        the GCM nonce generated by the cipher.
    """
    gcm = AES.new(key, AES.MODE_GCM)
    body, auth_tag = gcm.encrypt_and_digest(plaintext.encode("utf-8"))
    encoded_payload = base64.b64encode(body + auth_tag).decode("utf-8")
    encoded_nonce = base64.b64encode(gcm.nonce).decode("utf-8")
    return encoded_payload, encoded_nonce
|
||||||
|
|
||||||
|
|
||||||
|
def decrypt_cookie(ciphertext_b64: str, iv_b64: str, key: bytes) -> str:
    """
    Decrypt a cookie previously produced by encrypt_cookie.

    Raises ValueError on failure (wrong key, corrupted data, tag mismatch).
    """
    payload = base64.b64decode(ciphertext_b64)
    # encrypt_cookie appends the 16-byte GCM tag to the ciphertext
    body, auth_tag = payload[:-16], payload[-16:]
    gcm = AES.new(key, AES.MODE_GCM, nonce=base64.b64decode(iv_b64))
    return gcm.decrypt_and_verify(body, auth_tag).decode("utf-8")
|
||||||
18
backend/shared/models/__init__.py
Normal file
18
backend/shared/models/__init__.py
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
"""Shared ORM models for Weibo-HotSign."""
|
||||||
|
|
||||||
|
from .base import Base, get_db, engine, AsyncSessionLocal
|
||||||
|
from .user import User
|
||||||
|
from .account import Account
|
||||||
|
from .task import Task
|
||||||
|
from .signin_log import SigninLog
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"Base",
|
||||||
|
"get_db",
|
||||||
|
"engine",
|
||||||
|
"AsyncSessionLocal",
|
||||||
|
"User",
|
||||||
|
"Account",
|
||||||
|
"Task",
|
||||||
|
"SigninLog",
|
||||||
|
]
|
||||||
BIN
backend/shared/models/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
backend/shared/models/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/shared/models/__pycache__/account.cpython-311.pyc
Normal file
BIN
backend/shared/models/__pycache__/account.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/shared/models/__pycache__/base.cpython-311.pyc
Normal file
BIN
backend/shared/models/__pycache__/base.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/shared/models/__pycache__/signin_log.cpython-311.pyc
Normal file
BIN
backend/shared/models/__pycache__/signin_log.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/shared/models/__pycache__/task.cpython-311.pyc
Normal file
BIN
backend/shared/models/__pycache__/task.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/shared/models/__pycache__/user.cpython-311.pyc
Normal file
BIN
backend/shared/models/__pycache__/user.cpython-311.pyc
Normal file
Binary file not shown.
30
backend/shared/models/account.py
Normal file
30
backend/shared/models/account.py
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
"""Account ORM model."""
|
||||||
|
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
from sqlalchemy import Column, DateTime, ForeignKey, String, Text
|
||||||
|
from sqlalchemy.orm import relationship
|
||||||
|
from sqlalchemy.sql import func
|
||||||
|
|
||||||
|
from .base import Base
|
||||||
|
|
||||||
|
|
||||||
|
class Account(Base):
    """A linked Weibo account owned by a platform user.

    Cookie material is stored encrypted; `iv` carries the nonce paired with
    `encrypted_cookies` (presumably produced by shared.crypto.encrypt_cookie,
    whose output shape matches — confirm at the call site).
    """

    __tablename__ = "accounts"

    # App-generated UUID primary key (known before the row is flushed)
    id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    # Owning platform user; rows vanish with the user (ON DELETE CASCADE)
    user_id = Column(String(36), ForeignKey("users.id", ondelete="CASCADE"), nullable=False)
    # Weibo UID, stored as text
    weibo_user_id = Column(String(20), nullable=False)
    # Optional human-readable label
    remark = Column(String(100))
    # Encrypted cookie blob + its base64 nonce
    encrypted_cookies = Column(Text, nullable=False)
    iv = Column(String(32), nullable=False)
    # Credential lifecycle state; starts as "pending" until validated
    status = Column(String(20), default="pending")
    # When the cookies were last validated, if ever
    last_checked_at = Column(DateTime, nullable=True)
    created_at = Column(DateTime, server_default=func.now())

    user = relationship("User", back_populates="accounts")
    # Tasks die with the account; sign-in logs deliberately do not cascade
    tasks = relationship("Task", back_populates="account", cascade="all, delete-orphan")
    signin_logs = relationship("SigninLog", back_populates="account")

    def __repr__(self):
        return f"<Account(id={self.id}, weibo_user_id='{self.weibo_user_id}')>"
|
||||||
33
backend/shared/models/base.py
Normal file
33
backend/shared/models/base.py
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
"""
|
||||||
|
Database engine, session factory, and declarative base.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import AsyncGenerator
|
||||||
|
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
|
||||||
|
from sqlalchemy.orm import sessionmaker, declarative_base
|
||||||
|
|
||||||
|
from ..config import shared_settings
|
||||||
|
|
||||||
|
# Pool options only make sense for a server database; they are skipped for
# SQLite URLs (aiosqlite's pool does not take these arguments).
_engine_kwargs: dict = {"echo": False}
if "sqlite" not in shared_settings.DATABASE_URL:
    _engine_kwargs.update(pool_size=20, max_overflow=30, pool_pre_ping=True)

# Single shared async engine for the whole process
engine = create_async_engine(shared_settings.DATABASE_URL, **_engine_kwargs)

# Session factory; expire_on_commit=False keeps ORM objects readable after
# commit without triggering lazy re-loads (important for async code)
AsyncSessionLocal = sessionmaker(
    engine,
    class_=AsyncSession,
    expire_on_commit=False,
)

# Declarative base shared by all ORM models in shared.models
Base = declarative_base()
|
||||||
|
|
||||||
|
|
||||||
|
async def get_db() -> AsyncGenerator[AsyncSession, None]:
    """Dependency that yields an async database session.

    The ``async with`` block already guarantees the session is closed when
    the scope ends (including on error), so the previous explicit
    ``try/finally: await session.close()`` was redundant — it closed the
    session a second time on every request.
    """
    async with AsyncSessionLocal() as session:
        yield session
|
||||||
23
backend/shared/models/signin_log.py
Normal file
23
backend/shared/models/signin_log.py
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
"""SigninLog ORM model."""
|
||||||
|
|
||||||
|
from sqlalchemy import Integer, Column, DateTime, ForeignKey, JSON, String, Text
|
||||||
|
from sqlalchemy.orm import relationship
|
||||||
|
from sqlalchemy.sql import func
|
||||||
|
|
||||||
|
from .base import Base
|
||||||
|
|
||||||
|
|
||||||
|
class SigninLog(Base):
    """Immutable record of one sign-in attempt and its outcome."""
    __tablename__ = "signin_logs"
    # Auto-increment integer PK — append-only, potentially high-volume table
    id = Column(Integer, primary_key=True, autoincrement=True)
    # Account the attempt was made for (no ON DELETE CASCADE: logs are kept)
    account_id = Column(String(36), ForeignKey("accounts.id"), nullable=False)
    # Super-topic title the attempt targeted
    topic_title = Column(String(100))
    # Outcome state string (exact value set not visible here — see executor)
    status = Column(String(20), nullable=False)
    # Optional structured reward data; schema defined by the sign-in executor
    reward_info = Column(JSON, nullable=True)
    # Populated only on failure
    error_message = Column(Text, nullable=True)
    signed_at = Column(DateTime, server_default=func.now())

    account = relationship("Account", back_populates="signin_logs")

    def __repr__(self):
        return f"<SigninLog(id={self.id}, status='{self.status}')>"
|
||||||
24
backend/shared/models/task.py
Normal file
24
backend/shared/models/task.py
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
"""Task ORM model."""
|
||||||
|
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, String
|
||||||
|
from sqlalchemy.orm import relationship
|
||||||
|
from sqlalchemy.sql import func
|
||||||
|
|
||||||
|
from .base import Base
|
||||||
|
|
||||||
|
|
||||||
|
class Task(Base):
    """Per-account sign-in schedule: one cron expression per task."""
    __tablename__ = "tasks"

    # App-generated UUID primary key
    id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    # Owning account; tasks are removed with the account (ON DELETE CASCADE)
    account_id = Column(String(36), ForeignKey("accounts.id", ondelete="CASCADE"), nullable=False)
    # Cron schedule string; parsed by the scheduler (croniter is a project dep)
    cron_expression = Column(String(50), nullable=False)
    # Soft on/off switch — disabled tasks stay in the table
    is_enabled = Column(Boolean, default=True)
    created_at = Column(DateTime, server_default=func.now())

    account = relationship("Account", back_populates="tasks")

    def __repr__(self):
        return f"<Task(id={self.id}, cron='{self.cron_expression}')>"
|
||||||
25
backend/shared/models/user.py
Normal file
25
backend/shared/models/user.py
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
"""User ORM model."""
|
||||||
|
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
from sqlalchemy import Boolean, Column, DateTime, String
|
||||||
|
from sqlalchemy.orm import relationship
|
||||||
|
from sqlalchemy.sql import func
|
||||||
|
|
||||||
|
from .base import Base
|
||||||
|
|
||||||
|
|
||||||
|
class User(Base):
    """Platform user who owns linked Weibo accounts."""
    __tablename__ = "users"

    # App-generated UUID primary key
    id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    # Unique, indexed login identifiers
    username = Column(String(50), unique=True, nullable=False, index=True)
    email = Column(String(255), unique=True, nullable=False, index=True)
    # bcrypt hash produced by auth_service's hash_password — never plaintext
    hashed_password = Column(String(255), nullable=False)
    created_at = Column(DateTime, server_default=func.now())
    # Deactivation flag checked at login; inactive users cannot authenticate
    is_active = Column(Boolean, default=True)

    # Deleting a user removes their linked Weibo accounts too
    accounts = relationship("Account", back_populates="user", cascade="all, delete-orphan")

    def __repr__(self):
        return f"<User(id={self.id}, username='{self.username}')>"
|
||||||
35
backend/shared/response.py
Normal file
35
backend/shared/response.py
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
"""
|
||||||
|
Unified API response format utilities.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Any, List, Optional
|
||||||
|
|
||||||
|
from fastapi.responses import JSONResponse
|
||||||
|
|
||||||
|
|
||||||
|
def success_response(data: Any = None, message: str = "Operation successful") -> dict:
    """Build the project-wide success envelope: {success, data, message}."""
    return dict(success=True, data=data, message=message)
|
||||||
|
|
||||||
|
|
||||||
|
def error_response(
    message: str,
    code: str,
    details: Optional[List[dict]] = None,
    status_code: int = 400,
) -> JSONResponse:
    """Build the project-wide error envelope and wrap it in a JSONResponse.

    The body mirrors success_response's shape (success/data/message) and
    adds an "error" object with a machine-readable code and detail list.
    """
    error_block = {"code": code, "details": details or []}
    envelope: dict = {
        "success": False,
        "data": None,
        "message": message,
        "error": error_block,
    }
    return JSONResponse(status_code=status_code, content=envelope)
|
||||||
34
backend/signin_executor/Dockerfile
Normal file
34
backend/signin_executor/Dockerfile
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
# Weibo-HotSign Sign-in Executor Service Dockerfile
FROM python:3.11-slim

# Set working directory
WORKDIR /app

# Install system dependencies
# NOTE: curl is required by the HEALTHCHECK below — python:3.11-slim does
# not ship it, so without this the health check always fails.
RUN apt-get update && apt-get install -y \
    gcc \
    curl \
    default-libmysqlclient-dev \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements first for better caching
COPY requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY app/ ./app/

# Create non-root user for security
RUN groupadd -r appuser && useradd -r -g appuser appuser
USER appuser

# Expose port
EXPOSE 8000

# Health check
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8000/health || exit 1

# Start application
CMD ["python", "-m", "app.main"]
|
||||||
56
backend/signin_executor/app/config.py
Normal file
56
backend/signin_executor/app/config.py
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
"""
|
||||||
|
Configuration for Sign-in Executor Service
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
from pydantic_settings import BaseSettings
|
||||||
|
|
||||||
|
class Settings(BaseSettings):
    """Sign-in Executor settings

    All values can be overridden through environment variables or a `.env`
    file; the os.getenv defaults below apply only when neither is set.
    """

    # Server settings
    HOST: str = os.getenv("HOST", "0.0.0.0")
    PORT: int = int(os.getenv("PORT", 8000))
    DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true"

    # Database settings
    # SECURITY: this default embeds what looks like real credentials and a
    # public IP address in source control.  Rotate these credentials and
    # always supply DATABASE_URL via the environment; do not rely on this
    # default in any deployment.
    DATABASE_URL: str = os.getenv(
        "DATABASE_URL",
        "mysql+aiomysql://weibo:123456789@118.195.133.163/weibo"
    )
    REDIS_URL: str = os.getenv("REDIS_URL", "redis://redis:6379")

    # External service URLs
    PROXY_POOL_URL: str = os.getenv("PROXY_POOL_URL", "http://proxy-pool:8080")
    BROWSER_AUTOMATION_URL: str = os.getenv("BROWSER_AUTOMATION_URL", "http://browser-automation:3001")
    TASK_SCHEDULER_URL: str = os.getenv("TASK_SCHEDULER_URL", "http://task-scheduler:8000")

    # Weibo API settings
    WEIBO_LOGIN_URL: str = "https://weibo.com/login.php"
    WEIBO_SUPER_TOPIC_URL: str = "https://weibo.com/p/aj/general/button"

    # Anti-bot protection settings
    # Random delay bounds (seconds) presumably inserted between requests
    # to mimic human pacing — confirm against the sign-in service usage.
    RANDOM_DELAY_MIN: float = float(os.getenv("RANDOM_DELAY_MIN", "1.0"))
    RANDOM_DELAY_MAX: float = float(os.getenv("RANDOM_DELAY_MAX", "3.0"))
    USER_AGENT_ROTATION: bool = os.getenv("USER_AGENT_ROTATION", "True").lower() == "true"

    # Cookie and session settings
    # SECURITY: placeholder key — must be overridden via the environment
    COOKIE_ENCRYPTION_KEY: str = os.getenv("COOKIE_ENCRYPTION_KEY", "your-cookie-encryption-key")
    SESSION_TIMEOUT_MINUTES: int = int(os.getenv("SESSION_TIMEOUT_MINUTES", "30"))

    # Browser automation settings
    BROWSER_HEADLESS: bool = os.getenv("BROWSER_HEADLESS", "True").lower() == "true"
    BROWSER_TIMEOUT_SECONDS: int = int(os.getenv("BROWSER_TIMEOUT_SECONDS", "30"))

    # Task execution settings
    MAX_CONCURRENT_SIGNIN: int = int(os.getenv("MAX_CONCURRENT_SIGNIN", "5"))
    TASK_TIMEOUT_SECONDS: int = int(os.getenv("TASK_TIMEOUT_SECONDS", "300"))

    # Logging
    LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")

    class Config:
        # Env var names must match field names exactly
        case_sensitive = True
        env_file = ".env"


settings = Settings()
|
||||||
226
backend/signin_executor/app/main.py
Normal file
226
backend/signin_executor/app/main.py
Normal file
@@ -0,0 +1,226 @@
|
|||||||
|
"""
|
||||||
|
Weibo-HotSign Sign-in Executor Service
|
||||||
|
Core service that executes sign-in tasks and handles Weibo interactions
|
||||||
|
"""
|
||||||
|
|
||||||
|
from fastapi import FastAPI, BackgroundTasks, HTTPException, status, Depends, Request
|
||||||
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
|
from fastapi.responses import JSONResponse
|
||||||
|
import uvicorn
|
||||||
|
import asyncio
|
||||||
|
import httpx
|
||||||
|
import logging
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Dict, Any, Optional
|
||||||
|
import os
|
||||||
|
|
||||||
|
from app.config import settings
|
||||||
|
from app.services.signin_service import SignInService
|
||||||
|
from app.services.weibo_client import WeiboClient
|
||||||
|
from app.models.signin_models import SignInRequest, SignInResult, TaskStatus
|
||||||
|
|
||||||
|
# Initialize FastAPI app
app = FastAPI(
    title="Weibo-HotSign Sign-in Executor",
    description="Core service for executing Weibo super topic sign-in tasks",
    version="1.0.0",
    docs_url="/docs",
    redoc_url="/redoc"
)

# Module-level logger.  The route handlers in this module call
# `logger.info`/`logger.error`, but no `logger` was defined here, which
# would raise NameError at request time.  (If a definition exists elsewhere
# in the module, this assignment is harmless — getLogger returns the same
# named logger.)
logger = logging.getLogger(__name__)

# CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # In production, specify actual origins
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Initialize services
signin_service = SignInService()
weibo_client = WeiboClient()
|
||||||
|
|
||||||
|
@app.on_event("startup")
|
||||||
|
async def startup_event():
|
||||||
|
"""Initialize executor service on startup"""
|
||||||
|
print("🚀 Weibo-HotSign Sign-in Executor starting up...")
|
||||||
|
print(f"📡 Service Documentation: http://{settings.HOST}:{settings.PORT}/docs")
|
||||||
|
print("🔧 Ready to process sign-in tasks...")
|
||||||
|
|
||||||
|
@app.on_event("shutdown")
|
||||||
|
async def shutdown_event():
|
||||||
|
"""Cleanup on shutdown"""
|
||||||
|
print("👋 Weibo-HotSign Sign-in Executor shutting down...")
|
||||||
|
|
||||||
|
@app.get("/")
|
||||||
|
async def root():
|
||||||
|
return {
|
||||||
|
"service": "Weibo-HotSign Sign-in Executor",
|
||||||
|
"status": "running",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"description": "Core sign-in execution service for Weibo super topics",
|
||||||
|
"capabilities": [
|
||||||
|
"Weibo login and verification",
|
||||||
|
"Super topic sign-in automation",
|
||||||
|
"Anti-bot protection handling",
|
||||||
|
"Proxy integration",
|
||||||
|
"Browser fingerprint simulation"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
@app.get("/health")
|
||||||
|
async def health_check():
|
||||||
|
"""Health check endpoint"""
|
||||||
|
return {
|
||||||
|
"status": "healthy",
|
||||||
|
"service": "signin-executor",
|
||||||
|
"timestamp": datetime.now().isoformat(),
|
||||||
|
"dependencies": {
|
||||||
|
"database": "connected",
|
||||||
|
"redis": "connected",
|
||||||
|
"proxy_pool": f"{settings.PROXY_POOL_URL}",
|
||||||
|
"browser_automation": f"{settings.BROWSER_AUTOMATION_URL}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@app.post("/api/v1/signin/execute", response_model=SignInResult)
|
||||||
|
async def execute_signin_task(
|
||||||
|
signin_request: SignInRequest,
|
||||||
|
background_tasks: BackgroundTasks
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Execute sign-in task for specified account
|
||||||
|
This endpoint is called by the task scheduler
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
logger.info(f"🎯 Received sign-in request for account: {signin_request.account_id}")
|
||||||
|
|
||||||
|
# Execute sign-in in background to avoid timeout
|
||||||
|
background_tasks.add_task(
|
||||||
|
signin_service.execute_signin_task,
|
||||||
|
signin_request.account_id,
|
||||||
|
signin_request.task_id
|
||||||
|
)
|
||||||
|
|
||||||
|
# Return immediate response
|
||||||
|
return SignInResult(
|
||||||
|
task_id=signin_request.task_id,
|
||||||
|
account_id=signin_request.account_id,
|
||||||
|
status="accepted",
|
||||||
|
message="Sign-in task accepted and queued for execution",
|
||||||
|
started_at=datetime.now(),
|
||||||
|
estimated_completion=None
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"❌ Failed to accept sign-in task: {e}")
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
detail=f"Failed to accept sign-in task: {str(e)}"
|
||||||
|
)
|
||||||
|
|
||||||
|
@app.get("/api/v1/signin/status/{task_id}", response_model=TaskStatus)
async def get_task_status(task_id: str):
    """Get status of a sign-in task (in-memory lookup; 404 if unknown)."""
    try:
        status_info = await signin_service.get_task_status(task_id)
        if not status_info:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Task {task_id} not found"
            )
        return status_info
    except HTTPException:
        # Re-raise unchanged so the 404 above is not converted to a 500 below.
        raise
    except Exception as e:
        logger.error(f"❌ Error getting task status: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Internal server error"
        )
|
||||||
|
|
||||||
|
@app.post("/api/v1/signin/test")
async def test_signin_capability():
    """Test sign-in service capabilities (for debugging).

    Runs one boolean probe per dependency and aggregates the results:
    "operational" only if every probe passed, otherwise "degraded".
    """
    try:
        # Test basic service connectivity; each helper returns a bool.
        tests = {
            "weibo_connectivity": await _test_weibo_connectivity(),
            "proxy_pool_access": await _test_proxy_pool(),
            "browser_automation": await _test_browser_automation(),
            "database_connection": await _test_database_connection()
        }

        return {
            "test_timestamp": datetime.now().isoformat(),
            "tests": tests,
            "overall_status": "operational" if all(tests.values()) else "degraded"
        }
    except Exception as e:
        logger.error(f"❌ Capability test failed: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Capability test failed: {str(e)}"
        )
|
||||||
|
|
||||||
|
async def _test_weibo_connectivity() -> bool:
    """Return True if https://weibo.com answers with HTTP 200.

    Follows redirects because weibo.com routinely redirects before landing
    on the final page.
    """
    try:
        async with httpx.AsyncClient(timeout=10.0) as client:
            response = await client.get("https://weibo.com", follow_redirects=True)
            return response.status_code == 200
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are
        # no longer swallowed; any transport error simply means "unreachable".
        return False
|
||||||
|
|
||||||
|
async def _test_proxy_pool() -> bool:
    """Return True if the proxy-pool service's /health endpoint answers 200."""
    try:
        # The client-level timeout covers the request; the previous extra
        # per-request `timeout=5.0` kwarg duplicated it and was dropped.
        async with httpx.AsyncClient(timeout=5.0) as client:
            response = await client.get(f"{settings.PROXY_POOL_URL}/health")
            return response.status_code == 200
    except Exception:
        # Narrowed from a bare `except:`; connection/timeout errors → unhealthy.
        return False
|
||||||
|
|
||||||
|
async def _test_browser_automation() -> bool:
    """Return True if the browser-automation service's /health answers 200."""
    try:
        # Client-level timeout is sufficient; the duplicated per-request
        # `timeout=5.0` kwarg from the original was removed.
        async with httpx.AsyncClient(timeout=5.0) as client:
            response = await client.get(f"{settings.BROWSER_AUTOMATION_URL}/health")
            return response.status_code == 200
    except Exception:
        # Narrowed from a bare `except:`; any failure → service unavailable.
        return False
|
||||||
|
|
||||||
|
async def _test_database_connection() -> bool:
    """Placeholder database connectivity probe.

    TODO(review): issue a real `SELECT 1` ping via the shared DB session
    once available; currently this always reports healthy.
    """
    # Simplified for demo: no real query is issued, so nothing can fail.
    # The original wrapped `return True` in a dead try/bare-except; removed.
    return True
|
||||||
|
|
||||||
|
@app.exception_handler(HTTPException)
async def http_exception_handler(request: Request, exc: HTTPException):
    """Global HTTP exception handler.

    Converts every HTTPException into the project's unified response
    envelope ({success, data, message, error}) instead of FastAPI's
    default {"detail": ...} shape.
    """
    return JSONResponse(
        status_code=exc.status_code,
        content={
            "success": False,
            "data": None,
            "message": exc.detail,
            "error": {
                # Machine-readable code derived from the HTTP status.
                "code": f"HTTP_{exc.status_code}",
                "details": []
            }
        }
    )
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Environment variables override the settings defaults so the same
    # image can be re-bound (host/port) without a code change.
    host = os.getenv("HOST", settings.HOST)
    port = int(os.getenv("PORT", settings.PORT))
    uvicorn.run(
        app,
        host=host,
        port=port,
        # Verbose logging only when DEBUG is enabled in settings.
        log_level="info" if not settings.DEBUG else "debug"
    )
|
||||||
89
backend/signin_executor/app/models/signin_models.py
Normal file
89
backend/signin_executor/app/models/signin_models.py
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
"""
|
||||||
|
Data models for Sign-in Executor Service
|
||||||
|
"""
|
||||||
|
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
from typing import Optional, Dict, Any, List
|
||||||
|
from datetime import datetime
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
|
class SignInRequest(BaseModel):
    """Request model for sign-in task execution (sent by the task scheduler)."""
    task_id: str = Field(..., description="Unique task identifier")
    account_id: str = Field(..., description="Weibo account identifier")
    timestamp: Optional[datetime] = Field(default_factory=datetime.now, description="Request timestamp")
    requested_by: Optional[str] = Field(default="task_scheduler", description="Request source")
|
||||||
|
|
||||||
|
class SignInResult(BaseModel):
    """Result model for sign-in task execution.

    Returned both as the immediate "accepted" acknowledgement and as the
    final success/failure record of a completed workflow.
    """
    task_id: str = Field(..., description="Task identifier")
    account_id: str = Field(..., description="Account identifier")
    status: str = Field(..., description="Task status: accepted, running, success, failed")
    message: str = Field(..., description="Human readable result message")
    started_at: datetime = Field(..., description="Task start timestamp")
    completed_at: Optional[datetime] = Field(None, description="Task completion timestamp")
    estimated_completion: Optional[datetime] = Field(None, description="Estimated completion time")
    reward_info: Optional[Dict[str, Any]] = Field(None, description="Reward details like exp, credits")
    error_message: Optional[str] = Field(None, description="Error details if failed")
    signed_topics: Optional[List[str]] = Field(None, description="List of successfully signed topics")
    total_topics: Optional[int] = Field(None, description="Total number of topics attempted")
|
||||||
|
|
||||||
|
class TaskStatus(BaseModel):
    """Status model for tracking sign-in task progress.

    Mirrors the lifecycle driven by SignInService: step names migrate from
    `steps_remaining` to `steps_completed` as the workflow advances.
    """
    task_id: str = Field(..., description="Task identifier")
    account_id: str = Field(..., description="Account identifier")
    status: str = Field(..., description="Current status: pending, running, success, failed")
    progress_percentage: int = Field(default=0, ge=0, le=100, description="Progress percentage")
    current_step: Optional[str] = Field(None, description="Current execution step")
    steps_completed: List[str] = Field(default_factory=list, description="Completed steps")
    steps_remaining: List[str] = Field(default_factory=list, description="Remaining steps")
    started_at: datetime = Field(..., description="Start timestamp")
    updated_at: datetime = Field(default_factory=datetime.now, description="Last update timestamp")
    estimated_completion: Optional[datetime] = Field(None, description="Estimated completion")
    # Added field: the executor's failure path assigns
    # `task_status.error_message = str(e)`, which raises on pydantic models
    # unless the field is declared. Optional with None default keeps this
    # change backward-compatible for all existing constructors.
    error_message: Optional[str] = Field(None, description="Error details when status is 'failed'")
|
||||||
|
|
||||||
|
class WeiboAccount(BaseModel):
    """Weibo account information for sign-in.

    Cookies are stored encrypted (AES, per the executor's decrypt helper)
    together with their initialization vector.
    """
    id: UUID = Field(..., description="Account UUID")
    user_id: UUID = Field(..., description="Owner user UUID")
    weibo_user_id: str = Field(..., description="Weibo user ID")
    remark: Optional[str] = Field(None, description="User remark")
    encrypted_cookies: str = Field(..., description="Encrypted Weibo cookies")
    iv: str = Field(..., description="Encryption initialization vector")
    status: str = Field(default="active", description="Account status: active, invalid_cookie, banned")
    last_checked_at: Optional[datetime] = Field(None, description="Last validation timestamp")
|
||||||
|
|
||||||
|
class SignInLog(BaseModel):
    """Sign-in operation log entry (one row per topic sign-in attempt)."""
    id: Optional[int] = Field(None, description="Log entry ID")
    account_id: UUID = Field(..., description="Account UUID")
    topic_title: Optional[str] = Field(None, description="Signed topic title")
    status: str = Field(..., description="Sign-in status")
    reward_info: Optional[Dict[str, Any]] = Field(None, description="Reward information")
    error_message: Optional[str] = Field(None, description="Error details")
    signed_at: datetime = Field(default_factory=datetime.now, description="Sign-in timestamp")
    execution_time_ms: Optional[int] = Field(None, description="Execution time in milliseconds")
|
||||||
|
|
||||||
|
class WeiboSuperTopic(BaseModel):
    """Weibo super topic information, as discovered from the account's list."""
    id: str = Field(..., description="Topic ID")
    title: str = Field(..., description="Topic title")
    url: str = Field(..., description="Topic URL")
    is_signed: bool = Field(default=False, description="Whether already signed")
    sign_url: Optional[str] = Field(None, description="Sign-in API URL")
    reward_exp: Optional[int] = Field(None, description="Experience points reward")
    reward_credit: Optional[int] = Field(None, description="Credit points reward")
|
||||||
|
|
||||||
|
class AntiBotConfig(BaseModel):
    """Anti-bot protection configuration used to mimic human behavior."""
    random_delay_min: float = Field(default=1.0, description="Minimum random delay seconds")
    random_delay_max: float = Field(default=3.0, description="Maximum random delay seconds")
    user_agent_rotation: bool = Field(default=True, description="Enable user agent rotation")
    proxy_enabled: bool = Field(default=True, description="Enable proxy usage")
    fingerprint_simulation: bool = Field(default=True, description="Enable browser fingerprint simulation")
|
||||||
|
|
||||||
|
class BrowserAutomationRequest(BaseModel):
    """Request for the (external) browser automation service."""
    target_url: str = Field(..., description="Target URL to automate")
    action_type: str = Field(..., description="Action type: signin, extract, click")
    context_data: Optional[Dict[str, Any]] = Field(None, description="Additional context data")
    timeout_seconds: int = Field(default=30, description="Operation timeout")
|
||||||
271
backend/signin_executor/app/services/signin_service.py
Normal file
271
backend/signin_executor/app/services/signin_service.py
Normal file
@@ -0,0 +1,271 @@
|
|||||||
|
"""
|
||||||
|
Core sign-in business logic service
|
||||||
|
Handles Weibo super topic sign-in operations
|
||||||
|
"""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import httpx
|
||||||
|
import logging
|
||||||
|
import random
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from typing import Dict, Any, List, Optional
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
|
from app.config import settings
|
||||||
|
from app.models.signin_models import SignInRequest, SignInResult, TaskStatus, WeiboAccount, WeiboSuperTopic, AntiBotConfig
|
||||||
|
from app.services.weibo_client import WeiboClient
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
class SignInService:
    """Main service for handling sign-in operations.

    Orchestrates the full workflow for one account (validate → session
    setup → topic discovery → per-topic sign-in → result recording) and
    tracks per-task progress in the in-memory `active_tasks` map.
    NOTE(review): state is process-local; a worker restart loses it.
    """

    def __init__(self):
        self.weibo_client = WeiboClient()
        # task_id -> live progress snapshot, read by the status endpoint.
        self.active_tasks: Dict[str, TaskStatus] = {}
        self.antibot_config = AntiBotConfig(
            random_delay_min=settings.RANDOM_DELAY_MIN,
            random_delay_max=settings.RANDOM_DELAY_MAX,
            user_agent_rotation=settings.USER_AGENT_ROTATION,
            proxy_enabled=True,
            fingerprint_simulation=True
        )

    def _complete_step(self, task_status: TaskStatus, step: str, progress: int) -> None:
        """Move `step` from remaining to completed and set overall progress."""
        task_status.steps_completed.append(step)
        task_status.steps_remaining.remove(step)
        task_status.progress_percentage = progress

    async def execute_signin_task(self, account_id: str, task_id: str):
        """
        Execute complete sign-in workflow for an account.

        Returns a SignInResult (status "success" or "failed"); never raises,
        since it runs as a detached FastAPI background task.
        """
        logger.info(f"🎯 Starting sign-in execution for account {account_id}, task {task_id}")

        # Initialize and publish the task status for the status endpoint.
        task_status = TaskStatus(
            task_id=task_id,
            account_id=account_id,
            status="running",
            progress_percentage=0,
            current_step="initializing",
            steps_completed=[],
            steps_remaining=[
                "validate_account",
                "setup_session",
                "get_super_topics",
                "execute_signin",
                "record_results"
            ],
            started_at=datetime.now()
        )
        self.active_tasks[task_id] = task_status

        try:
            # Step 1: Validate account exists and is active.
            task_status.current_step = "validate_account"
            await self._update_task_progress(task_id, 10)

            account = await self._get_account_info(account_id)
            if not account or account.status != "active":
                raise Exception(f"Account {account_id} not found or inactive")

            self._complete_step(task_status, "validate_account", 20)

            # Step 2: Setup session with proxy and fingerprint protections.
            task_status.current_step = "setup_session"
            await self._apply_anti_bot_protection()
            self._complete_step(task_status, "setup_session", 30)

            # Step 3: Fetch the account's super-topic list.
            task_status.current_step = "get_super_topics"
            await self._update_task_progress(task_id, 40)

            super_topics = await self._get_super_topics_list(account)
            if not super_topics:
                # Not fatal — the workflow completes with zero topics signed.
                logger.warning(f"No super topics found for account {account_id}")

            self._complete_step(task_status, "get_super_topics", 50)

            # Step 4: Sign each topic in turn.
            task_status.current_step = "execute_signin"
            signin_results = await self._execute_topic_signin(account, super_topics, task_id)
            self._complete_step(task_status, "execute_signin", 80)

            # Step 5: Assemble the final result record.
            task_status.current_step = "record_results"
            await self._update_task_progress(task_id, 90)

            result = SignInResult(
                task_id=task_id,
                account_id=account_id,
                status="success",
                message=f"Successfully processed {len(signin_results['signed'])} topics",
                started_at=task_status.started_at,
                completed_at=datetime.now(),
                signed_topics=signin_results['signed'],
                total_topics=len(super_topics) if super_topics else 0,
                reward_info={
                    "topics_signed": len(signin_results['signed']),
                    "topics_already_signed": len(signin_results['already_signed']),
                    "errors": len(signin_results['errors'])
                }
            )

            task_status.status = "success"
            task_status.progress_percentage = 100
            task_status.current_step = "completed"

            logger.info(f"✅ Sign-in task {task_id} completed successfully")
            return result

        except Exception as e:
            logger.error(f"❌ Sign-in task {task_id} failed: {e}")

            # Mark the tracked status as failed. The original code also
            # assigned `task_status.error_message = str(e)`, but TaskStatus
            # declares no such field, so pydantic raised inside this handler
            # and the failure escaped the background task. The error now
            # travels only via the returned SignInResult.
            tracked = self.active_tasks.get(task_id)
            if tracked is not None:
                tracked.status = "failed"

            # `task_status` is always bound before the try block, so it is
            # safe to reference its start time directly here.
            return SignInResult(
                task_id=task_id,
                account_id=account_id,
                status="failed",
                message=f"Sign-in failed: {str(e)}",
                started_at=task_status.started_at,
                completed_at=datetime.now(),
                error_message=str(e)
            )

    async def get_task_status(self, task_id: str) -> Optional[TaskStatus]:
        """Get current status of a sign-in task (None if unknown)."""
        return self.active_tasks.get(task_id)

    async def _update_task_progress(self, task_id: str, percentage: int):
        """Update a tracked task's progress percentage and last-update time."""
        if task_id in self.active_tasks:
            self.active_tasks[task_id].progress_percentage = percentage
            self.active_tasks[task_id].updated_at = datetime.now()

    async def _get_account_info(self, account_id: str) -> Optional[WeiboAccount]:
        """Get Weibo account information.

        TODO(review): mock implementation — replace with a real query via
        the shared DB session once available. Returns None on any error
        (including a malformed account_id UUID).
        """
        try:
            return WeiboAccount(
                id=UUID(account_id),
                user_id=UUID("12345678-1234-5678-9012-123456789012"),
                weibo_user_id="1234567890",
                remark="Demo Account",
                encrypted_cookies="mock_encrypted_cookies",
                iv="mock_iv_16_bytes",
                status="active",
                last_checked_at=datetime.now() - timedelta(hours=1)
            )
        except Exception as e:
            logger.error(f"Error fetching account {account_id}: {e}")
            return None

    async def _apply_anti_bot_protection(self):
        """Apply anti-bot protection measures before hitting Weibo."""
        # Random delay to mimic human behavior.
        delay = random.uniform(
            self.antibot_config.random_delay_min,
            self.antibot_config.random_delay_max
        )
        logger.debug(f"Applying random delay: {delay:.2f}s")
        await asyncio.sleep(delay)

        # Additional anti-bot measures would go here:
        # - User agent rotation
        # - Proxy selection
        # - Browser fingerprint simulation
        # - Request header randomization

    async def _get_super_topics_list(self, account: WeiboAccount) -> List[WeiboSuperTopic]:
        """Get list of super topics for an account.

        TODO(review): mock implementation — real version should fetch the
        list from the Weibo API via WeiboClient. Returns [] on error.
        """
        try:
            # Simulate API call delay.
            await asyncio.sleep(1)

            return [
                WeiboSuperTopic(
                    id="topic_001",
                    title="Python编程",
                    url="https://weibo.com/p/100808xxx",
                    is_signed=False,
                    sign_url="https://weibo.com/p/aj/general/button",
                    reward_exp=2,
                    reward_credit=1
                ),
                WeiboSuperTopic(
                    id="topic_002",
                    title="人工智能",
                    url="https://weibo.com/p/100808yyy",
                    is_signed=False,
                    sign_url="https://weibo.com/p/aj/general/button",
                    reward_exp=2,
                    reward_credit=1
                ),
                WeiboSuperTopic(
                    id="topic_003",
                    title="机器学习",
                    url="https://weibo.com/p/100808zzz",
                    is_signed=True,  # Already signed
                    sign_url="https://weibo.com/p/aj/general/button",
                    reward_exp=2,
                    reward_credit=1
                )
            ]
        except Exception as e:
            logger.error(f"Error fetching super topics: {e}")
            return []

    async def _execute_topic_signin(self, account: WeiboAccount, topics: List[WeiboSuperTopic], task_id: str) -> Dict[str, List[str]]:
        """Execute sign-in for each super topic.

        Returns a dict with three lists: 'signed' (topic titles signed now),
        'already_signed' (skipped), and 'errors' (human-readable messages).
        A failure on one topic never aborts the remaining topics.
        """
        signed = []
        already_signed = []
        errors = []

        for topic in topics:
            try:
                # Small randomized pause between requests (anti-bot pacing).
                await asyncio.sleep(random.uniform(0.5, 1.5))

                if topic.is_signed:
                    already_signed.append(topic.title)
                    continue

                success = await self.weibo_client.sign_super_topic(
                    account=account,
                    topic=topic,
                    task_id=task_id
                )

                if success:
                    signed.append(topic.title)
                    logger.info(f"✅ Successfully signed topic: {topic.title}")
                else:
                    errors.append(f"Failed to sign topic: {topic.title}")

            except Exception as e:
                error_msg = f"Error signing topic {topic.title}: {str(e)}"
                logger.error(error_msg)
                errors.append(error_msg)

        return {
            "signed": signed,
            "already_signed": already_signed,
            "errors": errors
        }
|
||||||
167
backend/signin_executor/app/services/weibo_client.py
Normal file
167
backend/signin_executor/app/services/weibo_client.py
Normal file
@@ -0,0 +1,167 @@
|
|||||||
|
"""
|
||||||
|
Weibo API Client
|
||||||
|
Handles all interactions with Weibo.com, including login, sign-in, and data fetching
|
||||||
|
"""
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import random
|
||||||
|
from typing import Dict, Any, Optional, List
|
||||||
|
|
||||||
|
from app.config import settings
|
||||||
|
from app.models.signin_models import WeiboAccount, WeiboSuperTopic
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
class WeiboClient:
    """Client for interacting with Weibo API.

    Handles cookie verification, super-topic discovery and per-topic
    sign-in. Several methods are mock implementations to be replaced with
    real Weibo API calls (marked with TODO(review) below).
    """

    def __init__(self):
        # Default headers sent with every request; a fixed desktop-Chrome UA.
        self.base_headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
            "Accept": "application/json, text/plain, */*",
            "Accept-Language": "en-US,en;q=0.9,zh-CN;q=0.8,zh;q=0.7",
            "Connection": "keep-alive",
            "Referer": "https://weibo.com/"
        }

    async def verify_cookies(self, account: WeiboAccount) -> bool:
        """Verify if Weibo cookies are still valid.

        Loads an authenticated-only page and checks for a marker string
        ("我的首页" — "my homepage") that only appears when logged in.
        Returns False on any error.
        """
        try:
            # Decrypt cookies before attaching them to the session.
            cookies = self._decrypt_cookies(account.encrypted_cookies, account.iv)

            async with httpx.AsyncClient(cookies=cookies, headers=self.base_headers) as client:
                response = await client.get("https://weibo.com/mygroups", follow_redirects=True)

                if response.status_code == 200 and "我的首页" in response.text:
                    logger.info(f"Cookies for account {account.weibo_user_id} are valid")
                    return True
                else:
                    logger.warning(f"Cookies for account {account.weibo_user_id} are invalid")
                    return False
        except Exception as e:
            logger.error(f"Error verifying cookies: {e}")
            return False

    async def get_super_topics(self, account: WeiboAccount) -> List[WeiboSuperTopic]:
        """Get list of super topics for an account.

        TODO(review): mock implementation — the real version involves
        authenticated Weibo API calls. Returns [] on error.
        """
        try:
            # Simulate API call delay.
            await asyncio.sleep(random.uniform(1.0, 2.0))

            # Return mock data.
            return [
                WeiboSuperTopic(id="topic_001", title="Python编程", url="...", is_signed=False),
                WeiboSuperTopic(id="topic_002", title="人工智能", url="...", is_signed=False),
                WeiboSuperTopic(id="topic_003", title="机器学习", url="...", is_signed=True)
            ]
        except Exception as e:
            logger.error(f"Error fetching super topics: {e}")
            return []

    async def sign_super_topic(self, account: WeiboAccount, topic: WeiboSuperTopic, task_id: str) -> bool:
        """
        Execute sign-in for a single super topic.

        Returns True on success or when the topic was already signed today
        (Weibo code "382004"); False on any other response or exception.
        TODO(review): the HTTP call itself is currently mocked.
        """
        try:
            # Decrypt cookies for the authenticated request.
            cookies = self._decrypt_cookies(account.encrypted_cookies, account.iv)

            # Prepare request payload mirroring the web client's form data.
            payload = {
                "ajwvr": "6",
                "api": "http://i.huati.weibo.com/aj/super/checkin",
                "id": topic.id,
                "location": "page_100808_super_index",
                "refer_flag": "100808_-_1",
                "refer_lflag": "100808_-_1",
                "ua": self.base_headers["User-Agent"],
                "is_new": "1",
                "is_from_ad": "0",
                "ext": "mi_898_1_0_0"
            }

            # In a real scenario, we might need to call the browser automation
            # service to get signed parameters or handle JS challenges.

            # Simulate API call.
            await asyncio.sleep(random.uniform(0.5, 1.5))

            # Mock response - assume success ("100000" is Weibo's OK code).
            response_data = {
                "code": "100000",
                "msg": "签到成功",
                "data": {
                    "tip": "签到成功",
                    "alert_title": "签到成功",
                    "alert_subtitle": "恭喜你成为今天第12345位签到的人",
                    "reward": {"exp": 2, "credit": 1}
                }
            }

            if response_data.get("code") == "100000":
                logger.info(f"Successfully signed topic: {topic.title}")
                return True
            elif response_data.get("code") == "382004":
                # Already signed today — counted as success for idempotence.
                logger.info(f"Topic {topic.title} already signed today")
                return True  # Treat as success
            else:
                logger.error(f"Failed to sign topic {topic.title}: {response_data.get('msg')}")
                return False

        except Exception as e:
            logger.error(f"Exception signing topic {topic.title}: {e}")
            return False

    def _decrypt_cookies(self, encrypted_cookies: str, iv: str) -> Dict[str, str]:
        """
        Decrypt cookies using AES-256-GCM.

        TODO(review): mock implementation — the real version must use the
        shared crypto utils (pycryptodome is already in requirements).
        Returns {} on failure.
        """
        try:
            # Mock implementation - return dummy cookies.
            return {
                "SUB": "_2A25z...",
                "SUBP": "0033Wr...",
                "ALF": "16...",
                "SSOLoginState": "16...",
                "SCF": "...",
                "UN": "testuser"
            }
        except Exception as e:
            logger.error(f"Failed to decrypt cookies: {e}")
            return {}

    async def get_proxy(self) -> Optional[Dict[str, str]]:
        """Get a proxy from the proxy pool service.

        Returns a mapping in httpx mounts style ("http://"/"https://" keys)
        or None when the pool is unreachable or returns a non-200 status.
        """
        try:
            async with httpx.AsyncClient(timeout=5.0) as client:
                response = await client.get(f"{settings.PROXY_POOL_URL}/get")
                if response.status_code == 200:
                    proxy_info = response.json()
                    return {
                        "http://": f"http://{proxy_info['proxy']}",
                        "https://": f"https://{proxy_info['proxy']}"
                    }
                else:
                    return None
        except Exception as e:
            logger.error(f"Failed to get proxy: {e}")
            return None

    async def get_browser_fingerprint(self) -> Dict[str, Any]:
        """Get a browser fingerprint from the generator service.

        TODO(review): mock implementation — returns a static fingerprint.
        Returns {} on failure.
        """
        try:
            # Mock implementation.
            return {
                "user_agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
                "screen_resolution": "1920x1080",
                "timezone": "Asia/Shanghai",
                "plugins": ["PDF Viewer", "Chrome PDF Viewer", "Native Client"]
            }
        except Exception as e:
            logger.error(f"Failed to get browser fingerprint: {e}")
            return {}
|
||||||
23
backend/signin_executor/requirements.txt
Normal file
23
backend/signin_executor/requirements.txt
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
# Weibo-HotSign Sign-in Executor Service Requirements
|
||||||
|
# Web Framework
|
||||||
|
fastapi==0.104.1
|
||||||
|
uvicorn[standard]==0.24.0
|
||||||
|
|
||||||
|
# Database
|
||||||
|
sqlalchemy==2.0.23
|
||||||
|
aiomysql==0.2.0
|
||||||
|
PyMySQL==1.1.0
|
||||||
|
redis==5.0.1
|
||||||
|
|
||||||
|
# Configuration
|
||||||
|
pydantic-settings==2.0.3
|
||||||
|
pydantic==2.5.0
|
||||||
|
|
||||||
|
# HTTP Client
|
||||||
|
httpx==0.25.2
|
||||||
|
|
||||||
|
# Utilities
|
||||||
|
python-dotenv==1.0.0
|
||||||
|
|
||||||
|
# Security (for cookie decryption)
|
||||||
|
pycryptodome==3.19.0
|
||||||
30
backend/task_scheduler/Dockerfile
Normal file
30
backend/task_scheduler/Dockerfile
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
# Weibo-HotSign Task Scheduler Service Dockerfile
|
||||||
|
FROM python:3.11-slim
|
||||||
|
|
||||||
|
# Set working directory
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Install system dependencies
|
||||||
|
RUN apt-get update && apt-get install -y \
|
||||||
|
gcc \
|
||||||
|
default-libmysqlclient-dev \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Copy requirements first for better caching
|
||||||
|
COPY requirements.txt .
|
||||||
|
|
||||||
|
# Install Python dependencies
|
||||||
|
RUN pip install --no-cache-dir -r requirements.txt
|
||||||
|
|
||||||
|
# Copy application code
|
||||||
|
COPY app/ ./app/
|
||||||
|
|
||||||
|
# Create non-root user for security
|
||||||
|
RUN groupadd -r appuser && useradd -r -g appuser appuser
|
||||||
|
USER appuser
|
||||||
|
|
||||||
|
# Expose port (optional, as scheduler doesn't need external access)
|
||||||
|
# EXPOSE 8000
|
||||||
|
|
||||||
|
# Start Celery Beat scheduler
|
||||||
|
CMD ["celery", "-A", "app.celery_app", "beat", "--loglevel=info"]
|
||||||
97
backend/task_scheduler/app/celery_app.py
Normal file
97
backend/task_scheduler/app/celery_app.py
Normal file
@@ -0,0 +1,97 @@
|
|||||||
|
"""
|
||||||
|
Weibo-HotSign Task Scheduler Service
|
||||||
|
Celery Beat configuration for scheduled sign-in tasks
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
from celery import Celery
|
||||||
|
from celery.schedules import crontab
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
|
||||||
|
from sqlalchemy.orm import sessionmaker
|
||||||
|
from sqlalchemy import select
|
||||||
|
import asyncio
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
# Absolute import: the original `from ..config import settings` attempts a
# relative import beyond the top-level `app` package (the module runs as
# `app.celery_app` per the Dockerfile CMD) and raises ImportError. The rest
# of this file already uses absolute `app.*` imports.
from app.config import settings

# Create Celery app
celery_app = Celery(
    "weibo_hot_sign_scheduler",
    broker=settings.CELERY_BROKER_URL,
    backend=settings.CELERY_RESULT_BACKEND,
    include=["app.tasks.signin_tasks"]
)

# Celery configuration: JSON-only serialization, Shanghai timezone for
# beat crontabs, short beat loop so schedule edits are picked up quickly.
celery_app.conf.update(
    task_serializer="json",
    accept_content=["json"],
    result_serializer="json",
    timezone="Asia/Shanghai",
    enable_utc=True,
    beat_schedule_filename="celerybeat-schedule",
    beat_max_loop_interval=5,
)

# Async database engine/session factory used by scheduler-side DB reads.
engine = create_async_engine(
    settings.DATABASE_URL,
    echo=settings.DEBUG,
    pool_size=10,
    max_overflow=20
)

AsyncSessionLocal = sessionmaker(
    engine,
    class_=AsyncSession,
    expire_on_commit=False
)
|
||||||
|
|
||||||
|
async def get_db():
    """Yield an AsyncSession for task-scheduler database access.

    `async with AsyncSessionLocal()` already closes the session when the
    block exits, so the original explicit try/finally `session.close()` was
    redundant (close is idempotent, but the extra call added nothing).
    """
    async with AsyncSessionLocal() as session:
        yield session
|
||||||
|
|
||||||
|
class TaskSchedulerService:
    """Service to manage scheduled tasks from database"""

    def __init__(self):
        # Reuse the module-level engine rather than building a new pool.
        self.engine = engine

    async def load_scheduled_tasks(self):
        """Load enabled tasks from database and schedule them.

        Returns the list of enabled Task rows; on any failure logs and
        returns [] (best-effort — the scheduler keeps running).
        """
        # Deferred import — presumably to avoid import-order problems at
        # Celery startup; TODO confirm and consider moving to module level.
        from app.models.task_models import Task

        try:
            async with AsyncSessionLocal() as session:
                # Query all enabled tasks
                stmt = select(Task).where(Task.is_enabled == True)
                result = await session.execute(stmt)
                tasks = result.scalars().all()

                print(f"📅 Loaded {len(tasks)} enabled tasks from database")

                # Here we would dynamically add tasks to Celery Beat
                # For now, we'll use static configuration in celery_config.py
                return tasks

        except Exception as e:
            print(f"❌ Error loading tasks from database: {e}")
            return []
|
||||||
|
|
||||||
|
# Synchronous wrapper for async function
def sync_load_tasks():
    """Synchronously load enabled tasks (for non-async callers).

    Uses :func:`asyncio.run`, which creates a fresh event loop and always
    closes it. The previous hand-rolled ``new_event_loop``/``set_event_loop``
    pattern left the *closed* loop installed as the thread's current loop,
    breaking any later ``asyncio.get_event_loop()`` call in the same thread.
    """
    service = TaskSchedulerService()
    return asyncio.run(service.load_scheduled_tasks())
|
||||||
|
|
||||||
|
# Import task modules to register them
|
||||||
|
from app.tasks import signin_tasks
|
||||||
47
backend/task_scheduler/app/config.py
Normal file
47
backend/task_scheduler/app/config.py
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
"""
|
||||||
|
Configuration for Task Scheduler Service
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
from pydantic_settings import BaseSettings
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
class Settings(BaseSettings):
    """Task Scheduler settings.

    Every field falls back to an os.getenv default, and pydantic-settings
    will additionally read the environment / .env file, so environment
    variables always win over the literals below.
    """

    # Database settings
    # NOTE(review): a real-looking credential and host are hard-coded as the
    # fallback default — these should live only in the environment/.env,
    # never in source control.
    DATABASE_URL: str = os.getenv(
        "DATABASE_URL",
        "mysql+aiomysql://weibo:123456789@43.134.68.207/weibo"
    )

    # Celery settings (broker carries tasks; backend stores results)
    CELERY_BROKER_URL: str = os.getenv("CELERY_BROKER_URL", "redis://redis:6379/0")
    CELERY_RESULT_BACKEND: str = os.getenv("CELERY_RESULT_BACKEND", "redis://redis:6379/0")

    # Task execution settings
    MAX_CONCURRENT_TASKS: int = int(os.getenv("MAX_CONCURRENT_TASKS", "10"))
    TASK_TIMEOUT_SECONDS: int = int(os.getenv("TASK_TIMEOUT_SECONDS", "300"))

    # Scheduler settings
    SCHEDULER_TIMEZONE: str = os.getenv("SCHEDULER_TIMEZONE", "Asia/Shanghai")
    BEAT_SCHEDULE_FILE: str = os.getenv("BEAT_SCHEDULE_FILE", "/tmp/celerybeat-schedule")

    # Retry settings (consumed by execute_signin_task's retry logic)
    MAX_RETRY_ATTEMPTS: int = int(os.getenv("MAX_RETRY_ATTEMPTS", "3"))
    RETRY_DELAY_SECONDS: int = int(os.getenv("RETRY_DELAY_SECONDS", "60"))

    # Logging
    LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")
    # Only the exact strings "true"/"True"/... enable debug; anything else is False.
    DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true"

    # Service URLs (docker-compose service names by default)
    SIGNIN_EXECUTOR_URL: str = os.getenv("SIGNIN_EXECUTOR_URL", "http://signin-executor:8000")
    PROXY_POOL_URL: str = os.getenv("PROXY_POOL_URL", "http://proxy-pool:8080")
    BROWSER_AUTOMATION_URL: str = os.getenv("BROWSER_AUTOMATION_URL", "http://browser-automation:3001")

    class Config:
        # Field names must match env-var names exactly; values may also
        # come from a local .env file.
        case_sensitive = True
        env_file = ".env"


# Module-level singleton imported by the rest of the service.
settings = Settings()
|
||||||
196
backend/task_scheduler/app/tasks/signin_tasks.py
Normal file
196
backend/task_scheduler/app/tasks/signin_tasks.py
Normal file
@@ -0,0 +1,196 @@
|
|||||||
|
"""
|
||||||
|
Weibo-HotSign Sign-in Task Definitions
|
||||||
|
Celery tasks for scheduled sign-in operations
|
||||||
|
"""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import httpx
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Dict, Any, Optional
|
||||||
|
|
||||||
|
from celery import current_task
|
||||||
|
from ..celery_app import celery_app
|
||||||
|
from ..config import settings
|
||||||
|
|
||||||
|
# Configure logger
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
@celery_app.task(bind=True, max_retries=3, default_retry_delay=60)
def execute_signin_task(self, task_id: str, account_id: str, cron_expression: str):
    """
    Execute scheduled sign-in task for a specific account
    This task is triggered by Celery Beat based on cron schedule

    Args:
        task_id: identifier reported in logs and progress metadata.
        account_id: account the sign-in is performed for.
        cron_expression: schedule string passed through by the caller
            (not read inside this function).

    Returns:
        The JSON payload returned by the signin-executor service.
    """
    logger.info(f"🎯 Starting sign-in task {task_id} for account {account_id}")

    try:
        # Publish an early PROGRESS state so clients polling this task id
        # see a status before the HTTP call completes.
        current_task.update_state(
            state="PROGRESS",
            meta={
                "current": 10,
                "total": 100,
                "status": "Initializing sign-in process...",
                "account_id": account_id
            }
        )

        # Delegate the actual sign-in to the executor service over HTTP.
        result = _call_signin_executor(account_id, task_id)

        # Record SUCCESS with rich metadata (Celery would also set SUCCESS
        # on return, but without the progress/result fields).
        current_task.update_state(
            state="SUCCESS",
            meta={
                "current": 100,
                "total": 100,
                "status": "Sign-in completed successfully",
                "result": result,
                "account_id": account_id
            }
        )

        logger.info(f"✅ Sign-in task {task_id} completed successfully for account {account_id}")
        return result

    except Exception as exc:
        logger.error(f"❌ Sign-in task {task_id} failed for account {account_id}: {exc}")

        # Retry logic
        # NOTE(review): the decorator's max_retries=3 and
        # settings.MAX_RETRY_ATTEMPTS are two independent limits — confirm
        # they are intended to stay in sync.
        if self.request.retries < settings.MAX_RETRY_ATTEMPTS:
            logger.info(f"🔄 Retrying task {task_id} (attempt {self.request.retries + 1})")
            raise self.retry(exc=exc, countdown=settings.RETRY_DELAY_SECONDS)

        # Final failure: surface the error details in the task state, then
        # re-raise so Celery marks the task failed.
        current_task.update_state(
            state="FAILURE",
            meta={
                "current": 100,
                "total": 100,
                "status": f"Task failed after {settings.MAX_RETRY_ATTEMPTS} attempts",
                "error": str(exc),
                "account_id": account_id
            }
        )
        raise exc
|
||||||
|
|
||||||
|
@celery_app.task
def schedule_daily_signin():
    """Fan out one sign-in task per account for the daily schedule.

    Example of a Celery-Beat-driven task; returns a summary of what was
    submitted (or failed to submit) for each account.
    """
    logger.info("📅 Executing daily sign-in schedule")

    # This would typically query database for accounts that need daily sign-in
    # For demo purposes, we'll simulate processing multiple accounts
    mock_accounts = ["account_1", "account_2", "account_3"]  # Mock account IDs
    outcomes = []

    for acct in mock_accounts:
        try:
            # One independent Celery task per account.
            async_result = execute_signin_task.delay(
                task_id=f"daily_{datetime.now().strftime('%Y%m%d_%H%M%S')}",
                account_id=acct,
                cron_expression="0 8 * * *"  # Daily at 8 AM
            )
        except Exception as e:
            logger.error(f"Failed to submit task for account {acct}: {e}")
            outcomes.append({
                "account_id": acct,
                "status": "failed",
                "error": str(e)
            })
        else:
            outcomes.append({
                "account_id": acct,
                "task_id": async_result.id,
                "status": "submitted"
            })

    return {
        "scheduled_date": datetime.now().isoformat(),
        "accounts_processed": len(mock_accounts),
        "results": outcomes
    }
|
||||||
|
|
||||||
|
@celery_app.task
def process_pending_tasks():
    """Process pending sign-in tasks from database.

    Currently a mock: returns a canned summary. Can be called manually or
    via an external trigger.
    """
    logger.info("🔄 Processing pending sign-in tasks from database")

    # In real implementation, this would:
    # 1. Query database for tasks that need to be executed
    # 2. Check if they're due based on cron expressions
    # 3. Submit them to Celery for execution
    try:
        # Mock implementation - query enabled tasks
        summary = {
            "processed_at": datetime.now().isoformat(),
            "tasks_found": 5,  # Mock number
            "tasks_submitted": 3,
            "tasks_skipped": 2,
            "status": "completed"
        }

        logger.info(f"✅ Processed pending tasks: {summary}")
        return summary
    except Exception as e:
        logger.error(f"❌ Failed to process pending tasks: {e}")
        raise
|
||||||
|
|
||||||
|
def _call_signin_executor(account_id: str, task_id: str) -> Dict[str, Any]:
    """POST to the signin-executor service and return its JSON response.

    Raises a plain Exception for non-200 responses or network failures;
    both paths are logged before the exception propagates.
    """
    payload = {
        "task_id": task_id,
        "account_id": account_id,
        "timestamp": datetime.now().isoformat(),
        "requested_by": "task_scheduler"
    }

    try:
        # Short-lived client per call; 30s covers the executor's work.
        with httpx.Client(timeout=30.0) as http:
            response = http.post(
                f"{settings.SIGNIN_EXECUTOR_URL}/api/v1/signin/execute",
                json=payload
            )

            if response.status_code != 200:
                raise Exception(f"Sign-in executor returned error: {response.status_code} - {response.text}")

            body = response.json()
            logger.info(f"Sign-in executor response: {body}")
            return body

    except httpx.RequestError as e:
        # Connection/timeout problems get wrapped with a clearer message.
        logger.error(f"Network error calling signin executor: {e}")
        raise Exception(f"Failed to connect to signin executor: {e}")
    except Exception as e:
        # Non-200 and JSON-decoding errors land here: log, then re-raise.
        logger.error(f"Error calling signin executor: {e}")
        raise
|
||||||
|
|
||||||
|
# Periodic task definitions for Celery Beat
from celery.schedules import crontab  # noqa: E402  (needed for the cron entry below)

celery_app.conf.beat_schedule = {
    "daily-signin-at-8am": {
        "task": "app.tasks.signin_tasks.schedule_daily_signin",
        # BUG FIX: a plain dict ({"hour": 8, "minute": 0}) is not a valid
        # Celery schedule and makes Beat fail when loading the entry; the
        # correct spelling for "daily at 08:00" is a crontab schedule.
        "schedule": crontab(hour=8, minute=0),
    },
    "process-pending-every-15-minutes": {
        "task": "app.tasks.signin_tasks.process_pending_tasks",
        "schedule": 900.0,  # Every 15 minutes
    },
}
|
||||||
18
backend/task_scheduler/requirements.txt
Normal file
18
backend/task_scheduler/requirements.txt
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
# Weibo-HotSign Task Scheduler Service Requirements
|
||||||
|
# Task Queue
|
||||||
|
celery==5.3.6
|
||||||
|
redis==5.0.1
|
||||||
|
|
||||||
|
# Database
|
||||||
|
sqlalchemy==2.0.23
|
||||||
|
aiomysql==0.2.0
|
||||||
|
PyMySQL==1.1.0
|
||||||
|
|
||||||
|
# Configuration
|
||||||
|
pydantic-settings==2.0.3
|
||||||
|
|
||||||
|
# HTTP Client
|
||||||
|
httpx==0.25.2
|
||||||
|
|
||||||
|
# Utilities
|
||||||
|
python-dotenv==1.0.0
|
||||||
0
backend/tests/__init__.py
Normal file
0
backend/tests/__init__.py
Normal file
BIN
backend/tests/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
backend/tests/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/tests/__pycache__/conftest.cpython-311-pytest-8.3.3.pyc
Normal file
BIN
backend/tests/__pycache__/conftest.cpython-311-pytest-8.3.3.pyc
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
86
backend/tests/conftest.py
Normal file
86
backend/tests/conftest.py
Normal file
@@ -0,0 +1,86 @@
|
|||||||
|
"""
|
||||||
|
Shared test fixtures for Weibo-HotSign backend tests.
|
||||||
|
|
||||||
|
Uses SQLite in-memory for database tests and a simple dict-based
|
||||||
|
fake Redis for refresh-token tests, so no external services are needed.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
from typing import AsyncGenerator
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import pytest_asyncio
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
|
||||||
|
from sqlalchemy.orm import sessionmaker
|
||||||
|
|
||||||
|
# Ensure backend/ is on sys.path so `shared` and `app` imports work
|
||||||
|
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
|
||||||
|
|
||||||
|
# --------------- override shared settings BEFORE any app import ---------------
|
||||||
|
os.environ["DATABASE_URL"] = "sqlite+aiosqlite://"
|
||||||
|
os.environ["REDIS_URL"] = "redis://localhost:6379/0"
|
||||||
|
os.environ["JWT_SECRET_KEY"] = "test-secret-key"
|
||||||
|
os.environ["COOKIE_ENCRYPTION_KEY"] = "test-cookie-key"
|
||||||
|
|
||||||
|
# Create the test engine BEFORE importing shared.models so we can swap it in
|
||||||
|
TEST_ENGINE = create_async_engine("sqlite+aiosqlite://", echo=False)
|
||||||
|
TestSessionLocal = sessionmaker(TEST_ENGINE, class_=AsyncSession, expire_on_commit=False)
|
||||||
|
|
||||||
|
# Now patch shared.models.base module-level objects before they get used
|
||||||
|
import shared.models.base as _base_mod # noqa: E402
|
||||||
|
|
||||||
|
_base_mod.engine = TEST_ENGINE
|
||||||
|
_base_mod.AsyncSessionLocal = TestSessionLocal
|
||||||
|
|
||||||
|
from shared.models.base import Base # noqa: E402
|
||||||
|
from shared.models import User # noqa: E402
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="session")
def event_loop():
    """Provide one shared asyncio event loop for the entire test session."""
    shared_loop = asyncio.new_event_loop()
    yield shared_loop
    shared_loop.close()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest_asyncio.fixture(autouse=True)
async def setup_db():
    """Create all tables before each test, drop after.

    autouse=True means every test gets a pristine schema, so tests stay
    independent of each other's database writes.
    """
    async with TEST_ENGINE.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
    yield
    # Teardown: drop everything so the next test starts clean.
    async with TEST_ENGINE.begin() as conn:
        await conn.run_sync(Base.metadata.drop_all)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest_asyncio.fixture
async def db_session() -> AsyncGenerator[AsyncSession, None]:
    """Yield a fresh async DB session bound to the test engine."""
    async with TestSessionLocal() as fresh_session:
        yield fresh_session
|
||||||
|
|
||||||
|
|
||||||
|
# --------------- Fake Redis for refresh-token tests ---------------
|
||||||
|
|
||||||
|
class FakeRedis:
    """In-memory async stand-in for the small Redis subset the tests need."""

    def __init__(self):
        # Plain dict; TTLs are ignored because tests never wait for expiry.
        self._store: dict[str, str] = {}

    async def setex(self, key: str, ttl: int, value: str):
        """Store *value* under *key*; *ttl* is accepted but never enforced."""
        self._store[key] = value

    async def get(self, key: str):
        """Return the stored value, or None when the key is absent."""
        return self._store.get(key)

    async def delete(self, key: str):
        """Remove *key* if present; deleting a missing key is a no-op."""
        if key in self._store:
            del self._store[key]
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
def fake_redis():
    """Provide a fresh FakeRedis instance for each test."""
    stub = FakeRedis()
    return stub
|
||||||
214
backend/tests/test_api_accounts.py
Normal file
214
backend/tests/test_api_accounts.py
Normal file
@@ -0,0 +1,214 @@
|
|||||||
|
"""
|
||||||
|
Tests for api_service account CRUD endpoints.
|
||||||
|
Validates tasks 4.1 and 4.2.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import pytest_asyncio
|
||||||
|
from unittest.mock import patch
|
||||||
|
from httpx import AsyncClient, ASGITransport
|
||||||
|
|
||||||
|
from shared.models import get_db
|
||||||
|
from tests.conftest import TEST_ENGINE, TestSessionLocal, Base, FakeRedis
|
||||||
|
|
||||||
|
|
||||||
|
@pytest_asyncio.fixture
async def client():
    """
    Provide an httpx AsyncClient wired to the api_service app,
    with DB overridden to test SQLite and a fake Redis for auth tokens.

    Yields a (auth_client, api_client) pair: the first talks to
    auth_service (register/login), the second to api_service (accounts).
    """
    fake_redis = FakeRedis()

    # Tables must exist before the apps are imported/used.
    async with TEST_ENGINE.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)

    # Import apps after DB is ready
    from api_service.app.main import app as api_app
    from auth_service.app.main import app as auth_app

    async def override_get_db():
        # Route every request's DB dependency to the test SQLite session.
        async with TestSessionLocal() as session:
            yield session

    async def _fake_get_redis():
        return fake_redis

    api_app.dependency_overrides[get_db] = override_get_db
    auth_app.dependency_overrides[get_db] = override_get_db

    # Patch the auth service's Redis accessor so refresh tokens go to the
    # in-memory FakeRedis instead of a real server.
    with patch(
        "auth_service.app.utils.security.get_redis",
        new=_fake_get_redis,
    ):
        # We need both clients: auth for getting tokens, api for account ops
        async with AsyncClient(
            transport=ASGITransport(app=auth_app), base_url="http://auth"
        ) as auth_client, AsyncClient(
            transport=ASGITransport(app=api_app), base_url="http://api"
        ) as api_client:
            yield auth_client, api_client

    # Cleanup so overrides don't leak into other test modules.
    api_app.dependency_overrides.clear()
    auth_app.dependency_overrides.clear()
|
||||||
|
|
||||||
|
|
||||||
|
async def _register_and_login(auth_client: AsyncClient, suffix: str = "1") -> str:
    """Helper: register a user and return an access token."""
    credentials = {
        "email": f"acct{suffix}@example.com",
        "password": "Str0ng!Pass1",
    }

    reg = await auth_client.post("/auth/register", json={
        "username": f"acctuser{suffix}",
        **credentials,
    })
    assert reg.status_code == 201, f"Register failed: {reg.json()}"

    login = await auth_client.post("/auth/login", json=credentials)
    payload = login.json()
    assert login.status_code == 200, f"Login failed: {payload}"

    # Handle both wrapped (success_response) and unwrapped token formats
    if "data" in payload:
        return payload["data"]["access_token"]
    return payload["access_token"]
|
||||||
|
|
||||||
|
|
||||||
|
def _auth_header(token: str) -> dict:
|
||||||
|
return {"Authorization": f"Bearer {token}"}
|
||||||
|
|
||||||
|
|
||||||
|
# ===================== Basic structure tests =====================
|
||||||
|
|
||||||
|
|
||||||
|
class TestAPIServiceBase:
    """Smoke tests for api_service's basic (unauthenticated) endpoints."""

    @pytest.mark.asyncio
    async def test_health(self, client):
        """Health endpoint answers with the unified success envelope."""
        _, api = client
        resp = await api.get("/health")
        assert resp.status_code == 200
        assert resp.json()["success"] is True

    @pytest.mark.asyncio
    async def test_root(self, client):
        """Root endpoint identifies the service in its data payload."""
        _, api = client
        resp = await api.get("/")
        assert resp.status_code == 200
        assert "API Service" in resp.json()["data"]["service"]
|
||||||
|
|
||||||
|
|
||||||
|
# ===================== Account CRUD tests =====================
|
||||||
|
|
||||||
|
|
||||||
|
class TestAccountCRUD:
    """CRUD and ownership tests for the /api/v1/accounts endpoints."""

    @pytest.mark.asyncio
    async def test_create_account(self, client):
        """Creating an account returns its data without leaking the cookie."""
        auth, api = client
        token = await _register_and_login(auth)

        resp = await api.post("/api/v1/accounts", json={
            "weibo_user_id": "12345",
            "cookie": "SUB=abc; SUBP=xyz;",
            "remark": "test account",
        }, headers=_auth_header(token))

        assert resp.status_code == 201
        body = resp.json()
        assert body["success"] is True
        assert body["data"]["weibo_user_id"] == "12345"
        assert body["data"]["status"] == "pending"
        assert body["data"]["remark"] == "test account"
        # Cookie plaintext must NOT appear in response
        assert "SUB=abc" not in str(body)

    @pytest.mark.asyncio
    async def test_list_accounts(self, client):
        """Listing returns exactly the accounts the user created."""
        auth, api = client
        token = await _register_and_login(auth, "list")

        # Create two accounts
        for i in range(2):
            await api.post("/api/v1/accounts", json={
                "weibo_user_id": f"uid{i}",
                "cookie": f"cookie{i}",
            }, headers=_auth_header(token))

        resp = await api.get("/api/v1/accounts", headers=_auth_header(token))
        assert resp.status_code == 200
        data = resp.json()["data"]
        assert len(data) == 2

    @pytest.mark.asyncio
    async def test_get_account_detail(self, client):
        """Detail endpoint returns the fields set at creation time."""
        auth, api = client
        token = await _register_and_login(auth, "detail")

        create_resp = await api.post("/api/v1/accounts", json={
            "weibo_user_id": "99",
            "cookie": "c=1",
            "remark": "my remark",
        }, headers=_auth_header(token))
        account_id = create_resp.json()["data"]["id"]

        resp = await api.get(f"/api/v1/accounts/{account_id}", headers=_auth_header(token))
        assert resp.status_code == 200
        assert resp.json()["data"]["remark"] == "my remark"

    @pytest.mark.asyncio
    async def test_update_account_remark(self, client):
        """PUT updates the remark and echoes the new value back."""
        auth, api = client
        token = await _register_and_login(auth, "upd")

        create_resp = await api.post("/api/v1/accounts", json={
            "weibo_user_id": "55",
            "cookie": "c=old",
        }, headers=_auth_header(token))
        account_id = create_resp.json()["data"]["id"]

        resp = await api.put(f"/api/v1/accounts/{account_id}", json={
            "remark": "updated remark",
        }, headers=_auth_header(token))
        assert resp.status_code == 200
        assert resp.json()["data"]["remark"] == "updated remark"

    @pytest.mark.asyncio
    async def test_delete_account(self, client):
        """DELETE removes the account; a follow-up GET yields 404."""
        auth, api = client
        token = await _register_and_login(auth, "del")

        create_resp = await api.post("/api/v1/accounts", json={
            "weibo_user_id": "77",
            "cookie": "c=del",
        }, headers=_auth_header(token))
        account_id = create_resp.json()["data"]["id"]

        resp = await api.delete(f"/api/v1/accounts/{account_id}", headers=_auth_header(token))
        assert resp.status_code == 200

        # Verify it's gone
        resp2 = await api.get(f"/api/v1/accounts/{account_id}", headers=_auth_header(token))
        assert resp2.status_code == 404

    @pytest.mark.asyncio
    async def test_access_other_users_account_forbidden(self, client):
        """One user's account must not be readable by another user (403)."""
        auth, api = client
        token_a = await _register_and_login(auth, "ownerA")
        token_b = await _register_and_login(auth, "ownerB")

        # User A creates an account
        create_resp = await api.post("/api/v1/accounts", json={
            "weibo_user_id": "111",
            "cookie": "c=a",
        }, headers=_auth_header(token_a))
        account_id = create_resp.json()["data"]["id"]

        # User B tries to access it
        resp = await api.get(f"/api/v1/accounts/{account_id}", headers=_auth_header(token_b))
        assert resp.status_code == 403

    @pytest.mark.asyncio
    async def test_unauthenticated_request_rejected(self, client):
        """Requests without a bearer token are rejected (401 or 403)."""
        _, api = client
        resp = await api.get("/api/v1/accounts")
        assert resp.status_code in (401, 403)
|
||||||
238
backend/tests/test_api_signin_logs.py
Normal file
238
backend/tests/test_api_signin_logs.py
Normal file
@@ -0,0 +1,238 @@
|
|||||||
|
"""
|
||||||
|
Tests for api_service signin log query endpoints.
|
||||||
|
Validates task 6.1.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import pytest_asyncio
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from unittest.mock import patch
|
||||||
|
from httpx import AsyncClient, ASGITransport
|
||||||
|
|
||||||
|
from shared.models import get_db, Account, SigninLog
|
||||||
|
from tests.conftest import TEST_ENGINE, TestSessionLocal, Base, FakeRedis
|
||||||
|
|
||||||
|
|
||||||
|
@pytest_asyncio.fixture
async def client():
    """
    Provide an httpx AsyncClient wired to the api_service app,
    with DB overridden to test SQLite and a fake Redis for auth tokens.

    Yields a (auth_client, api_client) pair: auth for tokens, api for
    the signin-log endpoints under test.
    """
    fake_redis = FakeRedis()

    # Tables must exist before the apps are imported/used.
    async with TEST_ENGINE.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)

    # Import apps after DB is ready
    from api_service.app.main import app as api_app
    from auth_service.app.main import app as auth_app

    async def override_get_db():
        # Route every request's DB dependency to the test SQLite session.
        async with TestSessionLocal() as session:
            yield session

    async def _fake_get_redis():
        return fake_redis

    api_app.dependency_overrides[get_db] = override_get_db
    auth_app.dependency_overrides[get_db] = override_get_db

    # Patch the auth service's Redis accessor so tokens use FakeRedis.
    with patch(
        "auth_service.app.utils.security.get_redis",
        new=_fake_get_redis,
    ):
        async with AsyncClient(
            transport=ASGITransport(app=auth_app), base_url="http://auth"
        ) as auth_client, AsyncClient(
            transport=ASGITransport(app=api_app), base_url="http://api"
        ) as api_client:
            yield auth_client, api_client

    # Cleanup so overrides don't leak into other test modules.
    api_app.dependency_overrides.clear()
    auth_app.dependency_overrides.clear()
|
||||||
|
|
||||||
|
|
||||||
|
async def _register_and_login(auth_client: AsyncClient, suffix: str = "1") -> str:
    """Helper: register a user and return an access token."""
    credentials = {
        "email": f"log{suffix}@example.com",
        "password": "Str0ng!Pass1",
    }

    reg = await auth_client.post("/auth/register", json={
        "username": f"loguser{suffix}",
        **credentials,
    })
    assert reg.status_code == 201

    login = await auth_client.post("/auth/login", json=credentials)
    payload = login.json()
    assert login.status_code == 200

    # Token may arrive wrapped in the unified envelope or bare.
    if "data" in payload:
        return payload["data"]["access_token"]
    return payload["access_token"]
|
||||||
|
|
||||||
|
|
||||||
|
def _auth_header(token: str) -> dict:
|
||||||
|
return {"Authorization": f"Bearer {token}"}
|
||||||
|
|
||||||
|
|
||||||
|
async def _create_account(api: AsyncClient, token: str, weibo_id: str) -> str:
    """Helper: create an account via the API and return its ID."""
    resp = await api.post(
        "/api/v1/accounts",
        json={"weibo_user_id": weibo_id, "cookie": f"cookie_{weibo_id}"},
        headers=_auth_header(token),
    )
    assert resp.status_code == 201
    return resp.json()["data"]["id"]
|
||||||
|
|
||||||
|
|
||||||
|
async def _create_signin_logs(db, account_id: str, count: int, statuses: list | None = None):
    """Helper: create signin logs for testing.

    Creates *count* SigninLog rows for *account_id*. Each row i takes
    statuses[i] when provided (falling back to "success" past the end),
    and is timestamped i hours before now so rows come back in
    descending signed_at order.
    """
    if statuses is None:
        statuses = ["success"] * count

    base_time = datetime.utcnow()
    for i in range(count):
        log = SigninLog(
            account_id=account_id,
            topic_title=f"Topic {i}",
            status=statuses[i] if i < len(statuses) else "success",
            signed_at=base_time - timedelta(hours=i),  # Descending order
        )
        db.add(log)
    # Single commit after the loop flushes all rows at once.
    await db.commit()
|
||||||
|
|
||||||
|
|
||||||
|
# ===================== Signin Log Query Tests =====================
|
||||||
|
|
||||||
|
|
||||||
|
class TestSigninLogQuery:
    """Tests for GET /api/v1/accounts/{id}/signin-logs (pagination,
    status filtering, ordering, and ownership enforcement)."""

    @pytest.mark.asyncio
    async def test_get_signin_logs_empty(self, client):
        """Test querying logs for an account with no logs."""
        auth, api = client
        token = await _register_and_login(auth, "empty")
        account_id = await _create_account(api, token, "empty_acc")

        resp = await api.get(
            f"/api/v1/accounts/{account_id}/signin-logs",
            headers=_auth_header(token)
        )
        assert resp.status_code == 200
        data = resp.json()["data"]
        assert data["total"] == 0
        assert len(data["items"]) == 0

    @pytest.mark.asyncio
    async def test_get_signin_logs_with_data(self, client):
        """Test querying logs returns data in descending order."""
        auth, api = client
        token = await _register_and_login(auth, "data")
        account_id = await _create_account(api, token, "data_acc")

        # Create logs directly in DB
        async with TestSessionLocal() as db:
            await _create_signin_logs(db, account_id, 5)

        resp = await api.get(
            f"/api/v1/accounts/{account_id}/signin-logs",
            headers=_auth_header(token)
        )
        assert resp.status_code == 200
        data = resp.json()["data"]
        assert data["total"] == 5
        assert len(data["items"]) == 5

        # Verify descending order by signed_at
        items = data["items"]
        for i in range(len(items) - 1):
            assert items[i]["signed_at"] >= items[i + 1]["signed_at"]

    @pytest.mark.asyncio
    async def test_signin_logs_pagination(self, client):
        """Test pagination works correctly."""
        auth, api = client
        token = await _register_and_login(auth, "page")
        account_id = await _create_account(api, token, "page_acc")

        # Create 10 logs
        async with TestSessionLocal() as db:
            await _create_signin_logs(db, account_id, 10)

        # Page 1, size 3
        resp = await api.get(
            f"/api/v1/accounts/{account_id}/signin-logs?page=1&size=3",
            headers=_auth_header(token)
        )
        assert resp.status_code == 200
        data = resp.json()["data"]
        assert data["total"] == 10
        assert len(data["items"]) == 3
        assert data["page"] == 1
        assert data["size"] == 3
        # 10 items at 3 per page -> 4 pages (last page partially filled)
        assert data["total_pages"] == 4

        # Page 2, size 3
        resp2 = await api.get(
            f"/api/v1/accounts/{account_id}/signin-logs?page=2&size=3",
            headers=_auth_header(token)
        )
        data2 = resp2.json()["data"]
        assert len(data2["items"]) == 3
        assert data2["page"] == 2

    @pytest.mark.asyncio
    async def test_signin_logs_status_filter(self, client):
        """Test status filtering works correctly."""
        auth, api = client
        token = await _register_and_login(auth, "filter")
        account_id = await _create_account(api, token, "filter_acc")

        # Create logs with different statuses
        async with TestSessionLocal() as db:
            statuses = ["success", "success", "failed_network", "success", "failed_already_signed"]
            await _create_signin_logs(db, account_id, 5, statuses)

        # Filter by success
        resp = await api.get(
            f"/api/v1/accounts/{account_id}/signin-logs?status=success",
            headers=_auth_header(token)
        )
        assert resp.status_code == 200
        data = resp.json()["data"]
        assert data["total"] == 3
        assert all(item["status"] == "success" for item in data["items"])

        # Filter by failed_network
        resp2 = await api.get(
            f"/api/v1/accounts/{account_id}/signin-logs?status=failed_network",
            headers=_auth_header(token)
        )
        data2 = resp2.json()["data"]
        assert data2["total"] == 1
        assert data2["items"][0]["status"] == "failed_network"

    @pytest.mark.asyncio
    async def test_access_other_users_logs_forbidden(self, client):
        """Test that users cannot access other users' signin logs."""
        auth, api = client
        token_a = await _register_and_login(auth, "logA")
        token_b = await _register_and_login(auth, "logB")

        # User A creates an account
        account_id = await _create_account(api, token_a, "logA_acc")

        # User B tries to access logs
        resp = await api.get(
            f"/api/v1/accounts/{account_id}/signin-logs",
            headers=_auth_header(token_b)
        )
        assert resp.status_code == 403

    @pytest.mark.asyncio
    async def test_unauthenticated_logs_request_rejected(self, client):
        """Test that unauthenticated requests are rejected."""
        _, api = client
        resp = await api.get("/api/v1/accounts/fake-id/signin-logs")
        assert resp.status_code in (401, 403)
|
||||||
226
backend/tests/test_api_tasks.py
Normal file
226
backend/tests/test_api_tasks.py
Normal file
@@ -0,0 +1,226 @@
|
|||||||
|
"""
|
||||||
|
Tests for API_Service task management endpoints.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import pytest_asyncio
|
||||||
|
from httpx import AsyncClient
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
|
from shared.models import User, Account, Task
|
||||||
|
from auth_service.app.utils.security import create_access_token
|
||||||
|
from shared.crypto import encrypt_cookie, derive_key
|
||||||
|
|
||||||
|
|
||||||
|
@pytest_asyncio.fixture
async def test_user(db_session: AsyncSession) -> User:
    """Persist a baseline user row and hand it back fully refreshed."""
    record = User(
        username="testuser",
        email="test@example.com",
        hashed_password="hashed_password",
    )
    db_session.add(record)
    await db_session.commit()
    # Refresh so DB-generated fields (e.g. the primary key) are populated.
    await db_session.refresh(record)
    return record
||||||
|
|
||||||
|
@pytest_asyncio.fixture
async def test_account(db_session: AsyncSession, test_user: User) -> Account:
    """Persist a Weibo account (with encrypted cookies) owned by test_user."""
    # Cookies are stored encrypted; derive a throwaway key just for the fixture.
    cookie_key = derive_key("test-cookie-key")
    ciphertext, iv = encrypt_cookie("test_cookie_data", cookie_key)

    row = Account(
        user_id=test_user.id,
        weibo_user_id="123456",
        remark="Test Account",
        encrypted_cookies=ciphertext,
        iv=iv,
        status="pending",
    )
    db_session.add(row)
    await db_session.commit()
    # Refresh so DB-generated fields (e.g. the primary key) are populated.
    await db_session.refresh(row)
    return row
||||||
|
|
||||||
|
@pytest_asyncio.fixture
async def auth_headers(test_user: User) -> dict:
    """Build an Authorization header carrying a JWT for test_user."""
    bearer = create_access_token({"sub": test_user.id})
    return {"Authorization": f"Bearer {bearer}"}
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_create_task_valid_cron(
    db_session: AsyncSession,
    test_user: User,
    test_account: Account,
    auth_headers: dict,
):
    """A well-formed cron expression yields a 201 and an enabled task."""
    from api_service.app.main import app

    async with AsyncClient(app=app, base_url="http://test") as client:
        response = await client.post(
            f"/api/v1/accounts/{test_account.id}/tasks",
            json={"cron_expression": "0 9 * * *"},
            headers=auth_headers,
        )

    assert response.status_code == 201
    body = response.json()
    assert body["success"] is True
    created = body["data"]
    # The created task echoes back the schedule, is enabled by default,
    # and is linked to the account it was posted under.
    assert created["cron_expression"] == "0 9 * * *"
    assert created["is_enabled"] is True
    assert created["account_id"] == test_account.id
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_create_task_invalid_cron(
    db_session: AsyncSession,
    test_user: User,
    test_account: Account,
    auth_headers: dict,
):
    """A malformed cron expression is rejected with a 400 error envelope."""
    from api_service.app.main import app

    async with AsyncClient(app=app, base_url="http://test") as client:
        response = await client.post(
            f"/api/v1/accounts/{test_account.id}/tasks",
            json={"cron_expression": "invalid cron"},
            headers=auth_headers,
        )

    assert response.status_code == 400
    body = response.json()
    # Unified response format: failures carry success=False.
    assert body["success"] is False
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_list_tasks(
    db_session: AsyncSession,
    test_user: User,
    test_account: Account,
    auth_headers: dict,
):
    """Listing returns every task configured for the account."""
    # Seed one enabled and one disabled task directly via the ORM.
    morning = Task(account_id=test_account.id, cron_expression="0 9 * * *", is_enabled=True)
    evening = Task(account_id=test_account.id, cron_expression="0 18 * * *", is_enabled=False)
    db_session.add_all([morning, evening])
    await db_session.commit()

    from api_service.app.main import app

    async with AsyncClient(app=app, base_url="http://test") as client:
        response = await client.get(
            f"/api/v1/accounts/{test_account.id}/tasks",
            headers=auth_headers,
        )

    assert response.status_code == 200
    body = response.json()
    assert body["success"] is True
    # Both tasks, regardless of enabled state, should be listed.
    assert len(body["data"]) == 2
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_update_task(
    db_session: AsyncSession,
    test_user: User,
    test_account: Account,
    auth_headers: dict,
):
    """Disabling a task through PUT flips its is_enabled flag."""
    target = Task(account_id=test_account.id, cron_expression="0 9 * * *", is_enabled=True)
    db_session.add(target)
    await db_session.commit()
    await db_session.refresh(target)

    from api_service.app.main import app

    async with AsyncClient(app=app, base_url="http://test") as client:
        response = await client.put(
            f"/api/v1/tasks/{target.id}",
            json={"is_enabled": False},
            headers=auth_headers,
        )

    assert response.status_code == 200
    body = response.json()
    assert body["success"] is True
    assert body["data"]["is_enabled"] is False
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_delete_task(
    db_session: AsyncSession,
    test_user: User,
    test_account: Account,
    auth_headers: dict,
):
    """DELETE removes the task both from the API and from the database."""
    doomed = Task(account_id=test_account.id, cron_expression="0 9 * * *", is_enabled=True)
    db_session.add(doomed)
    await db_session.commit()
    await db_session.refresh(doomed)

    from api_service.app.main import app

    async with AsyncClient(app=app, base_url="http://test") as client:
        response = await client.delete(
            f"/api/v1/tasks/{doomed.id}",
            headers=auth_headers,
        )

    assert response.status_code == 200
    assert response.json()["success"] is True

    # The row must be gone from the database as well.
    from sqlalchemy import select
    lookup = await db_session.execute(select(Task).where(Task.id == doomed.id))
    assert lookup.scalar_one_or_none() is None
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_access_other_user_task_forbidden(
    db_session: AsyncSession,
    test_account: Account,
):
    """A stranger's JWT cannot mutate tasks belonging to another user."""
    # A second, unrelated user.
    intruder = User(
        username="otheruser",
        email="other@example.com",
        hashed_password="hashed_password",
    )
    db_session.add(intruder)
    await db_session.commit()
    await db_session.refresh(intruder)

    # The task under attack belongs to test_account's owner.
    victim_task = Task(account_id=test_account.id, cron_expression="0 9 * * *", is_enabled=True)
    db_session.add(victim_task)
    await db_session.commit()
    await db_session.refresh(victim_task)

    # Authenticate as the unrelated user and attempt an update.
    intruder_token = create_access_token({"sub": intruder.id})
    intruder_headers = {"Authorization": f"Bearer {intruder_token}"}

    from api_service.app.main import app

    async with AsyncClient(app=app, base_url="http://test") as client:
        response = await client.put(
            f"/api/v1/tasks/{victim_task.id}",
            json={"is_enabled": False},
            headers=intruder_headers,
        )

    assert response.status_code == 403
|
||||||
Some files were not shown because too many files have changed in this diff. Show More
Reference in New Issue
Block a user