From 754e720ba762ca323d153ddcaaa2d6f85d06a9f6 Mon Sep 17 00:00:00 2001 From: Jeason <1710884619@qq.com> Date: Mon, 9 Mar 2026 14:05:00 +0800 Subject: [PATCH] 123 --- .kiro/specs/multi-user-signin/design.md | 566 ++++++++++++++++++ .kiro/specs/multi-user-signin/requirements.md | 138 +++++ .kiro/specs/multi-user-signin/tasks.md | 185 ++++++ README.md | 187 ++++++ backend/Dockerfile | 91 +++ backend/api_service/__init__.py | 0 .../__pycache__/__init__.cpython-311.pyc | Bin 0 -> 154 bytes backend/api_service/app/__init__.py | 0 .../app/__pycache__/__init__.cpython-311.pyc | Bin 0 -> 158 bytes .../__pycache__/dependencies.cpython-311.pyc | Bin 0 -> 2325 bytes .../app/__pycache__/main.cpython-311.pyc | Bin 0 -> 3373 bytes backend/api_service/app/config.py | 9 + backend/api_service/app/dependencies.py | 50 ++ backend/api_service/app/main.py | 75 +++ backend/api_service/app/routers/__init__.py | 0 .../__pycache__/__init__.cpython-311.pyc | Bin 0 -> 166 bytes .../__pycache__/accounts.cpython-311.pyc | Bin 0 -> 7346 bytes .../__pycache__/signin_logs.cpython-311.pyc | Bin 0 -> 5061 bytes .../routers/__pycache__/tasks.cpython-311.pyc | Bin 0 -> 9599 bytes backend/api_service/app/routers/accounts.py | 139 +++++ .../api_service/app/routers/signin_logs.py | 83 +++ backend/api_service/app/routers/tasks.py | 196 ++++++ backend/api_service/app/schemas/__init__.py | 0 .../__pycache__/__init__.cpython-311.pyc | Bin 0 -> 166 bytes .../__pycache__/account.cpython-311.pyc | Bin 0 -> 2574 bytes .../__pycache__/signin_log.cpython-311.pyc | Bin 0 -> 1808 bytes .../schemas/__pycache__/task.cpython-311.pyc | Bin 0 -> 2058 bytes backend/api_service/app/schemas/account.py | 34 ++ backend/api_service/app/schemas/signin_log.py | 30 + backend/api_service/app/schemas/task.py | 29 + backend/auth_service/Dockerfile | 34 ++ backend/auth_service/__init__.py | 0 .../__pycache__/__init__.cpython-311.pyc | Bin 0 -> 155 bytes backend/auth_service/app/__init__.py | 0 
.../app/__pycache__/__init__.cpython-311.pyc | Bin 0 -> 159 bytes .../app/__pycache__/main.cpython-311.pyc | Bin 0 -> 9283 bytes backend/auth_service/app/config.py | 50 ++ backend/auth_service/app/main.py | 223 +++++++ backend/auth_service/app/models/__init__.py | 0 .../__pycache__/__init__.cpython-311.pyc | Bin 0 -> 166 bytes .../__pycache__/database.cpython-311.pyc | Bin 0 -> 1194 bytes backend/auth_service/app/models/database.py | 15 + backend/auth_service/app/schemas/__init__.py | 0 .../__pycache__/__init__.cpython-311.pyc | Bin 0 -> 167 bytes .../schemas/__pycache__/user.cpython-311.pyc | Bin 0 -> 4870 bytes backend/auth_service/app/schemas/user.py | 57 ++ backend/auth_service/app/services/__init__.py | 0 .../__pycache__/__init__.cpython-311.pyc | Bin 0 -> 168 bytes .../__pycache__/auth_service.cpython-311.pyc | Bin 0 -> 11764 bytes .../auth_service/app/services/auth_service.py | 191 ++++++ backend/auth_service/app/utils/__init__.py | 0 .../__pycache__/__init__.cpython-311.pyc | Bin 0 -> 165 bytes .../__pycache__/security.cpython-311.pyc | Bin 0 -> 8255 bytes backend/auth_service/app/utils/security.py | 148 +++++ backend/auth_service/requirements.txt | 31 + backend/requirements.txt | 33 + backend/shared/__init__.py | 1 + .../__pycache__/__init__.cpython-311.pyc | Bin 0 -> 221 bytes .../shared/__pycache__/config.cpython-311.pyc | Bin 0 -> 1465 bytes .../shared/__pycache__/crypto.cpython-311.pyc | Bin 0 -> 2590 bytes .../__pycache__/response.cpython-311.pyc | Bin 0 -> 1260 bytes backend/shared/config.py | 31 + backend/shared/crypto.py | 44 ++ backend/shared/models/__init__.py | 18 + .../__pycache__/__init__.cpython-311.pyc | Bin 0 -> 636 bytes .../__pycache__/account.cpython-311.pyc | Bin 0 -> 2430 bytes .../models/__pycache__/base.cpython-311.pyc | Bin 0 -> 1893 bytes .../__pycache__/signin_log.cpython-311.pyc | Bin 0 -> 1864 bytes .../models/__pycache__/task.cpython-311.pyc | Bin 0 -> 1921 bytes .../models/__pycache__/user.cpython-311.pyc | Bin 0 -> 1976 bytes 
backend/shared/models/account.py | 30 + backend/shared/models/base.py | 33 + backend/shared/models/signin_log.py | 23 + backend/shared/models/task.py | 24 + backend/shared/models/user.py | 25 + backend/shared/response.py | 35 ++ backend/signin_executor/Dockerfile | 34 ++ backend/signin_executor/app/config.py | 56 ++ backend/signin_executor/app/main.py | 226 +++++++ .../app/models/signin_models.py | 89 +++ .../app/services/signin_service.py | 271 +++++++++ .../app/services/weibo_client.py | 167 ++++++ backend/signin_executor/requirements.txt | 23 + backend/task_scheduler/Dockerfile | 30 + backend/task_scheduler/app/celery_app.py | 97 +++ backend/task_scheduler/app/config.py | 47 ++ .../task_scheduler/app/tasks/signin_tasks.py | 196 ++++++ backend/task_scheduler/requirements.txt | 18 + backend/tests/__init__.py | 0 .../__pycache__/__init__.cpython-311.pyc | Bin 0 -> 148 bytes .../conftest.cpython-311-pytest-8.3.3.pyc | Bin 0 -> 5547 bytes ..._api_accounts.cpython-311-pytest-8.3.3.pyc | Bin 0 -> 27161 bytes ...i_signin_logs.cpython-311-pytest-8.3.3.pyc | Bin 0 -> 29991 bytes ...est_api_tasks.cpython-311-pytest-8.3.3.pyc | Bin 0 -> 21562 bytes ..._auth_service.cpython-311-pytest-8.3.3.pyc | Bin 0 -> 43761 bytes .../test_shared.cpython-311-pytest-8.3.3.pyc | Bin 0 -> 27171 bytes backend/tests/conftest.py | 86 +++ backend/tests/test_api_accounts.py | 214 +++++++ backend/tests/test_api_signin_logs.py | 238 ++++++++ backend/tests/test_api_tasks.py | 226 +++++++ backend/tests/test_auth_service.py | 317 ++++++++++ backend/tests/test_shared.py | 171 ++++++ docker-compose.yml | 170 ++++++ init-db.sql | 64 ++ 开发文档.txt | 292 +++++++++ 105 files changed, 5890 insertions(+) create mode 100644 .kiro/specs/multi-user-signin/design.md create mode 100644 .kiro/specs/multi-user-signin/requirements.md create mode 100644 .kiro/specs/multi-user-signin/tasks.md create mode 100644 README.md create mode 100644 backend/Dockerfile create mode 100644 backend/api_service/__init__.py create mode 
100644 backend/api_service/__pycache__/__init__.cpython-311.pyc create mode 100644 backend/api_service/app/__init__.py create mode 100644 backend/api_service/app/__pycache__/__init__.cpython-311.pyc create mode 100644 backend/api_service/app/__pycache__/dependencies.cpython-311.pyc create mode 100644 backend/api_service/app/__pycache__/main.cpython-311.pyc create mode 100644 backend/api_service/app/config.py create mode 100644 backend/api_service/app/dependencies.py create mode 100644 backend/api_service/app/main.py create mode 100644 backend/api_service/app/routers/__init__.py create mode 100644 backend/api_service/app/routers/__pycache__/__init__.cpython-311.pyc create mode 100644 backend/api_service/app/routers/__pycache__/accounts.cpython-311.pyc create mode 100644 backend/api_service/app/routers/__pycache__/signin_logs.cpython-311.pyc create mode 100644 backend/api_service/app/routers/__pycache__/tasks.cpython-311.pyc create mode 100644 backend/api_service/app/routers/accounts.py create mode 100644 backend/api_service/app/routers/signin_logs.py create mode 100644 backend/api_service/app/routers/tasks.py create mode 100644 backend/api_service/app/schemas/__init__.py create mode 100644 backend/api_service/app/schemas/__pycache__/__init__.cpython-311.pyc create mode 100644 backend/api_service/app/schemas/__pycache__/account.cpython-311.pyc create mode 100644 backend/api_service/app/schemas/__pycache__/signin_log.cpython-311.pyc create mode 100644 backend/api_service/app/schemas/__pycache__/task.cpython-311.pyc create mode 100644 backend/api_service/app/schemas/account.py create mode 100644 backend/api_service/app/schemas/signin_log.py create mode 100644 backend/api_service/app/schemas/task.py create mode 100644 backend/auth_service/Dockerfile create mode 100644 backend/auth_service/__init__.py create mode 100644 backend/auth_service/__pycache__/__init__.cpython-311.pyc create mode 100644 backend/auth_service/app/__init__.py create mode 100644 
backend/auth_service/app/__pycache__/__init__.cpython-311.pyc create mode 100644 backend/auth_service/app/__pycache__/main.cpython-311.pyc create mode 100644 backend/auth_service/app/config.py create mode 100644 backend/auth_service/app/main.py create mode 100644 backend/auth_service/app/models/__init__.py create mode 100644 backend/auth_service/app/models/__pycache__/__init__.cpython-311.pyc create mode 100644 backend/auth_service/app/models/__pycache__/database.cpython-311.pyc create mode 100644 backend/auth_service/app/models/database.py create mode 100644 backend/auth_service/app/schemas/__init__.py create mode 100644 backend/auth_service/app/schemas/__pycache__/__init__.cpython-311.pyc create mode 100644 backend/auth_service/app/schemas/__pycache__/user.cpython-311.pyc create mode 100644 backend/auth_service/app/schemas/user.py create mode 100644 backend/auth_service/app/services/__init__.py create mode 100644 backend/auth_service/app/services/__pycache__/__init__.cpython-311.pyc create mode 100644 backend/auth_service/app/services/__pycache__/auth_service.cpython-311.pyc create mode 100644 backend/auth_service/app/services/auth_service.py create mode 100644 backend/auth_service/app/utils/__init__.py create mode 100644 backend/auth_service/app/utils/__pycache__/__init__.cpython-311.pyc create mode 100644 backend/auth_service/app/utils/__pycache__/security.cpython-311.pyc create mode 100644 backend/auth_service/app/utils/security.py create mode 100644 backend/auth_service/requirements.txt create mode 100644 backend/requirements.txt create mode 100644 backend/shared/__init__.py create mode 100644 backend/shared/__pycache__/__init__.cpython-311.pyc create mode 100644 backend/shared/__pycache__/config.cpython-311.pyc create mode 100644 backend/shared/__pycache__/crypto.cpython-311.pyc create mode 100644 backend/shared/__pycache__/response.cpython-311.pyc create mode 100644 backend/shared/config.py create mode 100644 backend/shared/crypto.py create mode 100644 
backend/shared/models/__init__.py create mode 100644 backend/shared/models/__pycache__/__init__.cpython-311.pyc create mode 100644 backend/shared/models/__pycache__/account.cpython-311.pyc create mode 100644 backend/shared/models/__pycache__/base.cpython-311.pyc create mode 100644 backend/shared/models/__pycache__/signin_log.cpython-311.pyc create mode 100644 backend/shared/models/__pycache__/task.cpython-311.pyc create mode 100644 backend/shared/models/__pycache__/user.cpython-311.pyc create mode 100644 backend/shared/models/account.py create mode 100644 backend/shared/models/base.py create mode 100644 backend/shared/models/signin_log.py create mode 100644 backend/shared/models/task.py create mode 100644 backend/shared/models/user.py create mode 100644 backend/shared/response.py create mode 100644 backend/signin_executor/Dockerfile create mode 100644 backend/signin_executor/app/config.py create mode 100644 backend/signin_executor/app/main.py create mode 100644 backend/signin_executor/app/models/signin_models.py create mode 100644 backend/signin_executor/app/services/signin_service.py create mode 100644 backend/signin_executor/app/services/weibo_client.py create mode 100644 backend/signin_executor/requirements.txt create mode 100644 backend/task_scheduler/Dockerfile create mode 100644 backend/task_scheduler/app/celery_app.py create mode 100644 backend/task_scheduler/app/config.py create mode 100644 backend/task_scheduler/app/tasks/signin_tasks.py create mode 100644 backend/task_scheduler/requirements.txt create mode 100644 backend/tests/__init__.py create mode 100644 backend/tests/__pycache__/__init__.cpython-311.pyc create mode 100644 backend/tests/__pycache__/conftest.cpython-311-pytest-8.3.3.pyc create mode 100644 backend/tests/__pycache__/test_api_accounts.cpython-311-pytest-8.3.3.pyc create mode 100644 backend/tests/__pycache__/test_api_signin_logs.cpython-311-pytest-8.3.3.pyc create mode 100644 
backend/tests/__pycache__/test_api_tasks.cpython-311-pytest-8.3.3.pyc create mode 100644 backend/tests/__pycache__/test_auth_service.cpython-311-pytest-8.3.3.pyc create mode 100644 backend/tests/__pycache__/test_shared.cpython-311-pytest-8.3.3.pyc create mode 100644 backend/tests/conftest.py create mode 100644 backend/tests/test_api_accounts.py create mode 100644 backend/tests/test_api_signin_logs.py create mode 100644 backend/tests/test_api_tasks.py create mode 100644 backend/tests/test_auth_service.py create mode 100644 backend/tests/test_shared.py create mode 100644 docker-compose.yml create mode 100644 init-db.sql create mode 100644 开发文档.txt diff --git a/.kiro/specs/multi-user-signin/design.md b/.kiro/specs/multi-user-signin/design.md new file mode 100644 index 0000000..86fc65a --- /dev/null +++ b/.kiro/specs/multi-user-signin/design.md @@ -0,0 +1,566 @@ +# 设计文档:Weibo-HotSign 多用户签到系统 + +## 概述 + +本设计文档描述 Weibo-HotSign 系统的架构重构与核心功能实现方案。核心目标是: + +1. 引入 `backend/shared/` 共享模块,统一 ORM 模型、数据库连接和加密工具,消除各服务间的代码重复 +2. 完善 `auth_service`,实现 Refresh Token 机制 +3. 从零实现 `api_service`,提供微博账号 CRUD、任务配置和签到日志查询 API +4. 将 `signin_executor` 和 `task_scheduler` 中的 Mock 实现替换为真实数据库交互 +5. 所有 API 遵循统一响应格式 + +技术栈:Python 3.11 + FastAPI + SQLAlchemy (async) + Celery + MySQL (aiomysql) + Redis + +## 架构 + +### 重构后的服务架构 + +```mermaid +graph TD + subgraph "客户端" + FE[Web Frontend / API Client] + end + + subgraph "后端服务层" + API[API_Service :8000
账号/任务/日志管理] + AUTH[Auth_Service :8001
注册/登录/Token刷新] + SCHED[Task_Scheduler
Celery Beat] + EXEC[Signin_Executor
Celery Worker] + end + + subgraph "共享层" + SHARED[shared/
ORM Models + DB Session
+ Crypto Utils + Response Format] + end + + subgraph "基础设施" + MYSQL[(MySQL)] + REDIS[(Redis
Cache + Message Queue)] + PROXY[Proxy Pool] + end + + FE -->|REST API| API + FE -->|REST API| AUTH + API -->|导入| SHARED + AUTH -->|导入| SHARED + SCHED -->|导入| SHARED + EXEC -->|导入| SHARED + SHARED -->|aiomysql| MYSQL + SCHED -->|发布任务| REDIS + EXEC -->|消费任务| REDIS + EXEC -->|获取代理| PROXY + EXEC -->|签到请求| WEIBO[Weibo.com] + +``` + +### 关键架构决策 + +1. **共享模块而非微服务间 RPC**:各服务通过 Python 包导入 `shared/` 模块访问数据库,而非通过 HTTP 调用其他服务查询数据。这简化了部署,减少了网络延迟,适合当前规模。 +2. **API_Service 作为唯一对外网关**:所有账号管理、任务配置、日志查询 API 集中在 `api_service` 中,`auth_service` 仅负责认证。 +3. **Celery 同时承担调度和执行**:`task_scheduler` 运行 Celery Beat(调度),`signin_executor` 运行 Celery Worker(执行),通过 Redis 消息队列解耦。 +4. **Dockerfile 多阶段构建**:保持现有的多阶段 Dockerfile 结构,新增 `shared/` 目录的 COPY 步骤。 + +### 目录结构(重构后) + +``` +backend/ +├── shared/ # 新增:共享模块 +│ ├── __init__.py +│ ├── models/ +│ │ ├── __init__.py +│ │ ├── base.py # SQLAlchemy Base + engine + session +│ │ ├── user.py # User ORM model +│ │ ├── account.py # Account ORM model +│ │ ├── task.py # Task ORM model +│ │ └── signin_log.py # SigninLog ORM model +│ ├── crypto.py # AES-256-GCM 加密/解密工具 +│ ├── response.py # 统一响应格式工具 +│ └── config.py # 共享配置(DB URL, Redis URL 等) +├── auth_service/ +│ └── app/ +│ ├── main.py # 重构:使用 shared models +│ ├── config.py +│ ├── schemas/ +│ │ └── user.py # 增加 RefreshToken schema +│ ├── services/ +│ │ └── auth_service.py # 增加 refresh token 逻辑 +│ └── utils/ +│ └── security.py # 增加 refresh token 生成/验证 +├── api_service/ +│ └── app/ +│ ├── __init__.py +│ ├── main.py # 新增:FastAPI 应用入口 +│ ├── config.py +│ ├── dependencies.py # JWT 认证依赖 +│ ├── schemas/ +│ │ ├── __init__.py +│ │ ├── account.py # 账号请求/响应 schema +│ │ ├── task.py # 任务请求/响应 schema +│ │ └── signin_log.py # 签到日志响应 schema +│ └── routers/ +│ ├── __init__.py +│ ├── accounts.py # 账号 CRUD 路由 +│ ├── tasks.py # 任务 CRUD 路由 +│ └── signin_logs.py # 签到日志查询路由 +├── signin_executor/ +│ └── app/ +│ ├── main.py +│ ├── config.py +│ ├── services/ +│ │ ├── signin_service.py # 重构:使用 shared models 查询真实数据 +│ │ └── weibo_client.py 
# 重构:实现真实加密/解密 +│ └── models/ +│ └── signin_models.py # 保留 Pydantic 请求/响应模型 +├── task_scheduler/ +│ └── app/ +│ ├── celery_app.py # 重构:从 DB 动态加载任务 +│ ├── config.py +│ └── tasks/ +│ └── signin_tasks.py # 重构:使用真实 DB 查询 +├── Dockerfile # 更新:各阶段 COPY shared/ +└── requirements.txt +``` + +## 组件与接口 + +### 1. shared 模块 + +#### 1.1 数据库连接管理 (`shared/models/base.py`) + +```python +# 提供异步 engine 和 session factory +# 所有服务通过 get_db() 获取 AsyncSession +async def get_db() -> AsyncGenerator[AsyncSession, None]: + async with AsyncSessionLocal() as session: + try: + yield session + finally: + await session.close() +``` + +#### 1.2 加密工具 (`shared/crypto.py`) + +```python +def encrypt_cookie(plaintext: str, key: bytes) -> tuple[str, str]: + """AES-256-GCM 加密,返回 (密文base64, iv_base64)""" + +def decrypt_cookie(ciphertext_b64: str, iv_b64: str, key: bytes) -> str: + """AES-256-GCM 解密,返回原始 Cookie 字符串""" +``` + +#### 1.3 统一响应格式 (`shared/response.py`) + +```python +def success_response(data: Any, message: str = "Operation successful") -> dict +def error_response(message: str, code: str, details: list = None, status_code: int = 400) -> JSONResponse +``` + +### 2. Auth_Service 接口 + +| 方法 | 路径 | 描述 | 需求 | +|------|------|------|------| +| POST | `/auth/register` | 用户注册 | 1.1, 1.2, 1.8 | +| POST | `/auth/login` | 用户登录,返回 access_token + refresh_token | 1.3, 1.4 | +| POST | `/auth/refresh` | 刷新 Token | 1.5, 1.6 | +| GET | `/auth/me` | 获取当前用户信息 | 1.7 | + +### 3. 
API_Service 接口 + +| 方法 | 路径 | 描述 | 需求 | +|------|------|------|------| +| POST | `/api/v1/accounts` | 添加微博账号 | 2.1, 2.7, 2.8 | +| GET | `/api/v1/accounts` | 获取账号列表 | 2.2, 2.8 | +| GET | `/api/v1/accounts/{id}` | 获取账号详情 | 2.3, 2.6, 2.8 | +| PUT | `/api/v1/accounts/{id}` | 更新账号信息 | 2.4, 2.6, 2.8 | +| DELETE | `/api/v1/accounts/{id}` | 删除账号 | 2.5, 2.6, 2.8 | +| POST | `/api/v1/accounts/{id}/tasks` | 创建签到任务 | 4.1, 4.2, 4.6 | +| GET | `/api/v1/accounts/{id}/tasks` | 获取任务列表 | 4.3 | +| PUT | `/api/v1/tasks/{id}` | 启用/禁用任务 | 4.4 | +| DELETE | `/api/v1/tasks/{id}` | 删除任务 | 4.5 | +| GET | `/api/v1/accounts/{id}/signin-logs` | 查询签到日志 | 8.1, 8.2, 8.3, 8.4, 8.5 | + +### 4. Task_Scheduler 内部接口 + +Task_Scheduler 不对外暴露 HTTP 接口,通过以下方式工作: + +- **启动时**:从 DB 加载 `is_enabled=True` 的任务,注册到 Celery Beat +- **运行时**:根据 Cron 表达式触发 `execute_signin_task` Celery task +- **动态更新**:通过 Redis pub/sub 接收任务变更通知,动态更新调度 + +### 5. Signin_Executor 内部流程 + +```mermaid +sequenceDiagram + participant Queue as Redis Queue + participant Exec as Signin_Executor + participant DB as MySQL + participant Weibo as Weibo.com + participant Proxy as Proxy Pool + + Queue->>Exec: 消费签到任务 (task_id, account_id) + Exec->>DB: 查询 Account (by account_id) + Exec->>Exec: 解密 Cookie (AES-256-GCM) + Exec->>Weibo: 验证 Cookie 有效性 + alt Cookie 无效 + Exec->>DB: 更新 account.status = "invalid_cookie" + Exec->>DB: 写入失败日志 + else Cookie 有效 + Exec->>Weibo: 获取超话列表 + loop 每个未签到超话 + Exec->>Proxy: 获取代理 IP + Exec->>Exec: 随机延迟 (1-3s) + Exec->>Weibo: 执行签到请求 + Exec->>DB: 写入 signin_log + end + end +``` + +## 数据模型 + +### ORM 模型定义(shared/models/) + +#### User 模型 + +```python +class User(Base): + __tablename__ = "users" + id = Column(String(36), primary_key=True, default=lambda: str(uuid4())) + username = Column(String(50), unique=True, nullable=False, index=True) + email = Column(String(255), unique=True, nullable=False, index=True) + hashed_password = Column(String(255), nullable=False) + created_at = Column(DateTime, server_default=func.now()) + is_active 
= Column(Boolean, default=True) + # Relationships + accounts = relationship("Account", back_populates="user", cascade="all, delete-orphan") +``` + +#### Account 模型 + +```python +class Account(Base): + __tablename__ = "accounts" + id = Column(String(36), primary_key=True, default=lambda: str(uuid4())) + user_id = Column(String(36), ForeignKey("users.id", ondelete="CASCADE"), nullable=False) + weibo_user_id = Column(String(20), nullable=False) + remark = Column(String(100)) + encrypted_cookies = Column(Text, nullable=False) + iv = Column(String(32), nullable=False) + status = Column(String(20), default="pending") # pending, active, invalid_cookie, banned + last_checked_at = Column(DateTime, nullable=True) + created_at = Column(DateTime, server_default=func.now()) + # Relationships + user = relationship("User", back_populates="accounts") + tasks = relationship("Task", back_populates="account", cascade="all, delete-orphan") + signin_logs = relationship("SigninLog", back_populates="account") +``` + +#### Task 模型 + +```python +class Task(Base): + __tablename__ = "tasks" + id = Column(String(36), primary_key=True, default=lambda: str(uuid4())) + account_id = Column(String(36), ForeignKey("accounts.id", ondelete="CASCADE"), nullable=False) + cron_expression = Column(String(50), nullable=False) + is_enabled = Column(Boolean, default=True) + created_at = Column(DateTime, server_default=func.now()) + # Relationships + account = relationship("Account", back_populates="tasks") +``` + +#### SigninLog 模型 + +```python +class SigninLog(Base): + __tablename__ = "signin_logs" + id = Column(BigInteger, primary_key=True, autoincrement=True) + account_id = Column(String(36), ForeignKey("accounts.id"), nullable=False) + topic_title = Column(String(100)) + status = Column(String(20), nullable=False) # success, failed_already_signed, failed_network, failed_banned + reward_info = Column(JSON, nullable=True) + error_message = Column(Text, nullable=True) + signed_at = Column(DateTime, 
server_default=func.now()) + # Relationships + account = relationship("Account", back_populates="signin_logs") +``` + +### Refresh Token 存储 + +Refresh Token 使用 Redis 存储,key 格式为 `refresh_token:{token_hash}`,value 为 `user_id`,TTL 为 7 天。这避免了在数据库中增加额外的表,同时利用 Redis 的自动过期机制。 + +```python +# 存储 +await redis.setex(f"refresh_token:{token_hash}", 7 * 24 * 3600, user_id) + +# 验证 +user_id = await redis.get(f"refresh_token:{token_hash}") + +# 刷新时删除旧 token,生成新 token(Token Rotation) +await redis.delete(f"refresh_token:{old_token_hash}") +await redis.setex(f"refresh_token:{new_token_hash}", 7 * 24 * 3600, user_id) +``` + +## 正确性属性 (Correctness Properties) + +*属性(Property)是指在系统所有有效执行中都应成立的特征或行为——本质上是对系统应做什么的形式化陈述。属性是人类可读规格说明与机器可验证正确性保证之间的桥梁。* + +### Property 1: Cookie 加密 Round-trip + +*For any* 有效的 Cookie 字符串,使用 AES-256-GCM 加密后再用相同密钥和 IV 解密,应产生与原始字符串完全相同的结果。 + +**Validates: Requirements 3.1, 3.2, 3.3** + +### Property 2: 用户注册后可登录获取信息 + +*For any* 有效的注册信息(用户名、邮箱、符合强度要求的密码),注册后使用相同邮箱和密码登录应成功返回 Token,使用该 Token 调用 `/auth/me` 应返回与注册时一致的用户名和邮箱。 + +**Validates: Requirements 1.1, 1.3, 1.7** + +### Property 3: 用户名/邮箱唯一性约束 + +*For any* 已注册的用户,使用相同用户名或相同邮箱再次注册应返回 409 Conflict 错误。 + +**Validates: Requirements 1.2** + +### Property 4: 无效凭证登录拒绝 + +*For any* 不存在的邮箱或错误的密码,登录请求应返回 401 Unauthorized 错误。 + +**Validates: Requirements 1.4** + +### Property 5: Refresh Token 轮换 + +*For any* 已登录用户的有效 Refresh Token,刷新操作应返回新的 Access Token 和新的 Refresh Token,且旧的 Refresh Token 应失效(再次使用应返回 401)。 + +**Validates: Requirements 1.5, 1.6** + +### Property 6: 弱密码拒绝 + +*For any* 不满足强度要求的密码(缺少大写字母、小写字母、数字或特殊字符,或长度不足8位),注册请求应返回 400 Bad Request。 + +**Validates: Requirements 1.8** + +### Property 7: 账号创建与列表一致性 + +*For any* 用户和任意数量的有效微博账号数据,创建 N 个账号后查询列表应返回恰好 N 条记录,每条记录的状态应为 "pending",且响应中不应包含解密后的 Cookie 明文。 + +**Validates: Requirements 2.1, 2.2, 2.7** + +### Property 8: 账号详情 Round-trip + +*For any* 已创建的微博账号,通过详情接口查询应返回与创建时一致的备注和微博用户 ID。 + +**Validates: Requirements 2.3** + +### Property 9: 账号更新反映 + +*For any* 
已创建的微博账号和任意新的备注字符串,更新备注后再次查询应返回更新后的值。 + +**Validates: Requirements 2.4** + +### Property 10: 账号删除级联 + +*For any* 拥有关联 Task 和 SigninLog 的账号,删除该账号后,查询该账号的 Task 列表和 SigninLog 列表应返回空结果。 + +**Validates: Requirements 2.5** + +### Property 11: 跨用户资源隔离 + +*For any* 两个不同用户 A 和 B,用户 A 尝试访问、修改或删除用户 B 的账号、任务或签到日志时,应返回 403 Forbidden。 + +**Validates: Requirements 2.6, 4.6, 8.5** + +### Property 12: 受保护接口认证要求 + +*For any* 受保护的 API 端点(账号管理、任务管理、日志查询),不携带 JWT Token 或携带无效 Token 的请求应返回 401 Unauthorized。 + +**Validates: Requirements 2.8, 8.4, 9.4** + +### Property 13: 有效 Cron 表达式创建任务 + +*For any* 有效的 Cron 表达式和已存在的账号,创建任务应成功,且查询该账号的任务列表应包含新创建的任务。 + +**Validates: Requirements 4.1, 4.3** + +### Property 14: 无效 Cron 表达式拒绝 + +*For any* 无效的 Cron 表达式字符串,创建任务请求应返回 400 Bad Request。 + +**Validates: Requirements 4.2** + +### Property 15: 任务启用/禁用切换 + +*For any* 已创建的任务,切换 `is_enabled` 状态后查询应反映新的状态值。 + +**Validates: Requirements 4.4** + +### Property 16: 任务删除 + +*For any* 已创建的任务,删除后查询该任务应返回 404 或不在列表中出现。 + +**Validates: Requirements 4.5** + +### Property 17: 调度器加载已启用任务 + +*For any* 数据库中的任务集合,Task_Scheduler 启动时加载的任务数量应等于 `is_enabled=True` 的任务数量。 + +**Validates: Requirements 5.1** + +### Property 18: 分布式锁防重复调度 + +*For any* 签到任务,同一时刻并发触发两次应只产生一次实际执行。 + +**Validates: Requirements 5.5** + +### Property 19: 签到结果持久化 + +*For any* 签到执行结果(成功或失败),`signin_logs` 表中应存在对应的记录,且记录的 `account_id`、`status` 和 `topic_title` 与执行结果一致。 + +**Validates: Requirements 6.1, 6.4** + +### Property 20: Cookie 失效时更新账号状态 + +*For any* Cookie 已失效的账号,执行签到时应将账号状态更新为 "invalid_cookie"。 + +**Validates: Requirements 6.5, 3.4** + +### Property 21: 随机延迟范围 + +*For any* 调用反爬虫延迟函数的结果,延迟值应在配置的 `[min, max]` 范围内。 + +**Validates: Requirements 7.1** + +### Property 22: User-Agent 来源 + +*For any* 调用 User-Agent 选择函数的结果,返回的 UA 字符串应属于预定义列表中的某一个。 + +**Validates: Requirements 7.2** + +### Property 23: 签到日志时间倒序 + +*For any* 包含多条签到日志的账号,查询返回的日志列表应按 `signed_at` 降序排列。 + +**Validates: Requirements 8.1** + +### Property 24: 签到日志分页 + +*For any* 包含 N 条日志的账号和分页参数 
(page, size),返回的记录数应等于 `min(size, N - (page-1)*size)` 且总记录数应等于 N。 + +**Validates: Requirements 8.2** + +### Property 25: 签到日志状态过滤 + +*For any* 状态过滤参数,返回的所有日志记录的 `status` 字段应与过滤参数一致。 + +**Validates: Requirements 8.3** + +### Property 26: 统一响应格式 + +*For any* API 调用,成功响应应包含 `success=true` 和 `data` 字段;错误响应应包含 `success=false`、`data=null` 和 `error` 字段。 + +**Validates: Requirements 9.1, 9.2, 9.3** + +## 错误处理 + +### 错误分类与处理策略 + +| 错误类型 | HTTP 状态码 | 错误码 | 处理策略 | +|----------|------------|--------|----------| +| 请求参数校验失败 | 400 | VALIDATION_ERROR | 返回字段级错误详情 | +| 未认证 | 401 | UNAUTHORIZED | 返回标准 401 响应 | +| 权限不足 | 403 | FORBIDDEN | 返回资源不可访问提示 | +| 资源不存在 | 404 | NOT_FOUND | 返回资源未找到提示 | +| 资源冲突 | 409 | CONFLICT | 返回冲突字段说明 | +| 服务器内部错误 | 500 | INTERNAL_ERROR | 记录详细日志,返回通用错误提示 | + +### 签到执行错误处理 + +- **Cookie 解密失败**:标记账号为 `invalid_cookie`,记录错误日志,终止该账号签到 +- **Cookie 验证失败**(微博返回未登录):同上 +- **网络超时/连接错误**:记录 `failed_network` 日志,不更改账号状态(可能是临时问题) +- **微博返回封禁**:标记账号为 `banned`,记录日志,发送通知 +- **代理池不可用**:降级为直连,记录警告日志 +- **Celery 任务失败**:自动重试最多3次,间隔60秒,最终失败记录日志 + +### 全局异常处理 + +所有 FastAPI 服务注册统一的异常处理器: + +```python +@app.exception_handler(HTTPException) +async def http_exception_handler(request, exc): + return error_response(exc.detail, f"HTTP_{exc.status_code}", status_code=exc.status_code) + +@app.exception_handler(RequestValidationError) +async def validation_exception_handler(request, exc): + details = [{"field": e["loc"][-1], "message": e["msg"]} for e in exc.errors()] + return error_response("Validation failed", "VALIDATION_ERROR", details, 400) +``` + +## 测试策略 + +### 测试框架选择 + +- **单元测试**:`pytest` + `pytest-asyncio`(异步测试支持) +- **属性测试**:`hypothesis`(Python 属性测试库) +- **HTTP 测试**:`httpx` + FastAPI `TestClient` +- **数据库测试**:使用 SQLite in-memory 或测试专用 MySQL 实例 + +### 测试分层 + +#### 1. 单元测试 +- 加密/解密函数的边界情况 +- 密码强度验证的各种组合 +- Cron 表达式验证 +- 响应格式化函数 +- 具体的错误场景(网络超时、解密失败等) + +#### 2. 
属性测试(Property-Based Testing) +- 使用 `hypothesis` 库,每个属性测试至少运行 100 次迭代 +- 每个测试用注释标注对应的设计文档属性编号 +- 标注格式:`# Feature: multi-user-signin, Property {N}: {property_text}` +- 每个正确性属性对应一个独立的属性测试函数 + +#### 3. 集成测试 +- API 端点的完整请求/响应流程 +- 数据库 CRUD 操作的正确性 +- 服务间通过 Redis 消息队列的交互 +- Celery 任务的调度和执行 + +### 属性测试配置 + +```python +from hypothesis import given, settings, strategies as st + +@settings(max_examples=100) +@given(cookie=st.text(min_size=1, max_size=1000)) +def test_cookie_encryption_roundtrip(cookie): + """Feature: multi-user-signin, Property 1: Cookie 加密 Round-trip""" + key = generate_test_key() + ciphertext, iv = encrypt_cookie(cookie, key) + decrypted = decrypt_cookie(ciphertext, iv, key) + assert decrypted == cookie +``` + +### 测试目录结构 + +``` +backend/ +├── tests/ +│ ├── conftest.py # 共享 fixtures(DB session, test client 等) +│ ├── unit/ +│ │ ├── test_crypto.py # 加密/解密单元测试 +│ │ ├── test_password.py # 密码验证单元测试 +│ │ └── test_cron.py # Cron 表达式验证单元测试 +│ ├── property/ +│ │ ├── test_crypto_props.py # Property 1 +│ │ ├── test_auth_props.py # Property 2-6 +│ │ ├── test_account_props.py # Property 7-12 +│ │ ├── test_task_props.py # Property 13-18 +│ │ ├── test_signin_props.py # Property 19-22 +│ │ └── test_log_props.py # Property 23-26 +│ └── integration/ +│ ├── test_auth_flow.py # 完整认证流程 +│ ├── test_account_flow.py # 账号管理流程 +│ └── test_signin_flow.py # 签到执行流程 +``` diff --git a/.kiro/specs/multi-user-signin/requirements.md b/.kiro/specs/multi-user-signin/requirements.md new file mode 100644 index 0000000..9364151 --- /dev/null +++ b/.kiro/specs/multi-user-signin/requirements.md @@ -0,0 +1,138 @@ +# 需求文档:Weibo-HotSign 多用户签到系统 + +## 简介 + +Weibo-HotSign 是一个分布式微博超话自动签到系统,采用微服务架构(FastAPI + Celery + MySQL + Redis)。本需求文档覆盖系统的五大核心模块:用户认证(含 Token 刷新)、微博账号管理、定时签到任务配置、签到执行引擎、以及整体架构重构。目标是将当前分散的、含大量 Mock 实现的代码库重构为一个真正可运行的、模块间紧密集成的生产级系统。 + +## 术语表 + +- **System**: 指 Weibo-HotSign 后端系统整体 +- **Auth_Service**: 用户认证与授权服务(`backend/auth_service`) +- **API_Service**: API 
网关与账号/任务管理服务(`backend/api_service`) +- **Task_Scheduler**: 基于 Celery Beat 的定时任务调度服务(`backend/task_scheduler`) +- **Signin_Executor**: 签到执行 Worker 服务(`backend/signin_executor`) +- **User**: 使用本系统的注册用户 +- **Weibo_Account**: 用户绑定到系统中的微博账号,以 Cookie 形式存储凭证 +- **Task**: 用户为某个 Weibo_Account 配置的定时签到任务 +- **Cookie**: 微博网站的登录凭证,用于模拟已登录状态 +- **Cron_Expression**: 标准 Cron 表达式,用于定义任务调度时间 +- **Signin_Log**: 每次签到执行的结果记录 +- **JWT**: JSON Web Token,用于用户身份认证 +- **Refresh_Token**: 用于在 Access Token 过期后获取新 Token 的长期凭证 +- **AES-256-GCM**: 对称加密算法,用于加密存储 Cookie + +## 需求 + +### 需求 1:用户认证与 Token 管理 + +**用户故事:** 作为用户,我希望能够注册、登录并安全地维持会话,以便长期使用系统而无需频繁重新登录。 + +#### 验收标准 + +1. WHEN 用户提交有效的注册信息(用户名、邮箱、密码),THE Auth_Service SHALL 创建用户账户并返回用户信息 +2. WHEN 用户提交的用户名或邮箱已存在,THE Auth_Service SHALL 返回 409 Conflict 错误并指明冲突字段 +3. WHEN 用户提交有效的邮箱和密码进行登录,THE Auth_Service SHALL 返回包含 Access Token 和 Refresh Token 的认证响应 +4. WHEN 用户提交无效的邮箱或密码进行登录,THE Auth_Service SHALL 返回 401 Unauthorized 错误 +5. WHEN 用户携带有效的 Refresh Token 请求刷新,THE Auth_Service SHALL 返回新的 Access Token 和新的 Refresh Token +6. WHEN 用户携带过期或无效的 Refresh Token 请求刷新,THE Auth_Service SHALL 返回 401 Unauthorized 错误 +7. WHEN 用户携带有效的 Access Token 请求 `/auth/me`,THE Auth_Service SHALL 返回当前用户的完整信息 +8. IF 用户密码不满足强度要求(至少8位,含大小写字母、数字和特殊字符),THEN THE Auth_Service SHALL 返回 400 Bad Request 并说明密码强度不足 + +### 需求 2:微博账号管理 + +**用户故事:** 作为用户,我希望能够添加、查看、更新和删除我的微博账号,以便集中管理多个微博账号的签到。 + +#### 验收标准 + +1. WHEN 用户提交微博 Cookie 和备注信息,THE API_Service SHALL 使用 AES-256-GCM 加密 Cookie 后存储,并返回新创建的账号信息 +2. WHEN 用户请求获取账号列表,THE API_Service SHALL 返回该用户拥有的所有 Weibo_Account(不包含解密后的 Cookie) +3. WHEN 用户请求获取单个账号详情,THE API_Service SHALL 返回该账号的状态、备注和最近签到信息 +4. WHEN 用户请求更新账号的备注或 Cookie,THE API_Service SHALL 更新对应字段并返回更新后的账号信息 +5. WHEN 用户请求删除一个账号,THE API_Service SHALL 级联删除该账号关联的所有 Task 和 Signin_Log,并返回成功响应 +6. IF 用户尝试操作不属于自己的账号,THEN THE API_Service SHALL 返回 403 Forbidden 错误 +7. WHEN 账号被创建时,THE API_Service SHALL 将账号状态初始化为 "pending" +8. 
THE API_Service SHALL 对所有账号管理接口要求有效的 JWT Access Token 认证 + +### 需求 3:Cookie 加密与验证 + +**用户故事:** 作为用户,我希望我的微博 Cookie 被安全存储,并且系统能自动检测 Cookie 是否失效,以便我及时更新。 + +#### 验收标准 + +1. WHEN 存储 Cookie 时,THE API_Service SHALL 使用 AES-256-GCM 算法加密,并将密文和 IV 分别存储到 `encrypted_cookies` 和 `iv` 字段 +2. WHEN 读取 Cookie 用于签到时,THE Signin_Executor SHALL 使用对应的 IV 解密 Cookie 并还原为原始字符串 +3. FOR ALL 有效的 Cookie 字符串,加密后再解密 SHALL 产生与原始字符串完全相同的结果(Round-trip 属性) +4. IF 解密过程中发生错误(密钥不匹配、数据损坏),THEN THE System SHALL 将账号状态标记为 "invalid_cookie" 并记录错误日志 + +### 需求 4:定时签到任务配置 + +**用户故事:** 作为用户,我希望能够为每个微博账号配置独立的签到时间计划,以便灵活控制签到频率和时间。 + +#### 验收标准 + +1. WHEN 用户为某个账号创建签到任务并提供有效的 Cron_Expression,THE API_Service SHALL 创建任务记录并将任务注册到 Task_Scheduler +2. WHEN 用户提交无效的 Cron_Expression,THE API_Service SHALL 返回 400 Bad Request 并说明表达式格式错误 +3. WHEN 用户请求获取某个账号的任务列表,THE API_Service SHALL 返回该账号关联的所有 Task 及其启用状态 +4. WHEN 用户启用或禁用一个任务,THE API_Service SHALL 更新数据库中的 `is_enabled` 字段,并同步更新 Task_Scheduler 中的调度状态 +5. WHEN 用户删除一个任务,THE API_Service SHALL 从数据库删除任务记录,并从 Task_Scheduler 中移除对应的调度 +6. IF 用户尝试为不属于自己的账号创建任务,THEN THE API_Service SHALL 返回 403 Forbidden 错误 + +### 需求 5:任务调度引擎 + +**用户故事:** 作为系统,我需要根据用户配置的 Cron 表达式准时触发签到任务,以确保签到按时执行。 + +#### 验收标准 + +1. WHEN Task_Scheduler 启动时,THE Task_Scheduler SHALL 从数据库加载所有 `is_enabled=True` 的任务并注册到 Celery Beat 调度器 +2. WHEN Celery Beat 根据 Cron_Expression 触发一个任务,THE Task_Scheduler SHALL 向消息队列发送包含 `task_id` 和 `account_id` 的签到消息 +3. WHEN 新任务被创建或现有任务被更新,THE Task_Scheduler SHALL 动态更新 Celery Beat 的调度配置而无需重启服务 +4. IF 任务执行失败,THEN THE Task_Scheduler SHALL 按照配置的重试策略(最多3次,间隔60秒)进行重试 +5. WHILE Task_Scheduler 运行中,THE Task_Scheduler SHALL 使用 Redis 分布式锁确保同一任务不会被重复调度 + +### 需求 6:签到执行引擎 + +**用户故事:** 作为系统,我需要真正执行微博超话签到操作,并将结果持久化到数据库,以替代当前的 Mock 实现。 + +#### 验收标准 + +1. WHEN Signin_Executor 从消息队列接收到签到任务,THE Signin_Executor SHALL 从数据库查询对应的 Weibo_Account 信息(替代 Mock 数据) +2. WHEN 执行签到前,THE Signin_Executor SHALL 解密 Cookie 并验证其有效性 +3. WHEN Cookie 有效时,THE Signin_Executor SHALL 获取该账号关注的超话列表并逐一执行签到 +4. 
WHEN 单个超话签到完成后,THE Signin_Executor SHALL 将结果(成功/失败/已签到、奖励信息、错误信息)写入 `signin_logs` 表 +5. IF Cookie 已失效,THEN THE Signin_Executor SHALL 将账号状态更新为 "invalid_cookie" 并终止该账号的签到流程 +6. IF 签到过程中遇到网络错误,THEN THE Signin_Executor SHALL 记录错误日志并将该超话的签到状态标记为 "failed_network" + +### 需求 7:反爬虫防护 + +**用户故事:** 作为系统,我需要在执行签到时采取反爬虫措施,以降低被微博风控系统检测和封禁的风险。 + +#### 验收标准 + +1. WHEN 执行签到请求时,THE Signin_Executor SHALL 在每次请求之间插入随机延迟(1-3秒可配置) +2. WHEN 构造 HTTP 请求时,THE Signin_Executor SHALL 从预定义的 User-Agent 列表中随机选择一个 +3. WHEN 代理池服务可用时,THE Signin_Executor SHALL 为每次签到请求分配一个代理 IP +4. IF 代理池服务不可用,THEN THE Signin_Executor SHALL 使用直连方式继续执行签到并记录警告日志 + +### 需求 8:签到日志与查询 + +**用户故事:** 作为用户,我希望能够查看每个微博账号的签到历史记录,以便了解签到执行情况。 + +#### 验收标准 + +1. WHEN 用户请求查看某个账号的签到日志,THE API_Service SHALL 返回按时间倒序排列的 Signin_Log 列表 +2. WHEN 用户请求签到日志时提供分页参数,THE API_Service SHALL 返回对应页码的日志数据和总记录数 +3. WHEN 用户请求签到日志时提供状态过滤参数,THE API_Service SHALL 仅返回匹配该状态的日志记录 +4. THE API_Service SHALL 对签到日志查询接口要求有效的 JWT Access Token 认证 +5. IF 用户尝试查看不属于自己账号的签到日志,THEN THE API_Service SHALL 返回 403 Forbidden 错误 + +### 需求 9:统一 API 响应格式与错误处理 + +**用户故事:** 作为 API 消费者,我希望所有接口返回统一格式的响应,以便前端能够一致地处理成功和错误情况。 + +#### 验收标准 + +1. THE API_Service SHALL 对所有成功响应返回 `{"success": true, "data": ..., "message": ...}` 格式 +2. THE API_Service SHALL 对所有错误响应返回 `{"success": false, "data": null, "message": ..., "error": {"code": ..., "details": [...]}}` 格式 +3. WHEN 请求参数校验失败,THE API_Service SHALL 返回 400 状态码,并在 `error.details` 中列出每个字段的具体错误 +4. WHEN 未认证用户访问受保护接口,THE API_Service SHALL 返回 401 状态码和标准错误响应 + diff --git a/.kiro/specs/multi-user-signin/tasks.md b/.kiro/specs/multi-user-signin/tasks.md new file mode 100644 index 0000000..5b32fe9 --- /dev/null +++ b/.kiro/specs/multi-user-signin/tasks.md @@ -0,0 +1,185 @@ +# 实现计划:Weibo-HotSign 多用户签到系统 + +## 概述 + +按照自底向上的顺序实现:先构建共享基础层,再逐步实现各微服务。每个阶段包含核心实现和对应的测试任务。 + +## Tasks + +- [x] 1. 
创建共享模块 (shared/) + - [x] 1.1 创建 `backend/shared/` 包结构和共享配置 + - 创建 `shared/__init__.py`、`shared/config.py` + - 配置项包括 DATABASE_URL、REDIS_URL、JWT_SECRET_KEY、COOKIE_ENCRYPTION_KEY + - 使用 pydantic-settings 从环境变量加载 + - _Requirements: 10.1, 10.2_ + - [x] 1.2 创建共享 ORM 模型和数据库连接管理 + - 创建 `shared/models/base.py`:AsyncEngine、AsyncSessionLocal、Base、get_db() + - 创建 `shared/models/user.py`:User 模型(含 accounts relationship) + - 创建 `shared/models/account.py`:Account 模型(含 tasks、signin_logs relationship) + - 创建 `shared/models/task.py`:Task 模型 + - 创建 `shared/models/signin_log.py`:SigninLog 模型 + - 所有模型与 `init-db.sql` 中的表结构对齐 + - _Requirements: 10.1, 10.2, 10.3_ + - [x] 1.3 实现 Cookie 加密/解密工具 (`shared/crypto.py`) + - 使用 pycryptodome 实现 AES-256-GCM 加密/解密 + - `encrypt_cookie(plaintext, key) -> (ciphertext_b64, iv_b64)` + - `decrypt_cookie(ciphertext_b64, iv_b64, key) -> plaintext` + - 密钥从环境变量 COOKIE_ENCRYPTION_KEY 派生(使用 SHA-256 哈希为32字节) + - _Requirements: 3.1, 3.2, 10.4_ + - [ ]* 1.4 编写 Cookie 加密 Round-trip 属性测试 + - **Property 1: Cookie 加密 Round-trip** + - 使用 hypothesis 生成随机字符串,验证 encrypt 后 decrypt 还原 + - **Validates: Requirements 3.1, 3.2, 3.3** + - [x] 1.5 实现统一响应格式工具 (`shared/response.py`) + - `success_response(data, message)` 返回标准成功格式 + - `error_response(message, code, details, status_code)` 返回标准错误格式 + - _Requirements: 9.1, 9.2_ + +- [x] 2. 
重构 Auth_Service(Token 刷新机制) + - [x] 2.1 重构 Auth_Service 使用 shared 模块 + - 修改 `auth_service/app/main.py` 导入 shared models 和 get_db + - 删除 `auth_service/app/models/database.py` 中的重复 User 模型定义 + - 更新 `auth_service/app/services/auth_service.py` 使用 shared User 模型 + - _Requirements: 10.3_ + - [x] 2.2 实现 Refresh Token 机制 + - 在 `auth_service/app/utils/security.py` 中添加 `create_refresh_token()` 和 `verify_refresh_token()` + - Refresh Token 使用 Redis 存储(key: `refresh_token:{hash}`, value: `user_id`, TTL: 7天) + - 登录接口返回 access_token + refresh_token + - 实现 `/auth/refresh` 端点:验证旧 token → 删除旧 token → 生成新 token 对(Token Rotation) + - 更新 `auth_service/app/schemas/user.py` 添加 RefreshToken 相关 schema + - _Requirements: 1.3, 1.5, 1.6_ + - [x] 2.3 为 Auth_Service 所有响应应用统一格式 + - 注册、登录、刷新、获取用户信息接口使用 `shared/response.py` 格式化响应 + - 注册全局异常处理器(HTTPException、RequestValidationError) + - _Requirements: 9.1, 9.2, 9.3, 9.4_ + - [ ]* 2.4 编写认证流程属性测试 + - **Property 2: 用户注册后可登录获取信息** + - **Property 3: 用户名/邮箱唯一性约束** + - **Property 4: 无效凭证登录拒绝** + - **Property 5: Refresh Token 轮换** + - **Property 6: 弱密码拒绝** + - **Validates: Requirements 1.1-1.8** + +- [x] 3. Checkpoint - 确保共享模块和认证服务测试通过 + - 运行所有测试,确认 shared 模块和 auth_service 工作正常 + - 如有问题请向用户确认 + +- [x] 4. 
实现 API_Service(账号管理) + - [x] 4.1 创建 API_Service 基础结构 + - 创建 `api_service/app/__init__.py`、`main.py`、`config.py`、`dependencies.py` + - `main.py`:FastAPI 应用,注册 CORS、全局异常处理器、路由 + - `dependencies.py`:JWT 认证依赖(`get_current_user`),复用 shared 的 JWT 验证逻辑 + - _Requirements: 2.8, 9.1, 9.2, 9.3, 9.4_ + - [x] 4.2 实现微博账号 CRUD 路由 + - 创建 `api_service/app/schemas/account.py`:AccountCreate、AccountUpdate、AccountResponse + - 创建 `api_service/app/routers/accounts.py`: + - `POST /api/v1/accounts`:加密 Cookie 后存储,状态初始化为 "pending" + - `GET /api/v1/accounts`:返回当前用户的账号列表(不含 Cookie 明文) + - `GET /api/v1/accounts/{id}`:返回账号详情 + - `PUT /api/v1/accounts/{id}`:更新备注或 Cookie(更新 Cookie 时重新加密) + - `DELETE /api/v1/accounts/{id}`:删除账号(级联删除 tasks 和 logs) + - 所有接口验证资源归属(user_id 匹配) + - _Requirements: 2.1-2.8_ + - [ ]* 4.3 编写账号管理属性测试 + - **Property 7: 账号创建与列表一致性** + - **Property 8: 账号详情 Round-trip** + - **Property 9: 账号更新反映** + - **Property 10: 账号删除级联** + - **Property 11: 跨用户资源隔离** + - **Property 12: 受保护接口认证要求** + - **Validates: Requirements 2.1-2.8, 4.6, 8.5, 8.4, 9.4** + +- [-] 5. 实现 API_Service(任务配置) + - [x] 5.1 实现签到任务 CRUD 路由 + - 创建 `api_service/app/schemas/task.py`:TaskCreate、TaskUpdate、TaskResponse + - 创建 `api_service/app/routers/tasks.py`: + - `POST /api/v1/accounts/{id}/tasks`:验证 Cron 表达式有效性,创建任务 + - `GET /api/v1/accounts/{id}/tasks`:获取账号的任务列表 + - `PUT /api/v1/tasks/{id}`:更新任务(启用/禁用) + - `DELETE /api/v1/tasks/{id}`:删除任务 + - 使用 `croniter` 库验证 Cron 表达式 + - 任务创建/更新/删除时通过 Redis pub/sub 通知 Task_Scheduler + - _Requirements: 4.1-4.6_ + - [ ]* 5.2 编写任务配置属性测试 + - **Property 13: 有效 Cron 表达式创建任务** + - **Property 14: 无效 Cron 表达式拒绝** + - **Property 15: 任务启用/禁用切换** + - **Property 16: 任务删除** + - **Validates: Requirements 4.1-4.6** + +- [x] 6. 
实现 API_Service(签到日志查询) + - [x] 6.1 实现签到日志查询路由 + - 创建 `api_service/app/schemas/signin_log.py`:SigninLogResponse、PaginatedResponse + - 创建 `api_service/app/routers/signin_logs.py`: + - `GET /api/v1/accounts/{id}/signin-logs`:支持分页(page, size)和状态过滤(status) + - 返回按 `signed_at` 降序排列的日志 + - 返回总记录数用于前端分页 + - 验证账号归属权限 + - _Requirements: 8.1-8.5_ + - [ ]* 6.2 编写签到日志查询属性测试 + - **Property 23: 签到日志时间倒序** + - **Property 24: 签到日志分页** + - **Property 25: 签到日志状态过滤** + - **Validates: Requirements 8.1-8.3** + +- [ ] 7. Checkpoint - 确保 API_Service 所有测试通过 + - 运行所有测试,确认账号管理、任务配置、日志查询功能正常 + - 如有问题请向用户确认 + +- [ ] 8. 重构 Task_Scheduler(真实数据库交互) + - [ ] 8.1 重构 Task_Scheduler 使用 shared 模块 + - 修改 `task_scheduler/app/celery_app.py` 导入 shared models + - 实现 `load_scheduled_tasks()`:从 DB 查询 `is_enabled=True` 的 Task,动态注册到 Celery Beat + - 实现 Redis pub/sub 监听:接收任务变更通知,动态更新调度 + - 替换 `signin_tasks.py` 中的 mock 账号列表为真实 DB 查询 + - _Requirements: 5.1, 5.2, 5.3_ + - [ ] 8.2 实现分布式锁和重试机制 + - 使用 Redis SETNX 实现分布式锁,防止同一任务重复调度 + - 配置 Celery 任务重试:`max_retries=3`、`default_retry_delay=60` + - _Requirements: 5.4, 5.5_ + - [ ]* 8.3 编写调度器属性测试 + - **Property 17: 调度器加载已启用任务** + - **Property 18: 分布式锁防重复调度** + - **Validates: Requirements 5.1, 5.5** + +- [ ] 9. 重构 Signin_Executor(真实数据库交互) + - [ ] 9.1 重构 Signin_Executor 使用 shared 模块 + - 修改 `signin_service.py` 中的 `_get_account_info()` 从 DB 查询真实 Account 数据 + - 修改 `weibo_client.py` 中的 `_decrypt_cookies()` 使用 `shared/crypto.py` + - 实现签到结果写入 `signin_logs` 表(替代 mock) + - 实现 Cookie 失效时更新 `account.status = "invalid_cookie"` + - _Requirements: 6.1, 6.2, 6.4, 6.5_ + - [ ] 9.2 实现反爬虫防护模块 + - 实现随机延迟函数:返回 `[min, max]` 范围内的随机值 + - 实现 User-Agent 轮换:从预定义列表中随机选择 + - 实现代理池集成:调用 proxy pool 服务获取代理,不可用时降级为直连 + - _Requirements: 7.1, 7.2, 7.3, 7.4_ + - [ ]* 9.3 编写签到执行属性测试 + - **Property 19: 签到结果持久化** + - **Property 20: Cookie 失效时更新账号状态** + - **Property 21: 随机延迟范围** + - **Property 22: User-Agent 来源** + - **Validates: Requirements 6.1, 6.4, 6.5, 7.1, 7.2** + +- [ ] 10. 
更新 Dockerfile 和集成配置 + - [ ] 10.1 更新 `backend/Dockerfile` + - 在每个构建阶段添加 `COPY shared/ ./shared/` + - 确保 shared 模块在所有服务容器中可用 + - _Requirements: 10.1, 10.3_ + - [ ] 10.2 更新 `backend/requirements.txt` + - 添加 `croniter`(Cron 表达式解析) + - 添加 `hypothesis`(属性测试) + - 添加 `pytest`、`pytest-asyncio`(测试框架) + - 确认 `pycryptodome`、`redis`、`celery` 等已存在 + +- [ ] 11. 最终 Checkpoint - 全量测试 + - 运行所有单元测试和属性测试 + - 验证各服务可独立启动 + - 如有问题请向用户确认 + +## 备注 + +- 标记 `*` 的任务为可选测试任务,可跳过以加快 MVP 进度 +- 每个任务引用了具体的需求编号以确保可追溯性 +- 属性测试使用 `hypothesis` 库,每个测试至少运行 100 次迭代 +- Checkpoint 任务用于阶段性验证,确保增量开发的正确性 diff --git a/README.md b/README.md new file mode 100644 index 0000000..6febbcd --- /dev/null +++ b/README.md @@ -0,0 +1,187 @@ +# Weibo-HotSign - 微博超话智能签到系统 + +基于开发文档实现的分布式微博超话智能签到系统,具备多账户管理、高稳定性反爬虫、Web可视化管理等核心功能。 + +## 🏗️ 项目架构 + +本项目采用微服务架构,包含以下核心服务: + +- **认证服务** (auth_service) - 用户注册、登录、JWT认证 +- **API网关** (api_service) - 统一API入口和路由 +- **任务调度** (task_scheduler) - 基于Celery Beat的定时任务 +- **签到执行** (signin_executor) - 核心签到业务逻辑 +- **浏览器自动化** (browser_automation_service) - 处理复杂JS加密 +- **通知中心** (notification_hub) - 多渠道通知分发 +- **前端应用** (frontend) - React可视化界面 + +## 🚀 快速启动 + +### 环境要求 +- Docker & Docker Compose +- Python 3.11+ +- Node.js 18+ + +### 启动步骤 + +1. **克隆项目** + ```bash + cd d:/code/weibo + ``` + +2. **启动所有服务** + ```bash + docker-compose up -d + ``` + +3. **查看服务状态** + ```bash + docker-compose ps + ``` + +4. 
**访问服务** + - 前端界面: http://localhost:3000 + - API文档: http://localhost:8000/docs + - 认证服务: http://localhost:8001/docs + - 健康检查: http://localhost:8000/health + +## 📋 已实现功能 + +### ✅ 认证服务 (auth_service) +- [x] 用户注册 (`POST /auth/register`) +- [x] 用户登录 (`POST /auth/login`) +- [x] JWT Token生成和验证 +- [x] 密码强度验证和bcrypt哈希 +- [x] CORS跨域支持 +- [x] 数据库连接管理 +- [x] 完整的错误处理和日志记录 + +### ✅ 任务调度服务 (task_scheduler) +- [x] Celery Beat定时任务调度 +- [x] Cron表达式解析和动态任务加载 +- [x] 任务队列管理 (Redis) +- [x] 任务重试和错误处理 +- [x] 调用签到执行服务 + +### ✅ 签到执行服务 (signin_executor) +- [x] 微博超话签到核心逻辑 +- [x] 动态IP代理池集成 (模拟) +- [x] 浏览器指纹模拟 (模拟) +- [x] Cookie管理和验证 (模拟) +- [x] 完整的签到工作流和状态管理 +- [x] 反爬虫保护机制 (随机延迟) + +### ✅ 基础设施 +- [x] Docker容器化配置 +- [x] PostgreSQL数据库初始化 +- [x] Redis缓存配置 +- [x] Nginx反向代理配置 +- [x] 微服务网络通信 + +### 🔄 待实现功能 + +#### API网关服务 (api_service) +- 请求路由和负载均衡 +- API组合和聚合 +- 速率限制和熔断 + +#### 浏览器自动化服务 (browser_automation_service) +- Playwright无头浏览器 +- JS加密参数逆向 +- 网络请求拦截和提取 + +#### 前端React应用 (frontend) +- 用户登录注册界面 +- 账号管理面板 +- 任务配置界面 +- 签到日志查看 +- 实时状态监控 + +#### 通知中心服务 (notification_hub) +- 多渠道通知分发 (Server酱, Email等) + +## 🛠️ 技术栈 + +### 后端 +- **Web框架**: FastAPI (Python) +- **数据库**: PostgreSQL + SQLAlchemy +- **缓存**: Redis +- **任务队列**: Celery + Redis +- **认证**: JWT + bcrypt +- **浏览器自动化**: Playwright + +### 前端 +- **框架**: React 18 + Vite +- **状态管理**: Zustand +- **UI库**: Ant Design +- **HTTP客户端**: Axios + +### 基础设施 +- **容器化**: Docker + Docker Compose +- **反向代理**: Nginx +- **监控**: Prometheus + Grafana +- **日志**: ELK Stack + +## 📊 数据库设计 + +系统包含以下核心数据表: + +- `users` - 用户信息 +- `accounts` - 微博账号管理 +- `tasks` - 签到任务配置 +- `signin_logs` - 签到历史记录 + +详细表结构见 `init-db.sql` + +## 🔧 配置说明 + +### 环境变量 +主要配置通过环境变量设置: + +```bash +# 数据库 +DATABASE_URL=postgresql+asyncpg://user:pass@postgres:5432/dbname +REDIS_URL=redis://redis:6379 + +# JWT +JWT_SECRET_KEY=your-super-secret-jwt-key +JWT_EXPIRATION_HOURS=24 + +# 应用 +DEBUG=true +HOST=0.0.0.0 +PORT=8000 +``` + +## 📝 API规范 + +遵循RESTful设计规范: + +- **协议**: HTTPS +- **数据格式**: JSON +- **认证**: 
# Base stage for all Python services
FROM python:3.11-slim AS base

# Set working directory
WORKDIR /app

# Install common system dependencies:
#  - gcc + default-libmysqlclient-dev: build native wheels (MySQL client)
#  - curl: required by the HEALTHCHECK instructions below.
#    NOTE: python:3.11-slim does NOT ship curl; without this package every
#    health probe fails and the containers are permanently marked unhealthy.
RUN apt-get update && apt-get install -y --no-install-recommends \
    gcc \
    default-libmysqlclient-dev \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Copy and install unified requirements
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Create non-root user for security
RUN groupadd -r appuser && useradd -r -g appuser appuser


# --- API Gateway Service Stage ---
FROM base AS api_gateway

# Copy application code
COPY api_service/app/ ./app/

# Switch to non-root user
USER appuser

# Expose port
EXPOSE 8000

# Health check (curl is installed in the base stage)
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8000/health || exit 1

# Start application
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]


# --- Auth Service Stage ---
FROM base AS auth_service

# Copy application code
COPY auth_service/app/ ./app/

# Switch to non-root user
USER appuser

# Expose port
EXPOSE 8000

# Health check (curl is installed in the base stage)
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8000/health || exit 1

# Start application
CMD ["python", "-m", "app.main"]


# --- Task Scheduler Service Stage ---
FROM base AS task_scheduler

# Copy application code
COPY task_scheduler/app/ ./app/

# Switch to non-root user
USER appuser

# Start Celery Beat scheduler (no HTTP port, so no HEALTHCHECK here)
CMD ["celery", "-A", "app.celery_app", "beat", "--loglevel=info"]


# --- Sign-in Executor Service Stage ---
FROM base AS signin_executor

# Copy application code
COPY signin_executor/app/ ./app/

# Switch to non-root user
USER appuser

# Expose port
EXPOSE 8000

# Health check (curl is installed in the base stage)
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8000/health || exit 1

# Start application
CMD ["python", "-m", "app.main"]
zc_C6>3G5Jun|Wq*%v_wETwHi-?#hfYonaDSi78|guo@7`PnGi2vdHySLJ1Qc%m#eh zaxH4+JYwdk>A{x`G_9R6XXh5o3v-LJ8P?hZHXiScS;ynL~yB+54*CyqWI9;ptW zsSTd4N2H|oCxYQGKGc!aqt!v}@Mr6vuHSueC!tppI_Sj)>PU`g`&3{057nW=cgDXQ z|90iOg|FRj+(#p$4@X9S9s1?SgNqNx&hLz5sw0`5!RhMYbR~YUmoL;}{f}ZNAI47Z z#7Rtf+ynjISBsXvzhNtQT z*etl@98JTNU+n%rFw`YM5bB~N$Z)tYXh?XYf+n8bYiPRCt=G^imCjy6&sRG8E;?1| z)@$g^N@w4V9r}3ulZo3CWwoxNv9mv`4~BO}r>dh literal 0 HcmV?d00001 diff --git a/backend/api_service/app/__pycache__/main.cpython-311.pyc b/backend/api_service/app/__pycache__/main.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d7fb92daed318327a74455cc509e0bd4a4d4dc1d GIT binary patch literal 3373 zcmbUjU2NON`ACYQELpNFIa2Ky9Rnv>95}M-E6?SHPMo4 zlE87vOjvSF4(ACoX(=@&oXcj)8mOhidD6^SgS9~`Tg!&HV&<%&+K`p6<*nh`uz*BF zM4Z9{IE^#U2DcLkJ%tgqT7eXv6nogwFHvw7=Ncj&!uf{4$Ft#W8E9cdO+Y|9(!-8^ zNqSp=lQ5!&3q-<2l3q*Tkw=MbSbW;sLbWq5Z$$gCySjpa6S-CU+o=)0aC}5a(I=W6Ar%s8} zwp$KiIl`%9d{SbmuB;A|xr7eOM6%VnYzLz+HO7JbTBTa4uFKQd)}8g_G$k;ZOaL3h zH3?Jh5$b>#EXDWh9cr?H@Ccu-pJ}>o`}*{>Y3rKVv>o^Q>(y#?y?A2zT6Kj9mw|0o zGfn$`-KIvvXgO>+LUmvX)-^QK>7B5M+q7|vHi?Eo5~k3|rc5I5={l5>xbZao0LK0T zfbSy#ii{g4Btj2`cVi3-fW@@GLBAD`;?Klw2@)1Y)F7m!1txQ^)WJO&$JN}9Q`c=w zCM7xmhgcF5S2IjH4#=cPdHzuz#16kiRS&M*yuQlMuiocDx%!T#-z6=)s@SyZ-!0jAo`wam9g5L)EXJ*t-Wj;*%sdJms){vJv z2i+ejZ!HE&VlcUf0KDKJKnXdy4|J%{YDriKt=5D|WHl^20&6`1{ABA-)|HiDzKjAZZ9Z(F4y1 zvw?&r&Oe#|-6gN^#^$-r`K?PI&umR?PJK4;XYH@@Uz%QO5qco?NJ3(&0=M}n`$2EV!!N!~aNuj|pKXukm zjrsX9L~Jl1^LPdDf`b6baxzr(K$zBndEDYe=Y~C*==@&=mOg>N+lK6W8mBprgaw)f z8kz&Zjnd6Mng@Kn(0v|M!%=Cc1$DjgsX$Zw{3Hh|2Rs^Bg4(uAuflW=hI0`>FB#AJ zBh!C+(@Wii?x)80WQnT-0Q}ZfXI+_wVrII{KBf2{ln-F+UjlHtQijm~07i8w8I>cZ zL`)x0UK@@}JQ!`(^(N8p(l_7)k4J~6ODy@vKYsGS8-HylGqanS@e(tk!V8n*OsUtQ z_Sfr7S%WtdY8K`3LAk`N&|Rv;#mb@FKdju_u;Qn!qTAG=yx_1A;6qK~y5uM!|BJ{< z%e}3>XOIm!O?V!0rS}F#o(ZdO`A8kDKpm}6yW_%RLHWN?n&DuOvzB0w(Vg2x+K@8&Z%*LgEWl_#74dM8Pj!_NL~%!gu`Am|rYyUI;;uK|*m4B?TY? 
z5QLzFMEMJ$^tn*-%2#%T%C1leL|I7w;p>pwLejgo5PlEAo`l5d-YK~D6!Nr?{7eZD ze7qYg;r9?6lEZfJYAp5UwXJXMpov{H;h_m1Ns%s_SGQ(&kh+Uh52=T%-`p);*g+R} z(M1nk1Q&_&FU!9u|El=&@kir User: + """Validate JWT and return the current User ORM instance.""" + payload = decode_access_token(credentials.credentials) + if payload is None: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid or expired token", + ) + + user_id = payload.get("sub") + if not user_id: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid token payload", + ) + + result = await db.execute(select(User).where(User.id == user_id)) + user = result.scalar_one_or_none() + + if user is None: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="User not found", + ) + if not user.is_active: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="User account is deactivated", + ) + + return user diff --git a/backend/api_service/app/main.py b/backend/api_service/app/main.py new file mode 100644 index 0000000..489a5b3 --- /dev/null +++ b/backend/api_service/app/main.py @@ -0,0 +1,75 @@ +""" +Weibo-HotSign API Service +Main FastAPI application entry point — account management, task config, signin logs. 
"""
Weibo-HotSign API Service
Main FastAPI application entry point — account management, task config, signin logs.
"""

from fastapi import FastAPI, Request
from fastapi.exceptions import RequestValidationError
from fastapi.middleware.cors import CORSMiddleware
from starlette.exceptions import HTTPException as StarletteHTTPException

from shared.response import success_response, error_response
from api_service.app.routers import accounts, tasks, signin_logs

# Application instance; interactive docs remain available at /docs and /redoc.
app = FastAPI(
    title="Weibo-HotSign API Service",
    version="1.0.0",
    docs_url="/docs",
    redoc_url="/redoc",
)

# CORS: allow the local frontend origins to call the API with credentials.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["http://localhost:3000", "http://localhost:80"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


# ---- Global exception handlers (unified response format) ----

@app.exception_handler(StarletteHTTPException)
async def http_exception_handler(request: Request, exc: StarletteHTTPException):
    """Wrap any raised HTTPException in the shared error envelope."""
    code = exc.status_code
    return error_response(exc.detail, f"HTTP_{code}", status_code=code)


@app.exception_handler(RequestValidationError)
async def validation_exception_handler(request: Request, exc: RequestValidationError):
    """Translate pydantic validation errors into per-field detail entries (HTTP 400)."""
    field_errors = []
    for err in exc.errors():
        loc = err["loc"]
        field_errors.append(
            {"field": loc[-1] if loc else "unknown", "message": err["msg"]}
        )
    return error_response(
        "Validation failed",
        "VALIDATION_ERROR",
        details=field_errors,
        status_code=400,
    )


# ---- Routers ----

for _module in (accounts, tasks, signin_logs):
    app.include_router(_module.router)


# ---- Health / root ----

@app.get("/")
async def root():
    """Service banner for the API root."""
    return success_response(
        {"service": "Weibo-HotSign API Service", "version": "1.0.0"},
        "Service is running",
    )


@app.get("/health")
async def health_check():
    """Liveness probe (targeted by the container HEALTHCHECK in backend/Dockerfile)."""
    return success_response({"status": "healthy"})
b/backend/api_service/app/routers/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c9ee81f7ec719f687a862a4e5f1d5f1e6894c2fc GIT binary patch literal 166 zcmZ3^%ge<81YuF@GC}lX5CH>>P{wCAAY(d13PUi1CZpdS7kQetv;YFfrpQX*)@M=JHJ%11u>IcrVCY9u72DpLEmi1sI6J@?M;?9AGb()8}k z*>mqb_ntZDp2xlS4}pLmLHTXuPl<1W2>qQ@Duwny<;gJ)p&Lj-5hO7Zn`BaKgiUb~ zF2zUqlq=$5NIREwr#ul4#rdQ+<%{?z?n?SoLPVgrJ6V?sL;@)>BBp|oAZ_y`>r)Mp zhE!vuG1U}lqHW$}bE+lMLUCWRHPserOKpj4p>=<n0 zOC2k&+dzJ|G6+Ss!xx0Dstu9aoO3+~&$65#d*=pzJD$p~>wo5F(=Eu@y@BUTz;mZf zGz*n}9M5*S+tRI9>e;~er@(i>VrD~rB;SgD-BRxco`X{81Pb>p$^O%F;#?*)5|3x* z(`x9@^r_Jh&djSa9`KJOlOZ`RWiyGis)Vro{(J(YULj>|x%WabHyxtNx*GMC5>`2R_S3iF0<q%5fF2x)EhD9X}x}3Q&-`*^@{oGchU58SYtGjY{VXPgx3sKc&bR zIz%}g$BS7t8qZ`dBxFOBWJldE$vCkfM=!{WFl|toi(y%cDzd63(z8mKHR_f5cpUgf zL8@#ft-usA#u;qYVOTxk9-qe;6cU{`83)Q=atO;YRW>TsQ(1{rgXP}o@}x`kgRyL4 zaN*^_n7Is!;jLis5$-IOXA+kTUX9Hvh8xT3JWj`Lc7h--()Y;_AUBX|c{@U?6?qUk zZ{bcK0!v^f!yJadi*P*vqvg=l)Wp%T=-A|;>35HxJUTTQofv!fOY{Uf(f9jCUq2Ji zNb;GB#CFb{i^VU%Ql0@BKzDc{5tjkao-xfxIa3j3AiHP;qva)ml_YA%A(*foUZn{@ z9z7IVex1=<-zW&fx-hKq!?*|94NrNFxJ|OE)YCxXl={3ymqSij*DM{tAgf5sJ=RGF zI^iY_{=7mhj!%KOprKY6VTh!Xj4s5I3A&tqvo6i2vS9}I0f|D`hj1^Uu8)?#J*sA+ zQX;Nm60f@n)iwY`b217}teyjUP94;IGV+kPM#K znZZiu;$-P7#SO@Z@IE;U{3j3tO3ATnm0ricY8$8W)h?$J)@)Q&8aw@YYitf(psIDw z;H-TJ3Hk%>9dv{H1;5PYxHVUG)+**qJ2e(MD)7k>Jzl`RL7_|0Yo<8Up#aadO4a}fJN<%OYPZ~-F9%wi~^hR(^!3`CKT znh8O$43#59h^*Ye(IpU@>YG?*xuLPbhn86Ril_VQ(NmgTtDB{@zav}ZKtg@u3qbw>zdS0{q2|^bum9@xFJ^v!`Zt+dnFl?4)_eB+`N&t} zU+-8SI#%d;Tkmz+Q%(^sMT+n^>!3SvYTBbql-6dLoBrkg=5#AFv> z@aHnpqQk$4tx^A)v;4~*YHRG4v-?=i7JPFxc6tWRXT*6l7QEuir3OHmQUHo)Xwf4rM6UQ z)wU{LJ4&k24R{(qX?Vz10a9|639l`pCI0O_YLK$eAg3l!5~MDuGbXBh*6-S8^k05F~9-*rgn zDEK>de`o&C!^nMo!@rtymPwHwO8-jtG)VGK{%=lM>YN^^{Kc$=MqDuPPEf< zq8zgZjT?*If$T}gX~xf&q^hp;U!^O{@RwO|7R$-(-|BRh&0Wt)v&^Z)e#%8+R#1)` zt#LYubtW^qA8~AdNK7D`rY9s?-9WP_hu&*gX6z9u$2e8>8GEhq1j1zV0a1>cFKe&^ zy9_3#kTois<@f&KWI|EF=CcQu`UqMvP-S2^nSuPG8~~(3@(zqth<)dg<{dPjwT5qf 
z?Kk@hzAoL@WwlakSZXY%K60c`aY9~ztmeby1XRsvc#u?yvr~UGJZ4r?z#WrG>V09p z>T`H6AtYf%;Q>_rE8zHe{5EtRBUHBllyi~4oQocM#a~|hICw4iz}vm5 zS8eM(uNA!ebnm{$9NP>|1fYai@#!Fcxa3Clt)Kqq8@xP z|JFmHvq+V+HV0}ccn5XwVA0$4*u~Ys>;OuLbGc1L1qMGTZ!I9oEjv;XS$yh6X+X@a zo@3!rj>)mNVViRoUbEN(GFYJ14rc@_y+D%SoF*eN8|;v^?Ov)Y$k9w+4UAv%H9H40 z1UlD8Ap3=~jQBWw=X>xfBnn!=ucd!&>Hg4P8Vk+Cdh@U*);{?opi)Ql87~MEx-g;f z6K3=wEGxy*8`0LLdSgKYPuCj{i5W(k;9mCR+`z|Lubc;JR#buw^`?p6$~lz5 zAf=ppP41TYkyB&>UZnt_Y^$an}}^rk1SXCE6BU4 zEBrtRfrMxoJWOGg+7RM29T;V~hBX9`XBny8fh0IXI$Z-&BZMT&sNifatB_K7YEp2Mdi)Z^&89jJPVbMb zvGGiLCNXPXCzWs31}e8`rUy5Y$+~Q}MgwqbG$7G?LDFaGhIJVU7@)gST#83{Fw8X&QVsqo_tbJ@xM1^>Xyk^w`MBu~AI& zkHJA|q8kR9mkcgDuVNA+@JRxQfzW*vT^t%vzV{A;zUGti0ZhUOaSf#a3smA5hAFWe z!$WEZ;77~>4efvSQ$*dG^C_Z1&G{5jzvg@%ISDqlbe>e0KTs%Oy`UGtQI{fkl5?i6aPQN!v}H zRT7i%F^AY!$h<7=*-f~1Tt%7xaz*P`>a_ Xr6i4}kdTqgsh{Py8M3tDnV;F literal 0 HcmV?d00001 diff --git a/backend/api_service/app/routers/__pycache__/signin_logs.cpython-311.pyc b/backend/api_service/app/routers/__pycache__/signin_logs.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7b213ca67dcc428302b40161845e0cacd2136646 GIT binary patch literal 5061 zcmcgvUu+b|8K1k``@4JR`(q6D9cGO!;0U!%Y*3-#h+-f%#34XJbRs^TZ|CfV`^W4a z;LBN@Qq-nVrI08UP6h4tAv6`VvZ7YyrBxpLxNB|Y(@K$$`cmF5qIt~#;zJxEwCwTVm%nCVw!q57yY#`T@=*a~W z!CWX2%7qhQHtx zJXT2zNPZlZ0yr%7V80Z^!Hh==-Qp54JW$)-r0V>a!uMfs58$y1$aYH+`1L;Aa^C?n z@N~@d!OT`^+r$3*9L&MfF*hJZrP#ym&v7|%-*am?aZmSH#>q4v#DmffctmW+(f+1* z)Z9@^YUU|&WmRXi{jN1Cd(ozlvGSceaEs{5$C01 zLC&j+NbuE?Ot3h6@q(BtsY^Jo%ITCU7xH2%4@>7W1(L=nNuG)e9HSp16_5jLdYtLDex`ikebOO56@T zoW*I?gRPm|n+hht6I4p+ zG;kzA(PAO5U^+(m(j`J*PstLcvzwt7>$y!rGjuk!DCbiumYPH5;8?0Ck6qh8maTl1hO|XEW%bw=AWPj-BcG35274EXPIbX50b-lv&MZQ}q z11icXaW9ezPT(w0zJ-aLSr*l$lnMCTsgF9XJWpf ziW#st2@Hp~%r%)VNZ52qSWU^qs&`rvyDmsi&IKj)pT#}Q=yk9((>(*>BE<(>X)u#d7*G=A(g%YzIh4E zJP9jYlhYXb#Y+}OD3>g2Pf}|iEiRk=$!qK+lbeE*EfgTTVTlMl$~|cQ1-~k42&lL3 z&f$*_-^+Y<@zcWH!p7jl`ryQ$PyK23%f0K9XY|3>jKSCR$eadAVbcO$TS 
zJ+NC3#En3_daCXYRBwJ2i2lK4>^Y{*zM}=^^}xIlnAbe>Fn8d4*Q>+T`VJ=CQ7 z`+MJ{7;PcC&s+&?}Zk2}a-m^1BJOkRS{ z9J}gr@T8mtBM}#t#TE)WQ&~BsFe;NPf)XZ2Wtsj>!^<$1G_{vzaYKip$=|?REHqJD z!3&`Zs&Ex&cL?hAZyWl5Va$rF;vmBn*Y`lH7T#1GFG8yoSC`Hmm~)w)1}oj@)QxYo z78*6F=|5=JLZQy_|0Ze0U2)c0DAdVSad)rTdFCsgil+vt)$^r$traimBs@=rP#3nh z_FwBj+%{LY%x8m*FtTn?Oyj@YT@=wyVp&#B2>JR;|hy^gs*sEpFrB$GsMC?B&{ocm15Xtk+@r^ zB21%hNx*BX*em{r&iimu4_Y?55>Vq!PB_OvZA<&D-IH7opRowZ(;WvPT6xRA^--i| z$&qNml1T9vA`lT%qEf_ZIU}cAG3>gmE{T8>G<9iaYZg?97F5Xj#Ze~m9Lqx#OPW$3 zkm)ewfs6)ANeGRQCQ?X)fB-yVfk+hk^7uMdApXfjQR9}NIYM_!XcCd`z!^l(gT{cT z2cfxx76EzQ>07aNV$tOmeZ|&&`ybhVvGqLqJ&M~2y;C!ggD{m%UIRFkASt7mJ34MU z=uyyxMUm`+zi)j9;LLv@^?Z% zSs+(tDTL`^x0GC1HobJ?>>f>y-lFL$WHJg?O=nil$&d_K9E#~kWwV4{6QTZV`mA%7 zO1WaZ$I_&=LGm&sYbO>?rh&;b{gT#;V4Z=La*!WUhLg0RXKZqG`{W4idN*wpOA197 zYU3S+U8dRd%mtFOK)DkhhWL@%>>9VWp!*IPzC#-Kg8GdM`6ItPqzl^( zVf%(KyeA%-|ci_|5-IyNUWrTNagkN3{zr1$fFH>Jk=@Z9|iQ{^B+6YfqPuKgS zzZv+|K<%{NKW_AoYmsMb^-NvZwju0T7k22vkRc2)9sJ=B=RTObcgTp2ZbT>6qZ4}c zfDt{Q3kMD1VD&^j5WfA!&zDw5bpIeEMMCJqGasB;?b8L(5X22(*SfH)rs~2zL)gc} zSvl7#)*~ZEWJL2nTR{dN`f%!lDV>iRd~}1~vCi+%`5}WJs!qd^>_7O(_Jw!Nd;7rZ zSYXr@| zX;Yo~CWwUC>dZ!Ld_6Y4meFG4dhDnXJF30%md;-^_=_5M(Q-A>bSINiA)QQ`E_JyG zFhuAFfa%Vp0Mm*xp&(8u&R7XEjrCU6>{G60Q`z(q&MjNs#XPC6JvxeSsG}+RMJX2u zjl7UiEu{#SMwtUBghpAGsuSjz)JV;qO%kwf_BCm_Ospea9))yyRAOJa7(`=*(5PkZ ze3nF^s-=|CHaszyV~z+3yY;E;P*lPI!gPQqm=5?dXD&bvP~9jPS)k`R0y>wnc#6!! z7=={D1EAP&*lf0j-C^Szh&DcCi)kp@S?g$8`+l{KhP4;fIvUfu>tl3S`(L$=_G#U< zjt*(vwT{kfFRG8d;rIKsfqiSQt-YdokL%v!hWB{Y)o=;6NIe*7IN<-WFVJwpKiIE# zOT$fj9^~>jytIe9n?Ypv{A}R80ZrJg+x8f?J-`&|yY0N4T0N${^r{w|(StKaa0WmD z1{YS}s7+{-Gg@#~56&9F+4}IF+JVn|*WT7nzoQM$>%;TL@O(YA?e9K c))nZUI^TWl0pny%`uzPHQm8^%}4=Gwrt0Xs`DEQ3iKhJ-kYG0CPw=xw^nb~}A>PE})Y z+CvXPGXxP0vpXX^T9Ev(lO+l>;vpmPI2rB3K2}>L(JBcAX%%Vqtr0U)lzrL%Kh<4b z)wUtoec9^MfBkjt=hXlGms9`N)Z}O2%KzgZQh(mWF#m}UwTqU(!?Tc+VQw=b6JtbH zbfnpgBj(6BW6lg0<1((8i^Y6r+MV&lJe23s-i$Biqr5Ba&-i10%DdBnOfc3|<@p#7 z?L6t`Oehx0w8UC6t+CcjTda+id(-Wij#vlfed$e^&R8eq{prn_u2>i41L^KePpl`? 
z8|%&V#rjyr!AK4{-F17e%hCbmmH z$ZwhUiXD(YC~mUlw?cj=Lw-nfL;Egx-X?a#bC0xz=DEdQ$lEUUuk5IeXuXSl zl51{fE$4o}eQq#or_}Ws3-g7)anZbxJ#X4U=e$Mgsm&YaCR}rU22H-GHDqGDU>0HX z(ZYy*_Wpk0xYfOyc8ddR#@r2K?y(GIvf|+XdQ5x|TclpgTDPtlcQ1_lBXckd^M4$- zzSeDP#*B#WGfa5lFDU;L{@}~(>aoqpqiY^Ba>UmN<^Mb%}4zIg-Z%^x^L{w zQJ7l`pMMPo8y;~(mZyhyJaPz?N5e1XdN#V^gQ@$BtehQbLgo{_MPnd=T>|t8EuOLy7s+(8c|_OMz@T z?9kowM4C=r(m5qDBO`_BxQV3XNj7OB12zQo5&b`#1m-rQSjy&1zQEWeZ^hIF(8#<* z_P)bj58kbFLg(gWx?@}>CrV)_X@eFzj}Phi#Y8$q*W)*y#e8Ny%#u#1BIEGFcYhZh zJ(q;K=N6>Yh1|IdiR1_H%Fe-hVLQpiR8j(d{+uB$@;SQx$oyr!DUNJh$;HJ~QXyNQ zvu^Bl8;~ONIMAsDcC09+z+Nq|_fg>BYT)3*x7EPGQXr}YqAC}C2K5--pDQ6qU^Z6p9hcP-3HMA97~8m)3Nkxr0!E-S@zB%Xyk!EGie9eOKjm-xuO zk@&|DT$ z^kaEz990J@J276ZLJmLy7Rq;l`~`kRrV?b@H{Blnr_sC9zkBDmxjVT>{llyM!+$*a z<*R?%y*hHb)c?BH|9YuyLTj5)n;KK`M44}Y#P3|?cb53D#)pe1%O1XX03G#gta2QH`yO1P4=aTHDOwJEWH# zSN5kB%ZRo53}frZoX7V>t)+T;8>fh85qI_Fp)vS9;dSDTtb{! z)ZoixKj!a7g02_Y zi^K#lLAQuRkPHC{yU8%}COnO<_hBA2TqrOUoK{I?4hzU*CUX|Np zO;6f75Uh{A{HMw%!JEOaeW4qjw|Z~&-WBhpOD%h}mOUlkUd^}nYaf53<5T74!t&15 z=7ExLQ1cBw^>FR(zcN57499OwiYF?5CfNEYux&N4trXY+X0aG8djiEm4YBI0F_nL> z#J{KU@2TGRDsILbyjIXWJb26)q4?ebpo!=a1X(@kW4}Wvjg59Eks8#vevs+p7j;U;Ox3G(I{0*4gnhQ@T@D zNZ3IRVGpb%<1W?gLzn4j#myICOqCpiYEk&h86aRHIJe9H*c-alRr2;|-X67OuNO^E8H$Cl*?K+-Jry%muJsE&TVn!lAhO$Oa zs|B1poL}6WA9)L^y@6H#0;H2Uq#Qm7{+g`EvB(OlkOpHhiMQpVau1s&}1K z=DQ#9!YVJ6_(6>yRJ|LfZyyKRD~!W+xK6(DHmUrf2MHjMO5Rb;JF2diD0p4a7Qwq2 z7JF?hGB(0|HR3+wy_KW}mYzt}reb5w^us&e4#E)C(^hv=8=fW|s^Atzo= zT7hN)`<@AGYd{JBbpX_Cu(d@uoHfD%-^ElKji9ia2#8nAIau4Qqas`zMNb7+;|Nd_ zSF`3?19LMr1J&SM--~SQ-MVI$QCjMQh&Gkb*lvwlvW#u>8ZBz2u@AAqa|Ex>y*O+f zPzVVjD=i4Jag>Aa8Vw;0#N>G?nVN=Bsus6JCZ^yhU;w7BH#e9h-Jg=wOwQi`XK1dQP)j(CPZwN%VbgjWjF)@n>kR z4o4<<=3#N;s1vxZ2Ny5~rbh}S2CYQ+%l{3;!291?nU0++iw{N~wwBtDXzfQ-zA+U~ z{>{y}`j(TP&UTyQx%>LPZKd8Jt#{}_e<^TG3mj9qW7M;=MQbL0V;kX@ zEjXToUo^bXa_yyC@t59Ex{jba+( zH{D~L$H<@!(rFvy2cejrsTd(24cgETl~Jm!L7Tt643dPhYgp+kAXUVNz6atVR5H$6 z%nD-Oo6j& 
z1A8mQa(*H@tZsZ`yXRU-^JE0{kR=po>|%dCqy=@O1rIul>WmpX~regGoCvfeG>KQC|_um=1&fRQ&^8eN(;qpI*|1g|-$?RfXm<;r z%h8ocDX?D)>{q${RC|VpbWc1k=92Na?wL-&7lio~!B7SiG#ITFz>$Bo#!0L}L)#|fb7F*kGLs2@H=$o73@;c}lwPZn z%w?xjGe)Rf{R$ZYhnRkGq;^s7wR}m7z?Za$NWY0fJfD(ef=3Q(t6=P#Nyrg29R$M< z(uxE<3;0xFd|Tzr&u-)>79K!Ch3Ob_C_*|npOY0mL{GC0?AsrIWpaFMYCK9kY~2Z; zG^pj(o%4Bxpsq4R5QT@lk3>L%-#EVW9)Zn(lZ&I|BX~snB7Xz#qvB*)w&HNI99TXe zeuh1&GJpU4RA%~Adnz+Ssy(e&M^G7CDl>s1{XJn`QeTwHOjxz2GP75;r!sR$wWo@M zV;yi7_X9|f@wZ*?S`ObkcrUE_MoPXB%{NkXSKKT+&Mw0~;e7?p2=KlR^#<9ta;UB1 zg!>aeUva@5oDyG0#e13cK(VFImS*RUeSP>~ERNxzwT|Kxy03Pt!QCZxkH+qSE!c2uPb&)^8xT4&-08kyrm^3(Vp<1B z+KU}JFgs$7(_dn@XzZ5fuCbM_5h;B`$j~*?d}EE2HP%R3m=^8}QXE;pAz)<4qeJ&y s?Ni}P1Usm)gU?-JGkc)I0HFiJoh}hGjV0EKX&oGBD|YCxWm7fqKlH15nE(I) literal 0 HcmV?d00001 diff --git a/backend/api_service/app/routers/accounts.py b/backend/api_service/app/routers/accounts.py new file mode 100644 index 0000000..59193b9 --- /dev/null +++ b/backend/api_service/app/routers/accounts.py @@ -0,0 +1,139 @@ +""" +Weibo Account CRUD router. +All endpoints require JWT authentication and enforce resource ownership. 
"""
Weibo Account CRUD router.
All endpoints require JWT authentication and enforce resource ownership.
"""

from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload

from shared.models import get_db, Account, User
from shared.crypto import encrypt_cookie, decrypt_cookie, derive_key
from shared.config import shared_settings
from shared.response import success_response, error_response
from api_service.app.dependencies import get_current_user
from api_service.app.schemas.account import (
    AccountCreate,
    AccountUpdate,
    AccountResponse,
)

router = APIRouter(prefix="/api/v1/accounts", tags=["accounts"])


def _encryption_key() -> bytes:
    """Derive the AES key from the configured cookie-encryption secret."""
    return derive_key(shared_settings.COOKIE_ENCRYPTION_KEY)


def _account_to_dict(account: Account) -> dict:
    """Serialize an Account ORM row through the response schema (no cookie plaintext)."""
    validated = AccountResponse.model_validate(account)
    return validated.model_dump(mode="json")


async def _get_owned_account(
    account_id: str,
    user: User,
    db: AsyncSession,
) -> Account:
    """Load an account by id; 404 if missing, 403 if owned by another user."""
    lookup = await db.execute(select(Account).where(Account.id == account_id))
    found = lookup.scalar_one_or_none()
    if found is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Account not found")
    if found.user_id != user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access denied")
    return found


# ---- CREATE ----

@router.post("", status_code=status.HTTP_201_CREATED)
async def create_account(
    body: AccountCreate,
    user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Encrypt the submitted cookie and store a new account in 'pending' state."""
    ciphertext, iv = encrypt_cookie(body.cookie, _encryption_key())

    new_account = Account(
        user_id=user.id,
        weibo_user_id=body.weibo_user_id,
        remark=body.remark,
        encrypted_cookies=ciphertext,
        iv=iv,
        status="pending",
    )
    db.add(new_account)
    await db.commit()
    await db.refresh(new_account)

    return success_response(_account_to_dict(new_account), "Account created")


# ---- LIST ----

@router.get("")
async def list_accounts(
    user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Return all accounts owned by the current user (cookie never included)."""
    rows = await db.execute(select(Account).where(Account.user_id == user.id))
    payload = [_account_to_dict(row) for row in rows.scalars().all()]
    return success_response(payload, "Accounts retrieved")


# ---- DETAIL ----

@router.get("/{account_id}")
async def get_account(
    account_id: str,
    user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Return one account after an ownership check."""
    owned = await _get_owned_account(account_id, user, db)
    return success_response(_account_to_dict(owned), "Account retrieved")


# ---- UPDATE ----

@router.put("/{account_id}")
async def update_account(
    account_id: str,
    body: AccountUpdate,
    user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Update remark and/or cookie; a new cookie is re-encrypted with a fresh IV."""
    owned = await _get_owned_account(account_id, user, db)

    if body.remark is not None:
        owned.remark = body.remark

    if body.cookie is not None:
        ciphertext, iv = encrypt_cookie(body.cookie, _encryption_key())
        owned.encrypted_cookies = ciphertext
        owned.iv = iv

    await db.commit()
    await db.refresh(owned)
    return success_response(_account_to_dict(owned), "Account updated")


# ---- DELETE ----

@router.delete("/{account_id}")
async def delete_account(
    account_id: str,
    user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Delete an owned account (tasks/logs removal relies on cascade config — verify in shared models)."""
    owned = await _get_owned_account(account_id, user, db)
    await db.delete(owned)
    await db.commit()
    return success_response(None, "Account deleted")
"""Signin Log query router.

All endpoints require JWT authentication and enforce resource ownership.
"""

from typing import Optional
from fastapi import APIRouter, Depends, HTTPException, Query, status
from sqlalchemy import select, func
from sqlalchemy.ext.asyncio import AsyncSession

from shared.models import get_db, Account, SigninLog, User
from shared.response import success_response
from api_service.app.dependencies import get_current_user
from api_service.app.schemas.signin_log import SigninLogResponse, PaginatedResponse

router = APIRouter(prefix="/api/v1/accounts", tags=["signin-logs"])


async def _verify_account_ownership(
    account_id: str,
    user: User,
    db: AsyncSession,
) -> Account:
    """Raise 404/403 unless *account_id* exists and belongs to *user*."""
    row = await db.execute(select(Account).where(Account.id == account_id))
    found = row.scalar_one_or_none()
    if found is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Account not found")
    if found.user_id != user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access denied")
    return found


@router.get("/{account_id}/signin-logs")
async def get_signin_logs(
    account_id: str,
    page: int = Query(1, ge=1, description="Page number (starts from 1)"),
    size: int = Query(20, ge=1, le=100, description="Page size (max 100)"),
    status_filter: Optional[str] = Query(None, alias="status", description="Filter by status"),
    user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Paginated signin-log listing for one owned account.

    Supports an optional ``status`` filter and sorts by ``signed_at``
    descending (newest first).
    """
    await _verify_account_ownership(account_id, user, db)

    # Base query, with the optional status filter folded in up front.
    base = select(SigninLog).where(SigninLog.account_id == account_id)
    if status_filter:
        base = base.where(SigninLog.status == status_filter)

    # Count matching rows before applying LIMIT/OFFSET.
    total = (
        await db.execute(select(func.count()).select_from(base.subquery()))
    ).scalar()

    # Fetch the requested page, newest entries first.
    rows = await db.execute(
        base.order_by(SigninLog.signed_at.desc())
        .offset((page - 1) * size)
        .limit(size)
    )
    entries = rows.scalars().all()

    pages = (total + size - 1) // size if total > 0 else 0

    payload = PaginatedResponse(
        items=[SigninLogResponse.model_validate(entry) for entry in entries],
        total=total,
        page=page,
        size=size,
        total_pages=pages,
    )
    return success_response(payload.model_dump(mode="json"), "Signin logs retrieved")
+""" + +from fastapi import APIRouter, Depends, HTTPException, status +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from croniter import croniter +import redis.asyncio as aioredis +import json + +from shared.models import get_db, Account, Task, User +from shared.config import shared_settings +from shared.response import success_response +from api_service.app.dependencies import get_current_user +from api_service.app.schemas.task import ( + TaskCreate, + TaskUpdate, + TaskResponse, +) + +router = APIRouter(prefix="/api/v1", tags=["tasks"]) + + +def _task_to_dict(task: Task) -> dict: + return TaskResponse.model_validate(task).model_dump(mode="json") + + +async def _get_owned_account( + account_id: str, + user: User, + db: AsyncSession, +) -> Account: + """Fetch an account and verify it belongs to the current user.""" + result = await db.execute(select(Account).where(Account.id == account_id)) + account = result.scalar_one_or_none() + if account is None: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Account not found") + if account.user_id != user.id: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access denied") + return account + + +async def _get_owned_task( + task_id: str, + user: User, + db: AsyncSession, +) -> Task: + """Fetch a task and verify it belongs to the current user.""" + from sqlalchemy.orm import selectinload + + result = await db.execute( + select(Task) + .options(selectinload(Task.account)) + .where(Task.id == task_id) + ) + task = result.scalar_one_or_none() + if task is None: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Task not found") + + # Verify ownership through account + if task.account.user_id != user.id: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access denied") + return task + + +def _validate_cron_expression(cron_expr: str) -> None: + """Validate cron expression format using croniter.""" + try: + 
croniter(cron_expr) + except (ValueError, KeyError) as e: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Invalid cron expression: {str(e)}" + ) + + +async def _notify_scheduler(action: str, task_data: dict) -> None: + """Notify Task_Scheduler via Redis pub/sub about task changes.""" + try: + redis_client = aioredis.from_url( + shared_settings.REDIS_URL, + encoding="utf-8", + decode_responses=True + ) + message = { + "action": action, # "create", "update", "delete" + "task": task_data + } + await redis_client.publish("task_updates", json.dumps(message)) + await redis_client.close() + except Exception as e: + # Log but don't fail the request if notification fails + print(f"Warning: Failed to notify scheduler: {e}") + + +# ---- CREATE TASK ---- + +@router.post("/accounts/{account_id}/tasks", status_code=status.HTTP_201_CREATED) +async def create_task( + account_id: str, + body: TaskCreate, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Create a new signin task for the specified account.""" + # Verify account ownership + account = await _get_owned_account(account_id, user, db) + + # Validate cron expression + _validate_cron_expression(body.cron_expression) + + # Create task + task = Task( + account_id=account.id, + cron_expression=body.cron_expression, + is_enabled=True, + ) + db.add(task) + await db.commit() + await db.refresh(task) + + # Notify scheduler + await _notify_scheduler("create", _task_to_dict(task)) + + return success_response(_task_to_dict(task), "Task created") + + +# ---- LIST TASKS FOR ACCOUNT ---- + +@router.get("/accounts/{account_id}/tasks") +async def list_tasks( + account_id: str, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Get all tasks for the specified account.""" + # Verify account ownership + account = await _get_owned_account(account_id, user, db) + + # Fetch tasks + result = await db.execute( + 
select(Task).where(Task.account_id == account.id) + ) + tasks = result.scalars().all() + + return success_response( + [_task_to_dict(t) for t in tasks], + "Tasks retrieved", + ) + + +# ---- UPDATE TASK ---- + +@router.put("/tasks/{task_id}") +async def update_task( + task_id: str, + body: TaskUpdate, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Update task (enable/disable).""" + task = await _get_owned_task(task_id, user, db) + + if body.is_enabled is not None: + task.is_enabled = body.is_enabled + + await db.commit() + await db.refresh(task) + + # Notify scheduler + await _notify_scheduler("update", _task_to_dict(task)) + + return success_response(_task_to_dict(task), "Task updated") + + +# ---- DELETE TASK ---- + +@router.delete("/tasks/{task_id}") +async def delete_task( + task_id: str, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db), +): + """Delete a task.""" + task = await _get_owned_task(task_id, user, db) + task_data = _task_to_dict(task) + + await db.delete(task) + await db.commit() + + # Notify scheduler + await _notify_scheduler("delete", task_data) + + return success_response(None, "Task deleted") diff --git a/backend/api_service/app/schemas/__init__.py b/backend/api_service/app/schemas/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/api_service/app/schemas/__pycache__/__init__.cpython-311.pyc b/backend/api_service/app/schemas/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f2e6748ac7d305c4e456e3a267f40604aa75e014 GIT binary patch literal 166 zcmZ3^%ge<81YuF@GC}lX5CH>>P{wCAAY(d13PUi1CZpdS7kQetv;YF7fK6y9Br9slAqkbuFY3n~IjZKR4;MHQkdp`;PPkT_8#E75A>8N&*D*UYX< zl9Q=u#i55BD&b3|o+yZ@>ajjUoy%Fvz2S0)GLZVVpqB2z~a#b$MGU}m@dF`6hyx#Z60@%!6UHyxv- z+NFoAs@rPW;_7$I*tFEIOC_srI_mt&+AY$~ z?%C%#gCp+Dt+2;+W;^PpMYlzSN(dO{165Z|_M{syJqXy`+`bH3?Vkd%lL}&4w;5L# zZ{j-d`&` z0Zw=dgdYi@{5}A5Pa=L21!nBEbO2I%f9=+n>m{JVx;WALre1o) 
zOuDYu3=IzVy-{MI*Veo2TknOiw#}os6$&gW`v(w>BOi}kbICO?GuxbQ+OHn}weq*- zW;R-xjdo_Ead-EsOEzG($c8s{diPQ@^h$QWxbIFax2BfcQ_GFp&*xpT46{X+z3heN zH@|)K=he6O-0W&AyV}mKg2A`Dg`e-bWEEyhNKp8>-b)Qpx?kK|0RKOw_eRTC5c&uS zqcqZ|twJa0IJsj;?x7We=v0AnPV{NJR>R8N`D~1~bp!-$O_gmKw$KBH_9CC>F*wA| zg*?!@OSUUdj7mk_WGZK$FtfzBYYwA1p2k>+tbc$jl7Uw$2*;-oL^kB&91DR@qnJT) z8iin!Sd47pXHej|0tTH$O~g8j+H6mtBO60MOobaRxzWqwr|wPWX3MQ?xt%Q!(pYv$ zxkbu7tE+GGZnn_M7TVduz^LGoLW`h;8Td!nL%GJ+#c|LCF`8>eSePo9L!pKG8WCEA zkt4Ya({LXbc^}DbIB0caPoubM(A-}t&?MmsMll++c0b*1VJpn8LDyzGU)`^7R-n;y zR^!ZOW>C4RRfZlmL;>}RY4uWHtLTR5uq`Jy=PL&FC%P?3Gy;(;KVrj9b=yx@blcJ3 zR|G1YYP#dcMUxczPz13#;y6H#3?hi_U+_5eerDv|=jJW5Y&=-a=g0_|{B)UHRqzJ= zaI@|(n-5dRj|5BkW$<+YE*m*HV2^ip_IVmVLAG-M=5w(TCjnyz6cvmOfhkA(5szHO zXDE>8_OBoY?i!Lu7G1LFMH7vM#=_GDFE!a%YAiin@=ngU1ZRuPc*#>i@5#6%(;^u! znQq)^+QCaRnlW WNAYArhJcQXUE>`-Jv;^yA^i^;p?NX@ literal 0 HcmV?d00001 diff --git a/backend/api_service/app/schemas/__pycache__/signin_log.cpython-311.pyc b/backend/api_service/app/schemas/__pycache__/signin_log.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ead641594a23328f4d501d4732d7c3f70bb00f8a GIT binary patch literal 1808 zcmah}O>f&q5M4@3ijwRqu9G^S4(p-^wSg~1a%j^6ZGsxMQ`Vy^;;#xJ|KQE(FiYd;IT$YqBQ9YEtMrY)a18_PYQGXx-D*Hx8knvbSgw`(X}=cO zt{v3fxA9>&_ zXD5=*Q_%}W=sb#g&a(lRLnn&4^b-+`2i?!tDJI^RqjKC zTVPdltH!NsmgFf0bp7(6v_sd9YM@9j>IFVv|kZ|=o zIl{M|eDUz}P8TZM+2vw8>TLVnpLobRek?r2<#W;HV8@-Zw>kwW9+0ve54BY!Auq#D zc&T}S^z4^iU;L$oKY1Pe@+QDWyD6{W%?Ahz0J`RREb2nSRS_n-<#~P>MnxeMdQ~DN zwwSaKP#yVB5<8^W_y$_dQ$WS4UjZhGq$H#FM)Y20H`A4LWq&2RG@ov!oBNyD+_i&` zUfdedMVN*@Ob`cz>4#=(OZUyi5)^N>1OYhgrj9@2#|HO$*%G7WFgcYHyh z$Qx5W*9iD=IF6nGOd4ULq_e_1u=<6Cuc+ooV3H zGZ)C9(1o)rq3B%ZN6AD7YFV2i;epa-0u_RzBF3(!O6+kI1Gqf&sExM$PL<^?_#%l% zW>KiyXe}bVQ^qCqAzXz^Jq9>m#=9eWH*2;=mp=Tvxiq3U59w0Yd@o%~*Y?-4#%#Kp zuI{f+jJeArit{i>FbeYV9UJYLA~7UhTgfmMVXtVmJk`A8Y8bQQ@|2Y}+{!va-f?-p z=fB~^zF&y&EBOgLhw4^;0*tGMVT>ykgN_M;O^n46x%KwU$eq#u&5V3Jx@eBArU6|z c4F~l%=sliM3gmTjU1A{8V)c^nh literal 0 
HcmV?d00001 diff --git a/backend/api_service/app/schemas/__pycache__/task.cpython-311.pyc b/backend/api_service/app/schemas/__pycache__/task.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d0090443fe2183564883169a7e00b0c6edcc2c84 GIT binary patch literal 2058 zcmZ`)&rjS&6!wqtuGc^y1zN&3)S$M}su5Bu5h+#FL!m{gR5nTikyfi(Ue5qlytWy8 zo3NK|q^jzXL#1%!L{&AadhDO!l{jpDt$N_L2u_^x-gpVivK^1#e)ikmTnBFl(XPpPXFRbb+gx>nKZxk^r! z6lqnW+D{VAG3^li{sDJ3DtV#HgHFGu(}k`8I^&wIKu4ZPrP1bHNQ_X0s+3UeP419DkZ@&Ty z9kyLa4cy=|XRwdv-N$UF!J^o#1$0;J-a&=yZ<)5~vlnLMZuzcn#$d9zcr1gL&&uJ!?NlDI7B>ZSvw8e>uK_qMFYpOI3|YQ0FkOu%+qnpvVA{@v#eXz@6svg zr1PbfFRBhCxB7y)wV+zFo#)J_RXcR8i18m>hXD?&JsPVb+;Erc=p6-?W`sFx^<5ou zKOy%!MRR}YVDhhYtDY{+cl4wP{`Dc z1dO+tD1eg8$6&$Ka3hO}f6-EbkHafIf#N2L_fQB{W)Wd!PzD=or$T0;B1qr&cw|A%+bU`LU6XpLYK(eM_uW>P{wCAAY(d13PUi1CZpdS7kQetv;YF&ryk0@&FAkgB{FKt1RJ$Tp ZpmvZ2#r#0x12ZEd;|B&9QN#=s0{{RkBd!1d literal 0 HcmV?d00001 diff --git a/backend/auth_service/app/__init__.py b/backend/auth_service/app/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/auth_service/app/__pycache__/__init__.cpython-311.pyc b/backend/auth_service/app/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8402defe014f733651014f0ada231276af76b647 GIT binary patch literal 159 zcmZ3^%ge<81a4vLGC}lX5CH>>P{wCAAY(d13PUi1CZpdS7kQetv;YF&ryk0@&FAkgB{FKt1 dRJ$Tppb;QTiur-W2WCb_#t#fIqKFwN1^~UyB~$TC+WYt3Hf*Ys9a7X^UZ%)3HcQf zNR$YiU`cYaC2EnaQLAi=+GKmwE<2(Q4(qH*r|gQlSl*Uwklj(Y?1_5Vw>{~V8>5XZ z?@0ROrf3t(JCn`PX5e)u{ZT)Aw?tdu?MepZU^FPVMq3$OL$Xb7kG8YCJJ}(3Mmy!M zXqVg_?Us9@J#uffSMH1Uv3gH(pUg*jmiH$2%P&P=lKZ3m@`309c_2C<4@L*&gVBQ= zu@JFk`A}uR;?P}=kk8=Jujnw;TZBfzCo~Dof`7B+t{v*(G3pPChc}K?zN+7%O9%+T zMT^iXv@LS@Y_`|(vbNoKf$1}oF**Y6ImD6LUO<^u4Azz{ScDGIBXo+5LYLUR;1Ie$ zvPVPEo3JUYDkJpNatpm5k-C;6z{dN{P5CrBnQjNN-9B(S12CrT7bRMIC$RA|GE_NmdAe zmag+FX(^?;|A;d=WY?Sqq2`AVOZRO`V!_AO1N4C~rB6$R{QJgES;cT8DNC1ylL 
z0r~LlP{gE|P^UnQV^LIN!n|g?s)+PHr!^+17+1xZ8lO*!%8)~IV+DwVLahPwm(q(; zO7miNT2xljDMi%mv+4K5l-9sr6LB@J1*XLXDk@8?WLo_3s;H>YvIl!PqfZ+MZ@MW` zX<73M1WB=)(&q>E zvbLEEZ^lzF>k7XL1Lotk3IJ8C$eee6Ax-&Oxyrhtit=;&(NH85$=Jg}I-z77VOXGa z0@T5-N@`NnJc6hssHAIv6GnkcrMZwewn~#)1Ea@mrgceGS4P9(WI7Q~E~OQ9^mrr^ z$pp3)pNh9rX25|&QU{Q4(MU`0nN6tW09gc_++EQxUedZKx%NiCo) z4l1gvG?g&%0aV)o&rjj`=HI|#yiGW;ky)|~1^RW%qMB$$NY+xj7jId!)?A-z+7d!k zlxJMawn|R02v)%+ahvv9{QHp0S~se=EA)g2j=Gj?S(~z#mYln_q&99z&vJFr?OFRq z#Z%ZKgIv~DOZijq5dPeHjr_{`Gutgk){*Ppb@Z-^Ai=fSP>ahjZpU&%rC!-x2J>r> zPH=CRp|)2f5VGv8#t+fK(I!N8cq2G%fRQU7v+Y zfCa`emFc*cx*1PO0uMVxe1Am(?bGWs&DXDAAE|;?QFG|PHe}U2IvB|RY z9L7?+4uwu*befa31kHF6l48&-S|ZJc#R@7QE)!hOLldKOIC*nxXvya0;|W+2VXm5M z4oudZ3^pvZvaU7bFpSk?!=dLv+A(;Pe%RRm1%EkG_L0VpqPIUc`P8?6W1!$WRP-In zz46TH%YE?F*Zs7kuh?;-6c{PB9=z9)4<0K!ZB6d42_)ZQQYN+rciBst+Wz*vzk2Tu z@bg6 zzjG7MoSs~^BK~M8&|kJ&v1gEci%FSSp=WO5ZT|Qy^m3~!KpWYK_B){MD0=vfV8Jt3 z^bF>0gWteh5zPsTOV!nt1b$U6#7l;eIG;L3s-}NU>b%fugh1aN_Mas)a4{vJS(h>* zFMy$)k1Hat8*3gs5~I;6+5`>UCv+TMbPN)$(Wr`vH_kpIY*a0f(MJU*7KYTHTA|nWDcT1$nv<@kQc`M>?t?Gb+fGAk z*#@I8K_R_hCR;tX(R5m+m!KvNkHSM@XrY4yM@uc;r9c-bnAZUd#uB@Obu;4(15PB> zrJBx~OW*@JTb-3~daYCXUPpWZzV#iuWOxMnz*x=k`{X8%>#BK(@9 zF7fITm?;rZI?2bA;EoIHU`0Wn6nY#wlxaiCR9qI#S~U(ppJ~4U9L&=4y2H$x8Rfys zF{|ZgYOLEWU7luVt!OS(N0eA)1pS6hdo?zGW$MDEi{rD*H^c)2QZoZaUxWT=4O{1# zKE`x15{X@$nw^}U8oLymnVh~hIUSpvp1v}zSrwInt@|@zqiE~sITYY#y zo+`GU$~V>Dw{1_Gx{FN*Hf9S=!^NiIylePRPd!1_##U0XZD&e>lfRyM;J<%u^IAS| z67HSa{u5kfE0{TWD;+RZU%VVTKR|vz;2z&+{eAfGc$f8GyX=t9cytU);^A+IqwnAn zxY<{18)6|MFvJQ1etchuRl9$yQ3}rv;Q3a0-Kyy^13sCjf}QZG#+N6{ri`q0*_2U> zY5(7rK%$ADFOXp(tFQwmR{QBSIAmQm&m^_;1g)Z-W0zq6)UsKJShs8{8e`+yo6Oo4 zYF5Io?ytzg6HnGIIN6y6JcQ58GaMLNcP7FRuG;B0@DEPZ`r&fh0iv_w=j$j4$B0VU zfI%R|z!`Y?=vvXMnCCPb>ZazHXJL{*Hp5ihw}Ho89b9Ov2?}U#=4Z#$^%W5yh#gau z7^uA8fXiwppY?;zVteK}fntb`;#SgJmF{UxOL1jM6wELOeHDpDF+poWqeyO%1lDZ| zfoDR|HVrvw4D%)^(%12G5tAiI6x?X&?uU?RU=KTzYuvE8!1v{>U8Ofg6Rujbh+N?h<&i-Qiy=4^BLM<$iWETj)An z>^hwf*5A3y|LJK4+C9SE#umo%-R-e 
zX2+rSR%Ai1>{-XISoe1f>%bveil-LALF4r}hygr$5QGH|bw5ytC;l+tAk#L3U?nb5 z{?IH&RQTyMgorVg8)Bkomqb3DMBuDu0Dn=BdkVZH%c3BGo0eSHIi@mv1x>5MF*W?U zU)g&VJy0`>g29`rSP6~86wrB01WZIs79bgFqCJ>Ln5qXy^gHP*m@pW`S=@p_n!-;t!Ye!%@8b?-u;;7X9z$-g??`1iZrF!}j}gn{$PZ(PGDF zzNP-oz0GiOU#V^X|Avb$9T#O$^H#@<66O|%xyC$m2hq3D-N7sYCxFWgGVOh%K71&p zAc`(C1!n~87!FRSiVKI*40G37AF2XF4V*WDLPJvYe2ho|Mrrc4c6J^g8r7V!7(~`% zG0nLEzuLqh7-_JE?FL0iHh+ByCDK%(=pSm$m0HF4t%jnLM{%TDoATpi zJPGG3c|9b)uZH6IBaD=$2w$3etCAvWK|l`zcxjr_ENMk^f@ckm_98@Wmf#m2A;iK2 z3PmrE;;=OKvqL597lHtuK@zG2g=jO9^;XCE7@2|{iYp=JBp@zRjN0fYH*`8EVl8ch zL>t`BtcMwuP<4&&8SSesVlAzzQc?+-6N5gHzRy`?m3cV~BM<K5((#P?-J?F*WM-K%dx)_X~?lZv*Z~W%|rA)rR#6h@od2+V;#@T{4zQ4pv+}*d^iw#|OP8Avs6dMlY9B`%wtX?d+BcwZ!yVP%PoYnJs~ZRNlVGrQVI;`kBsG8&X?M{N`bad23S(| z5H0{r95P|YF=-**w%emWKmGCPoD14I`sxGM=N*N}`C{aJslD%$wL3R|@xd=YDBBNl z2g?MKl7FOZh4(X8P%m^4M@!jRsb(~KG7J9$VgJjX4|{G)1+JsWb>Nb8;gWQHZ6%gY zv&4mT?H{(^4*aC&qn@(G&Y`EuIgrX%DezT@ItyH&$OXz4j@w@*kRVymEO8)R$A=wx zch|kskFFIb=W-nd?naTjQ96I|(ZR0heChE0aw|A zq-PDOw@2>TQC_IXg-Yixe7^SR=D&RKyAR6bAodxOjZXG@;N2=>Ig}R$rm_Vor`gm% zFK>tM4WPV8k&BcrTz<6p)xsC)FVkhhqdZ9ReMi~*L3XQ*J;3`{?bb<-QIqcfF xlxr_=P<6C)X5#bf`N^w=GuMh|uED_tmH?A`$*nxgp{!%TP_|S^STQp${{z6oj{ literal 0 HcmV?d00001 diff --git a/backend/auth_service/app/config.py b/backend/auth_service/app/config.py new file mode 100644 index 0000000..4b4190d --- /dev/null +++ b/backend/auth_service/app/config.py @@ -0,0 +1,50 @@ +""" +Configuration settings for Authentication Service +Loads environment variables and provides configuration object +""" + +import os +from pydantic_settings import BaseSettings +from typing import Optional + +class Settings(BaseSettings): + """Application settings using Pydantic BaseSettings""" + + # Database settings + DATABASE_URL: str = os.getenv( + "DATABASE_URL", + # If DATABASE_URL is not set, raise an error to force proper configuration + # For development, you can create a .env file with DATABASE_URL=mysql+aiomysql://user:password@host/dbname + ) + + # JWT settings + 
JWT_SECRET_KEY: str = os.getenv( + "JWT_SECRET_KEY", + # If JWT_SECRET_KEY is not set, raise an error to force proper configuration + # For development, you can create a .env file with JWT_SECRET_KEY=your-secret-key + ) + JWT_ALGORITHM: str = "HS256" + JWT_EXPIRATION_HOURS: int = 24 + + # Security settings + BCRYPT_ROUNDS: int = 12 + + # Application settings + APP_NAME: str = "Weibo-HotSign Authentication Service" + DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true" + HOST: str = os.getenv("HOST", "0.0.0.0") + PORT: int = int(os.getenv("PORT", 8000)) + + # CORS settings + ALLOWED_ORIGINS: list = [ + "http://localhost:3000", + "http://localhost:80", + "http://127.0.0.1:3000" + ] + + class Config: + case_sensitive = True + env_file = ".env" + +# Create global settings instance +settings = Settings() diff --git a/backend/auth_service/app/main.py b/backend/auth_service/app/main.py new file mode 100644 index 0000000..5564cf5 --- /dev/null +++ b/backend/auth_service/app/main.py @@ -0,0 +1,223 @@ +""" +Weibo-HotSign Authentication Service +Main FastAPI application entry point +""" + +from fastapi import FastAPI, Depends, HTTPException, status, Security +from fastapi.middleware.cors import CORSMiddleware +from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select +import uvicorn +import os +import logging + +from shared.models import get_db, User +from auth_service.app.models.database import create_tables +from auth_service.app.schemas.user import ( + UserCreate, UserLogin, UserResponse, Token, TokenData, RefreshTokenRequest, +) +from auth_service.app.services.auth_service import AuthService +from auth_service.app.utils.security import ( + verify_password, create_access_token, decode_access_token, + create_refresh_token, verify_refresh_token, revoke_refresh_token, +) + +# Configure logger +logger = logging.getLogger(__name__) + +# Initialize FastAPI app +app = FastAPI( + 
title="Weibo-HotSign Authentication Service", + description="Handles user authentication and authorization for Weibo-HotSign system", + version="1.0.0", + docs_url="/docs", + redoc_url="/redoc" +) + +# CORS middleware configuration +app.add_middleware( + CORSMiddleware, + allow_origins=["http://localhost:3000", "http://localhost:80"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +# Security scheme for JWT +security = HTTPBearer() + +async def get_current_user( + credentials: HTTPAuthorizationCredentials = Security(security), + db: AsyncSession = Depends(get_db) +) -> UserResponse: + """ + Dependency to get current user from JWT token + """ + token = credentials.credentials + payload = decode_access_token(token) + + if payload is None: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid or expired token", + headers={"WWW-Authenticate": "Bearer"}, + ) + + user_id = payload.get("sub") + if not user_id: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid token payload", + headers={"WWW-Authenticate": "Bearer"}, + ) + + auth_service = AuthService(db) + user = await auth_service.get_user_by_id(user_id) + + if user is None: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="User not found", + ) + + if not user.is_active: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="User account is deactivated", + ) + + return UserResponse.from_orm(user) + +@app.on_event("startup") +async def startup_event(): + """Initialize database tables on startup""" + await create_tables() + +@app.get("/") +async def root(): + return { + "service": "Weibo-HotSign Authentication Service", + "status": "running", + "version": "1.0.0" + } + +@app.get("/health") +async def health_check(): + return {"status": "healthy"} + +@app.post("/auth/register", response_model=UserResponse, status_code=status.HTTP_201_CREATED) +async def register_user(user_data: 
UserCreate, db: AsyncSession = Depends(get_db)): + """ + Register a new user account + """ + auth_service = AuthService(db) + + # Check if user already exists - optimized with single query + email_user, username_user = await auth_service.check_user_exists(user_data.email, user_data.username) + + if email_user: + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, + detail="User with this email already exists" + ) + + if username_user: + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, + detail="Username already taken" + ) + + # Create new user + try: + user = await auth_service.create_user(user_data) + return UserResponse.from_orm(user) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"Failed to create user: {str(e)}" + ) + +@app.post("/auth/login", response_model=Token) +async def login_user(login_data: UserLogin, db: AsyncSession = Depends(get_db)): + """ + Authenticate user and return JWT token + """ + auth_service = AuthService(db) + + # Find user by email + user = await auth_service.get_user_by_email(login_data.email) + if not user: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid email or password" + ) + + # Verify password + if not verify_password(login_data.password, user.hashed_password): + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid email or password" + ) + + # Check if user is active + if not user.is_active: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="User account is deactivated" + ) + + # Create access token + access_token = create_access_token(data={"sub": str(user.id), "username": user.username}) + + # Create refresh token (stored in Redis) + refresh_token = await create_refresh_token(str(user.id)) + + return Token( + access_token=access_token, + refresh_token=refresh_token, + token_type="bearer", + expires_in=3600 # 1 hour + ) + +@app.post("/auth/refresh", 
response_model=Token) +async def refresh_token(body: RefreshTokenRequest, db: AsyncSession = Depends(get_db)): + """ + Exchange a valid refresh token for a new access + refresh token pair (Token Rotation). + The old refresh token is revoked immediately. + """ + # Verify the incoming refresh token + user_id = await verify_refresh_token(body.refresh_token) + if user_id is None: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid or expired refresh token", + ) + + # Ensure the user still exists and is active + auth_service = AuthService(db) + user = await auth_service.get_user_by_id(user_id) + if user is None or not user.is_active: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="User not found or deactivated", + ) + + # Revoke old token, issue new pair + await revoke_refresh_token(body.refresh_token) + new_access = create_access_token(data={"sub": str(user.id), "username": user.username}) + new_refresh = await create_refresh_token(str(user.id)) + + return Token( + access_token=new_access, + refresh_token=new_refresh, + token_type="bearer", + expires_in=3600, + ) + +@app.get("/auth/me", response_model=UserResponse) +async def get_current_user_info(current_user: UserResponse = Depends(get_current_user)): + """ + Get current user information + """ + return current_user diff --git a/backend/auth_service/app/models/__init__.py b/backend/auth_service/app/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/auth_service/app/models/__pycache__/__init__.cpython-311.pyc b/backend/auth_service/app/models/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..39dbf16259f0026d68ecb6b02f10a64a2f1c995a GIT binary patch literal 166 zcmZ3^%ge<81c71eGC}lX5CH>>P{wCAAY(d13PUi1CZpdS7kQetv;YF&+S!?BzMpsA_j&hip`aruAFjRk zkc7||G3ghWr)9SS-70dCiCpYTJ`N;P3S?6b6jKRQQw=mz!>EB==?yk>uKX04c~_~S 
zlDhC*pCB=*6HbjFa;eV^61s*Hg&}ofFA9x-grq?O8pg(hh#A${cpAo@L(<(EWsf|E zmi0UI9Gz=MEanEECXBj*Zq}zj6*Qv|uJH7Joj4C$gb5Qt6FBOg@5S?F{k!m_q$bK$ zcuJx*Xl%RnM5AHD3u!V`<@2FaqnyK&Zbc696Xhg>KnQ^zJgC}a}6grRoT;)njnv6DMJQrRV`xhrJSq|ic z)uLWKvV?LpbS(lIU_GoHGI%|)c6;YU>P{wCAAY(d13PUi1CZpdS7kQetv;YF&ryk0@& lFAkgB{FKt1RJ$Tppiv;piur-W2WCb_#t#fIqKFwN1^`n*D6Ieh literal 0 HcmV?d00001 diff --git a/backend/auth_service/app/schemas/__pycache__/user.cpython-311.pyc b/backend/auth_service/app/schemas/__pycache__/user.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..74d031d79af2ad97bc331becd51617be231534dc GIT binary patch literal 4870 zcmbVP&2JmW72hR?yQ`6j_S`PBDjD2hu`Mv>L<6<0(0XE!+4Rh=qVDY{=&v9%7u=9Q6;vBmG*k~U%!m$Kk7yGdDH2FkIe^e{0 zYcHu$te82?&h4^-VQbqZOIx>@b;DwLqr#|0jf$b!6|0)7R4rz!8MbW}nXOsu+bXjw z)6iBf6>X*!N|fcr1V**8%WztwVwQ>;E7EewELLpwEh;e`^rG{I&A!3`o^ronnE59a z%L^@vH7oRP;mtj{WDfkB_XjmaZ(?m%+6UR zKe?xz45*DhGx8?YfEzQf8I*#$?H^?;4zqh9-nx=jJw?}XdR_NaT?Znnd925D{oAUM zZzB<1r=^^(Tj(d&2u=_LUcI=k8^vO&!nv{mzkoPu>PP7vNa_7dW_B|NWNm)O%&pSq zmXZ4gDBVPsbvW2_GsiZKa(T0DLz}qysq#w;U6qR=sMwnz>Ro>=GU<@XRwC87zd!kp zkADg{%YSqdSvQewCbISAy_`d`P+XF2B`-JF{(^I3-bpUF$%STep?-hwmO~bxxLiWT zx^%Lf&qO-6!YsgNo5(?^$CrhHggN;UAYbv+bvW08#lRqISD$cyIc45rJ0@sT7<}6| zrryF8CDksk8Mggh$)ehI;9k3><#r9r$W@qSr_%vz7`M&&RLbp+D`~|V#pVQxizt#P zICEF9_7RFP6e$$^=$ElJej<=y8_(?5`IJMZIy!f5KXVcrZepXE*bo}uaL9&BP(l&Y z;*36xqG{ms4jDa5qX(qNnS?$=sL!^;h5PylhvQC370`Lqi=xgSmUhfyZM;|Id2Bwp zho))??0U`iyNY!l*5eipz9{e$C0nRSaGtX@-Ejn==~Kn`==@_`0|nA$e+|N~^J@;d z*0JFQCz)}RnPxI0Y&hePj7u_YPMpv0IEgtoG1pAY2}b4|GUpPMPy{2ml#h{)jn@D< zixEl&+c+LZJWQd&-UHbvT#u>`5v&BVLx9A5h{Am&!1>}nvR=kZyLP3Qu(DO!HuG>B za@=0A@LsTDAWq!i95$fka>~4@Y3tx9=5v-FKAR@qA0t7RSDaFJ6#$QHl}hrE|`irePS)0H&oA<>I(TcrZ5QL)UeYK7T76nSAW)cPD2`V@XP-ioIzwJuHU z#qm$5^ag+{>1fX);_AekSh#TnYdD#O{d-olu3;Mr^mcm-#OZQ%sYSRiE3x`=efj(4 z*2tJcu()Kb6(6my)>pq@JtT7I%AwRRjd!eT#$PH@KU9SJ-##60=t#DPh 
zQ^o7z>Yj#{ZD`z0Pt``Sn~}>wdLw%Co7;gtBd;ny-jRtMd7D#A?^>b*-wg7iF^@#WCyDAi~%zS1MB5Q#r2k4uij;^cdH~9KH6u$@I1tAgcHM(g;QCx?g{ThVd=x#gYc57^WKeV57 zrtdjp^X}Mub8No;umLI5JQSDAw^9@P7x!)F=Ax5Ya#Ksq)KdMc#%~?61jQvwJOKT$ z{L`McNiiKWsoSWmVtGL5N4uCX#Ft6e~otanR$t)Deu8kEx zMV~q3vknETPHN3ftu<3?f`T=Nthodw6yfbS!(bgg2jrLN@B>*@$J?m_ksZ9YD}G04 zl>0B{TdEi3s?R_e@adg`3sCqN6#6e4FY_gXPSwsosc!w5n1;swEdKIArkfOB`b4Ug(`*v3Qq4AyYlLVY~f#q8EXfsSQ=17ZLt@x)%W z@x=MnZ6`kK#%G)H+4{oXs6%F{whbp(6HytFNRl@EGGM<@VRK6@+H~3B^-(FANurS=w)Fbv7|&ma$jvMe74f-*UjP^gl8(~+jnDlO@< z<1Z~~-0_!|bkXsbLlTrTGA=Mk?!7$X$Z f;UKx6#uGk*5na7t-xIj+UV$fsR@`?l#I5dsl%jpJ literal 0 HcmV?d00001 diff --git a/backend/auth_service/app/schemas/user.py b/backend/auth_service/app/schemas/user.py new file mode 100644 index 0000000..89b3152 --- /dev/null +++ b/backend/auth_service/app/schemas/user.py @@ -0,0 +1,57 @@ +""" +Pydantic schemas for User-related data structures +Defines request/response models for authentication endpoints +""" + +from pydantic import BaseModel, EmailStr, Field +from typing import Optional +from datetime import datetime +from uuid import UUID + +class UserBase(BaseModel): + """Base schema for user data""" + username: str = Field(..., min_length=3, max_length=50, description="Unique username") + email: EmailStr = Field(..., description="Valid email address") + +class UserCreate(UserBase): + """Schema for user registration request""" + password: str = Field(..., min_length=8, description="Password (min 8 characters)") + +class UserLogin(BaseModel): + """Schema for user login request""" + email: EmailStr = Field(..., description="User's email address") + password: str = Field(..., description="User's password") + +class UserUpdate(BaseModel): + """Schema for user profile updates""" + username: Optional[str] = Field(None, min_length=3, max_length=50) + email: Optional[EmailStr] = None + is_active: Optional[bool] = None + +class UserResponse(UserBase): + """Schema for user response data""" + id: UUID + created_at: datetime + 
is_active: bool + + class Config: + from_attributes = True # Enable ORM mode + +class Token(BaseModel): + """Schema for JWT token response (login / refresh)""" + access_token: str = Field(..., description="JWT access token") + refresh_token: str = Field(..., description="Opaque refresh token") + token_type: str = Field(default="bearer", description="Token type") + expires_in: int = Field(..., description="Access token expiration time in seconds") + + +class RefreshTokenRequest(BaseModel): + """Schema for token refresh request""" + refresh_token: str = Field(..., description="The refresh token to exchange") + + +class TokenData(BaseModel): + """Schema for decoded token payload""" + sub: str = Field(..., description="Subject (user ID)") + username: str = Field(..., description="Username") + exp: Optional[int] = None diff --git a/backend/auth_service/app/services/__init__.py b/backend/auth_service/app/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/auth_service/app/services/__pycache__/__init__.cpython-311.pyc b/backend/auth_service/app/services/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d972cd94b07f912d179d7e6ebe62663a1edd0d5a GIT binary patch literal 168 zcmZ3^%ge<81aV>OGC}lX5CH>>P{wCAAY(d13PUi1CZpdS7kQetv;YF2 hKczG$)vkyYXcow}Vtyd;ftit!@dE>lC}IYR0RWJ)DK!89 literal 0 HcmV?d00001 diff --git a/backend/auth_service/app/services/__pycache__/auth_service.cpython-311.pyc b/backend/auth_service/app/services/__pycache__/auth_service.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b421c7768eabf3737378d2def492bcee99cccce8 GIT binary patch literal 11764 zcmeG?TWl0pmepO=PnT`n4?nRfz!*327{ZVQ445&t39pz0n><>~_BLH*yP+RWRT*%n zjiL=&d80^SqwEsROf;TVlJzFc$}2@$e`F;C(JUYHQEin(t0W|(Riw?&sv{C9%GaKA ztE;-I-A;&<$=BSzeeV0#y|>Q$*1y)*`zZ)_M*lteFPkaqf3RQ#rkr`S3YnV}Pem!- z!8=oqv@_~-V4hCVX(q~~T~QY)Gbwl46ZMe1E9Fi5qQ100>L+D)sxDdwV>~G~T_3F{ 
zb>37%x-r@ac^~gjZAv#qo6>=3fRyV}!E`7ZN{6H2baS-XK{+X*?qZ9vIHC0;2St4X ze?3IooYb=v&t9ka`j4E@0{?PLJMSK+A`Q7||In;FEo9_mA}%Mh8BP+!%gKbmU6_@U z89|b`RCX$v@E?z7_>>@Vv(V0o!c($I60$~T#TYajW0&^ujzWl9}+@*82&CHY}=um zZ@%t|HbZ>?PND^RxA3iiZS7heBHOr*VYz+f43|j33*jcSA{V#gVhxniocx7^iN^;EEW83cw5oux*kQBpk?b-yInlIG@P!!ucyg@BE9W1%scbcMT65(i!P(LHO)KiW8MJX_Zs~ob(8*kW}eQ|i*9Rg zBmWU(J~0aLa<)@)gIT1yDe;MUM&4AZSu`lEQUy8QnRon_;^{o|Tk3trz77ZV3an(F zxyJC0@pA5p;|g_^eucW?h%mYKV@ZQkU6|v@8{pzRF9Jsz%XJd2!%YfuVmg_b(ixiB z?K|fH7x9aDG9roy&2?p35Cst_iy)Qe5v~e}SztQ>DG^V_#aK2Y#DKG9Ak};pE(Tn6 zN)Q192$yq-VVnZz0Th+fGU9|Zo02t$AmQrQzt^b}a2)4c7{f~#gz+jL%*PI|H6NC8-4p3 z?r0}{r_%*_&5MVSiKm6X63)*;IAI&%7>7TW<2IlfqX#E~5=B|*M(l(Nu@?dIWw8sv z76crEtq8UOh?MDq^xTfM-3WF7sHTdL743~D=eUgs!{~wY{V;8$m#?n%94Q2Ur3QcX z7om+)E60_#QxNYt^t8a9R@u`EdwN6K5O>1TH4knimgN6R0zV50G}%bt+_@7YW4ZPX z(Er5928gdYll- z{6%nmWO+(y8-cjmq^AXTRAomMcJv2u-A8a8)b)66Y`*|IvNW#65{KE|>}V+2LnwK34(T!sc$HJYuA zspP7kXdf@5+EEUR79G#111-*-cU?430rWW!l+BZXviJS0Dc$&kRa z>#>cm|4EJ`ft_yWkeO-;~NAa$C_ciwwhumlMqEMgbe};Fn>G9spW9+MG<9(SH zL?9Q@T?<^C%LrF=6Cs{RWM{$QaV05Fb7ijrcq?+j=gYnZPR?ezD?r$`I1sBo{*E5g|S@6_-rBp_R8dJDmBL?h@(Sy zz6@|VC)fL(Ba-Vf9Vr~KG4yVxO+$1tEr2XlBGv>d(y>dzT(0F@S*_;cDVUiD8|f-| zdnBzyp+0hryjgC)<$e)NsS9t@vJx7COsJRo_bUG<(9#NPCvuv6us2Y>Z zfXf19h2|BrsT6t+h}NpnuqlnnipeRhsr-k_66UKXNqXkF}{We zfh`XUkwnj9q-tm=) zIsn%1IL7B+&@%;gLS-iucA^9a)BT~e^yICl|M1!euPKdtAwFo{{($RIx&6hCeQ>@l z0XW~50Gw|NEQ4+el)O}6`-h38t+#sqaOs0fO4E}NErXt_HRu~Z)zVF-9W8b|SE~ma z-q34PTZwY};7uCfM{amx<$!wdIi+m^;|pi?jCraC4h@fYO!}U=3Z~xGi;}QB^#07Cu8QNDObl|Iz>bRK7Eu zu^eoEG*d@2{AjjTQ>V;3%U(7hwTchkt`UG9N?kNH&qd1EyT@##lfd|bEf6&NAs9$fM_vXD7P1xA0=*)XTx_d9K+b3!9yw#a^=RK27 zqOqAe4bcjE=Y29dv&vzyrhW-tJ|Ak__C1&Vd%kNvo+kc1$l3gR2ZyJH#3e2{smmN) zcyXo=54_CCHc{Mh$qdMFxaOEj3EZo*f;cyp+cJs{IWB>7n1Y4OA!HH)3h%M8NPWcx zsJk-o1avQ8FXm9`BTfy?qq_kmjgF^Mgtc3|8~%#dPnSE}aa=b5%|&Fb=GTQS7PNX} z*@UNyqu6%`013^3GVh0^>?9-`(A|Xk3D}*4>zZYp2`->aCt;JXtJ+DJ_+$62T`R7& zU5Ecm{%l@({^hkJFBf`VQF~q~1fyy&`ncv|sQu>HJ7a}Vw;JlcAL?BT^%g?A)X=Vl 
z(T8oF3nz=4xrOH*wgTg2>LUc)V1OIHJFpmD3-&0%p5oRWH%?;B;l<YH&O>COCDT#QfV86SYVH+>=A`MQVe$97%+Z>1%U&y3t13x zKps6j?4<5G{loj{yKLw1Zu;(S7vwcRUX5j9>nu{_3WWc3oBStFEkfn`u(7>pqirYH zsBpV9PFZzW7)u*7=A9M#wA-ipIA}*4B|++zE%a8K(*W%`D|T}&DkQ71&Qp{3fREl; zv0Q7-lL!6)UU}leE?+|mI^}|*!edQ51&;Y{a)rmmWm!ni$mFt!7l=L#*LSAw+g(wN ztM8KVdI>3rc<_7RAar^~TT$GP097h+AA+p_G|!c|m;nwT9>yvp>jljub`AK;E z6_SfrF0TY#@3Q&j*ERnGPVq4SUHf_%Zo40TVlDi{(n+=NP$7I+4Ifq-tK$OmurXMo z9KKfKEkVs{%X>L>%iiS?b>Hzq;|aC#M5P(^i_lxwh)W*vf9%aMrDJTh`}19pKrFCl zRQ8O*o*|Qc;ca-k?#((SbaG|iXNMtySn!@wy{8oKsbXVrp-x14ActfVxG1&=!ryxg zgtxPE8=Awlk%6vxItFv;fw(_Fbn0QzM0%qJQ@a4O&PrSvmC>fiwvPTj!Rnx7i^HY% zwHA;IZF3I*+ShUqLfS(l=-INX$?`beO^IuTuwI;{~= zc@T?-5FqJQC{7ocgIKo>fN>o_+@ge1kqT>G2L#BYbut9w3BMS~by=;T_3gidxuhL{ zbQW^#T?_XvwG_fns^KS<#_G7h08vzk?P1$CxZZ6jIlVxfKbggXWJ@fTu_fkKz0x`c zarJ|AT$Hex&Yxa0RX(4ni!jhg9~E!X6@g!FDz0fw>LI`0>z=qnoHZ zoBYFD={xO*hBwo9H@hG&A~Ju;1AyDiOUW6{l}dsEZQ~gzjtI{{XYEgM!JT*5IZJIm zQGIPq)NYd}Y;e{6LuxnjzjeV*v9-mly-y9^5Se$$fy&giUBAG6T(zisib^;Mg}bhS z+Z5D!NG+-|o$Gm4khv5%cev~%xN%ZMKuTP1oKJ&lJUN>Ie-pSjgeZe66F!&)+CJLb9(>;NF)$%2H1 zpzF2uc-;XjCGu6uwOL8p*7Qdl{TYDDrFXMMpX~una#1b>`_y3H{ovlU;NIn@e(asc z&B1pD3&HJbaQpq>&b8pqLNKBRBMKWal+R|AyE-tH&w=Gvm9_zh1@@rI9#q(a#b9{h zMCE6wpUp^QczD-`jvQsEJBH`&j2c@-DW{;{GMYmUI_N8 z!CtV)h#>kQw-{mb2HJuH$m!F8iMEGV;G>Ph5DV;}$_^^*U@_Qw<0<23AZ?o=8Lkg` zhU%%?_5PtF^zD|;p#l2#fD7_t5hoE2;smTe(uyBX5yx{(^T%T8EI*qPVlj=4#a^9_ zr^+p!Sd7mmVlfeqPsH!3#3Kk$Ptu*XJ($~tfG8Gdm59dxXfAnnCMAf^VFlj4i7#L+ zb0M2eiQ|~1!S}2&$jHQ3u>CxMf1`9Yw;3yt`4SLCZ?aKbuka(F zk|d4QU{{kXv_DraB(GuYx#RSmXzUBD*+ zEWr>+R`g_*zqjx6i=?I+z?rozO<*lx>IOi6jray59pc|YqBTpers64}s`OmHa8>S) z3EkoEnT&%{Ig_+pEO_vXF3@!~w>&q4b|a#vRxeG* z!Hn$()D}{bE+bh97Z8{F(Hs>CvLbG34YP7GCG|`20U3V(s$WXrMTUs>g60GH21YPA z|HWTpvS^<`5hr>~F(hhhX2>g^|k3#LX#v-*% zsTGS`+qztfvW%j literal 0 HcmV?d00001 diff --git a/backend/auth_service/app/services/auth_service.py b/backend/auth_service/app/services/auth_service.py new file mode 100644 index 0000000..d30a5ff --- /dev/null +++ 
"""
Authentication service business logic.

Handles user registration, login, and user management operations.
"""

import logging
from typing import Optional

from fastapi import HTTPException, status
from sqlalchemy import select, or_
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession

from shared.models import User
from ..schemas.user import UserCreate, UserLogin
from ..utils.security import hash_password, validate_password_strength, verify_password

# Configure logger
logger = logging.getLogger(__name__)


class AuthService:
    """Service class for authentication and user management.

    All methods run on the request-scoped ``AsyncSession`` injected into the
    constructor.  Read helpers deliberately swallow query errors and return
    ``None`` so callers treat "not found" and "lookup failed" uniformly.
    """

    def __init__(self, db: AsyncSession):
        self.db = db

    async def get_user_by_email(self, email: str) -> Optional[User]:
        """Find a user by email address (None on miss or query error)."""
        try:
            stmt = select(User).where(User.email == email)
            result = await self.db.execute(stmt)
            return result.scalar_one_or_none()
        except Exception as e:
            logger.error(f"Error fetching user by email {email}: {e}")
            return None

    async def get_user_by_username(self, username: str) -> Optional[User]:
        """Find a user by username (None on miss or query error)."""
        try:
            stmt = select(User).where(User.username == username)
            result = await self.db.execute(stmt)
            return result.scalar_one_or_none()
        except Exception as e:
            logger.error(f"Error fetching user by username {username}: {e}")
            return None

    async def get_user_by_id(self, user_id: str) -> Optional[User]:
        """Find a user by primary key (None on miss or query error)."""
        try:
            # IDs are stored as 36-char UUID strings (MySQL schema), so no
            # UUID conversion is needed before comparing.
            stmt = select(User).where(User.id == user_id)
            result = await self.db.execute(stmt)
            return result.scalar_one_or_none()
        except Exception as e:
            logger.error(f"Error fetching user by ID {user_id}: {e}")
            return None

    @staticmethod
    def _duplicate_field(error_text: str) -> Optional[str]:
        """Map a unique-constraint violation message to the offending column.

        Fix: the original matched only PostgreSQL default constraint names
        ("users_username_key" / "users_email_key"), but this stack uses MySQL
        (aiomysql), whose ER_DUP_ENTRY message reads
        "Duplicate entry 'x' for key 'users.username'" — so duplicate
        registrations surfaced as 500s instead of 409s.  Matching on the
        column name substring covers both backends.

        NOTE(review): substring matching is a heuristic; an email *value*
        containing the literal text "username" would be misclassified.
        """
        text = error_text.lower()
        if "username" in text:
            return "username"
        if "email" in text:
            return "email"
        return None

    async def create_user(self, user_data: UserCreate) -> User:
        """Create a new user account with validation.

        Raises:
            HTTPException 400: password fails the strength policy.
            HTTPException 409: username or email already registered.
            HTTPException 500: any other database failure.
        """
        # Validate password strength before doing any (expensive) hashing.
        is_strong, message = validate_password_strength(user_data.password)
        if not is_strong:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Password too weak: {message}"
            )

        hashed_password = hash_password(user_data.password)

        user = User(
            username=user_data.username,
            email=user_data.email,
            hashed_password=hashed_password,
            is_active=True
        )

        try:
            self.db.add(user)
            await self.db.commit()
            await self.db.refresh(user)
            logger.info(f"Successfully created user: {user.username} ({user.email})")
            return user

        except IntegrityError as e:
            await self.db.rollback()
            logger.error(f"Integrity error creating user {user_data.username}: {e}")

            # Determine which unique constraint was violated (MySQL or
            # PostgreSQL message formats — see _duplicate_field).
            duplicate = self._duplicate_field(str(e.orig))
            if duplicate == "username":
                raise HTTPException(
                    status_code=status.HTTP_409_CONFLICT,
                    detail="Username already exists"
                )
            if duplicate == "email":
                raise HTTPException(
                    status_code=status.HTTP_409_CONFLICT,
                    detail="Email already registered"
                )
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail="Failed to create user due to database constraint"
            )

        except Exception as e:
            await self.db.rollback()
            logger.error(f"Unexpected error creating user {user_data.username}: {e}")
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail="Internal server error during user creation"
            )

    async def check_user_exists(self, email: str, username: str) -> tuple[Optional[User], Optional[User]]:
        """Check whether a user exists by email and/or username in one query.

        Returns:
            (user_matching_email, user_matching_username) — each entry is
            None when there is no match (or on query error).
        """
        try:
            stmt = select(User).where(or_(User.email == email, User.username == username))
            result = await self.db.execute(stmt)
            users = result.scalars().all()

            email_user = None
            username_user = None
            for user in users:
                if user.email == email:
                    email_user = user
                if user.username == username:
                    username_user = user
            return email_user, username_user
        except Exception as e:
            logger.error(f"Error checking user existence: {e}")
            return None, None

    async def authenticate_user(self, login_data: UserLogin) -> Optional[User]:
        """Authenticate credentials; return the user or None.

        Returns None for unknown email, wrong password, or a deactivated
        account — callers cannot distinguish these cases, which avoids
        leaking which part of the credentials failed.
        """
        user = await self.get_user_by_email(login_data.email)
        if not user:
            return None

        if not verify_password(login_data.password, user.hashed_password):
            return None

        if not user.is_active:
            logger.warning(f"Login attempt for deactivated user: {user.email}")
            return None

        logger.info(f"Successful authentication for user: {user.username}")
        return user

    async def update_user_status(self, user_id: str, is_active: bool) -> Optional[User]:
        """Update a user's active flag; return the user or None on failure."""
        user = await self.get_user_by_id(user_id)
        if not user:
            return None

        user.is_active = is_active
        try:
            await self.db.commit()
            await self.db.refresh(user)
            logger.info(f"Updated user {user.username} status to: {is_active}")
            return user
        except Exception as e:
            await self.db.rollback()
            logger.error(f"Error updating user status: {e}")
            return None

    async def get_all_users(self, skip: int = 0, limit: int = 100) -> list[User]:
        """Get a page of users (admin function); empty list on query error."""
        try:
            stmt = select(User).offset(skip).limit(limit)
            result = await self.db.execute(stmt)
            # Materialize as a list so the return type matches the annotation
            # (scalars().all() yields a Sequence).
            return list(result.scalars().all())
        except Exception as e:
            logger.error(f"Error fetching users list: {e}")
            return []

    async def check_database_health(self) -> bool:
        """Run a trivial query to verify the database connection is alive."""
        try:
            stmt = select(User).limit(1)
            await self.db.execute(stmt)
            return True
        except Exception as e:
            logger.error(f"Database health check failed: {e}")
            return False
b/backend/auth_service/app/utils/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bba665dbba9b906f0317a8c2eb8c0d7fa25a9f85 GIT binary patch literal 165 zcmZ3^%ge<81j%9RGC}lX5CH>>P{wCAAY(d13PUi1CZpdS7kQetv;YF-vz7GrR?k^Y`6GRw5fPqDT%Qr1_3mE=% zRXrrdq3nYe$Q^LaVRcV;S3jz%tE+1MB^(YCNTI>sr2eg*kU!!}Ir&P3%>qZr19Fwf zWSq#H>`HUv9LrtfE|$B;-7M$FdC1*qPsTIu$#}=T9QNnazKnm|&&sZJAQK!HSlN>f zWy0fOD0{~vvTr;pyQE!;UlFG3%1dY|7l=|X`_I6e9H?%eaLK`s`0)l=P-4&$o8aXT zyf?~Wc#p8UrU|bcg}UAHUU`?&EZ4yd-mP&NmRGL-$TQv|H!uvp(!A9UBgO!`Rc@@z z&;;*or5X1qO)#s=(SJ9z?=6jmD{6-KeR4|$zZKqJsmv8~;I=`VXt&8Oz}*Ay?Xqi> z#P`j=6&zKPd79Ga#JrwLr}UJfi4!UnbCRabs8klGByB2{ofM_4EWY#8F;Q1oBPH zeF$(B*$n#9)AAN2wR0HiT1)pNt??i#SvVom{hY_C5e0M z9v*|u?UI$GDl4!LEvIHRMT>i=ALg?BQc7i;v-}fO%_Qs?DU}T{G;%MUP$k110 z6Ib=Tr@!|G4sv5gN!?O!+>(+&ZTW_j*QbDJ^lmDt+>mm)8^|o}hF0QTcW%xKOe(rf zLJEhDG{awe9l(FWuR#9nCH_Xkw`aXsG@Ik=p*Azr@tEImyIO*OA^`pi!BQByg<1^0 zneGBi%U>3PBplXC@<0jcytxVhI8fC#T`B7oIur6W_a)Rnsht_9Z$u4-GV#RQ(Z=SPAfqzfR{z}E5(#k__>ek)1maysi^b6ykmDTSQ@ zMc(QxX($mo1r*j7nD3=(J}Ya_J$Zeit0(TYy!Kd@Z&Jx>Qd+mdZ}p8_zcQ8>xqNlF zf7J5g!OG28UL}hRx4gFW#ND(3_F?f1hjmh94QT|6rl`cMFk&XkE2pnOT|4}>0RX`1 zAPLpqJNLz2LpW9xj+w%-LjQWW;oi;Vo?pN5t2Z8=F1DRA+fJ>H6~nz|xVJF4Ce(d; z_LH;AyNg1*DYP4WyUqPbE-j_9m3gcvP77zZJaMvea9~~(XI$jw=Kvm%z)QpeSX_+- zeeERIh{yuBvwluN7$pZt^2k zdKH5wp-$7G)!uZa#UUDFG_Q!MiAtjeYEe&49Tf+qw5EtEhz^}eX$nK4O|aF$I8S#& zZuyc^O7eDYhPL5bU|=?>FfAE(Q4wk=V zpM)AW20VzGH!VN!_i3=8= zRC99_N4C6qJ(*Q!DC%7MI*P%7iGiP8xj1ru?BeC&#D&XOM?g{Kb9k{^?&%rb7S{;& z8y)By85m1k8n|wS@%{YJ+m}Z!j$L>cZoOA=$U`_uKvxrG1!v2NCD`wB0>;yj_Mq6x zQr?hcdrR26O0Zqg)6fV1+Q$Gu(*2~a@j>91fqxJ_6ABlf`eGo(p7*%*x|YvgDc0>b z>-MkQEY_V|@-BJ5+p7EG`oAa4*RLBLU%OL&81aIMxDsej*7!-wpI~>NCqn+sSU?{^aL!O?m>X7qbCuZ0$?=` zFx4WDrY5rzn1jjyr7E@RE@s*AV<_~j3e;u~BKS%x(5h8uL&+|42-P|0J_vs;1He|F zF`MgGzqo83eCwMl3G+(ExSTcW)ndJB)~idtwP@2)bf0c1ROa1Z89M0svH@=oEeK za@tS}T(ts}-Cy%gi>daSjCC ze*>0f@%mg&`5tsUp>Y6EjpvnVmQoj{5*%FF3w9BkyK<%3W0nqmKMXJmf2|7uJM>L$ 
"""
Security utilities for password hashing and JWT token management.
"""

import hashlib
import secrets
from datetime import datetime, timedelta, timezone
from typing import Optional

import bcrypt
import jwt
import redis.asyncio as aioredis

from shared.config import shared_settings

# Auth-specific defaults
BCRYPT_ROUNDS = 12
REFRESH_TOKEN_TTL = 7 * 24 * 3600  # 7 days in seconds

# Lazy-initialised async Redis client (shared by all refresh-token helpers)
_redis_client: Optional[aioredis.Redis] = None


async def get_redis() -> aioredis.Redis:
    """Return a shared async Redis connection, creating it on first use."""
    global _redis_client
    if _redis_client is None:
        _redis_client = aioredis.from_url(
            shared_settings.REDIS_URL, decode_responses=True
        )
    return _redis_client


def hash_password(password: str) -> str:
    """Hash a password with bcrypt (cost factor BCRYPT_ROUNDS).

    Returns the hash as a UTF-8 string suitable for database storage.
    """
    salt = bcrypt.gensalt(rounds=BCRYPT_ROUNDS)
    return bcrypt.hashpw(password.encode('utf-8'), salt).decode('utf-8')


def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Check a plain password against a bcrypt hash.

    Returns False on mismatch or on any error (malformed hash, bad
    encoding) — never raises.
    """
    try:
        return bcrypt.checkpw(
            plain_password.encode('utf-8'),
            hashed_password.encode('utf-8')
        )
    except Exception:
        return False


def create_access_token(data: dict, expires_delta: Optional[timedelta] = None) -> str:
    """Create a signed JWT access token from *data*.

    The "exp" claim is set to now + *expires_delta* (default:
    JWT_EXPIRATION_HOURS from shared settings).

    Fix: use timezone-aware UTC datetimes instead of the deprecated, naive
    ``datetime.utcnow()`` (deprecated since Python 3.12); PyJWT encodes an
    aware datetime to the same POSIX "exp" claim.
    """
    to_encode = data.copy()
    now = datetime.now(timezone.utc)
    if expires_delta:
        expire = now + expires_delta
    else:
        expire = now + timedelta(hours=shared_settings.JWT_EXPIRATION_HOURS)
    to_encode.update({"exp": expire})
    return jwt.encode(
        to_encode,
        shared_settings.JWT_SECRET_KEY,
        algorithm=shared_settings.JWT_ALGORITHM,
    )


def decode_access_token(token: str) -> Optional[dict]:
    """Decode and validate a JWT access token.

    Returns the payload dict if the signature and expiry are valid,
    None otherwise.
    """
    try:
        return jwt.decode(
            token,
            shared_settings.JWT_SECRET_KEY,
            algorithms=[shared_settings.JWT_ALGORITHM],
        )
    except jwt.ExpiredSignatureError:
        return None
    except jwt.InvalidTokenError:
        return None


def generate_password_reset_token(email: str) -> str:
    """Generate a short-lived (1 hour) JWT for password reset."""
    data = {"email": email, "type": "password_reset"}
    return create_access_token(data, timedelta(hours=1))


# Password strength validation
def validate_password_strength(password: str) -> tuple[bool, str]:
    """Validate that *password* meets the strength policy.

    Returns (is_valid, message); the message explains the first failed rule.
    """
    if len(password) < 8:
        return False, "Password must be at least 8 characters long"

    if not any(c.isupper() for c in password):
        return False, "Password must contain at least one uppercase letter"

    if not any(c.islower() for c in password):
        return False, "Password must contain at least one lowercase letter"

    if not any(c.isdigit() for c in password):
        return False, "Password must contain at least one digit"

    if not any(c in "!@#$%^&*()_+-=[]{}|;:,.<>?" for c in password):
        return False, "Password must contain at least one special character"

    return True, "Password is strong"


# --------------- Refresh Token helpers ---------------

def _hash_token(token: str) -> str:
    """SHA-256 hash of a refresh token for safe Redis key storage."""
    return hashlib.sha256(token.encode("utf-8")).hexdigest()


async def create_refresh_token(user_id: str) -> str:
    """
    Generate a cryptographically random refresh token, store its hash in
    Redis with a 7-day TTL, and return the raw token string.  Only the hash
    is persisted, so a Redis dump cannot be replayed as tokens.
    """
    token = secrets.token_urlsafe(48)
    r = await get_redis()
    await r.setex(f"refresh_token:{_hash_token(token)}", REFRESH_TOKEN_TTL, user_id)
    return token


async def verify_refresh_token(token: str) -> Optional[str]:
    """
    Verify a refresh token by looking up its hash in Redis.
    Returns the associated user_id if valid, None otherwise.
    """
    r = await get_redis()
    return await r.get(f"refresh_token:{_hash_token(token)}")


async def revoke_refresh_token(token: str) -> None:
    """Delete a refresh token from Redis (used during rotation/logout)."""
    r = await get_redis()
    await r.delete(f"refresh_token:{_hash_token(token)}")

# NOTE(review): the mangled patch lines for this span also carried two
# package manifests (backend/auth_service/requirements.txt and
# backend/requirements.txt).  They are plain dependency lists, not Python,
# and are intentionally left to the original patch — no changes proposed.
b/backend/shared/__init__.py @@ -0,0 +1 @@ +"""Shared module for Weibo-HotSign backend services.""" diff --git a/backend/shared/__pycache__/__init__.cpython-311.pyc b/backend/shared/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..057a5fc582860066c15756e4d5369340244edaa2 GIT binary patch literal 221 zcmZ3^%ge<81jo$RWm*F1#~=<2FhUuhK}x1Gq%cG=q%a0EXfjn924^G|rKTw4=BJeA zq$;H47b%3NW+vt9dgPY`XQt;VBqb(ir{<+76sH!IWhSQ<>-lLi-eQlBPsvY?k6+2~ z8D#P=Ll>)<m~xQb7^t3@lGNgo;ut->m}0OcG4b)4d6^~g@p=W7zc_3lR@xP@ b0ZjlouvidCd|+l|WW2#(egTGxSb(Ac^PD~d literal 0 HcmV?d00001 diff --git a/backend/shared/__pycache__/config.cpython-311.pyc b/backend/shared/__pycache__/config.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2d6f344035987083f8b6b8732e970b17f742a677 GIT binary patch literal 1465 zcmZux&2Jh<6rWuNj0qo!T_+}0)h#`_2@usSQE60_U>cJ+-~v{f71DIQJGNI{cAc5E zBXgov4?c2B{sLE3r1t;l38Ws-UVF-o$T|7cHv`zU+}YX3`@N50-tW!MZ;3<dArs@ggzJYf#^pJ{Z=mfx< znF;CG3OCV8r2BKMxo0qHX{P7y*t-YJ@NLi4c08sTj-x%J_O_Q@_xz^4>uTGE`4e?5 zjZ=1Lo0R8b8=hftz`k$0yIk90UPq(uq0KzELtS4xG?;B{JCth&93r&+Bg=4o+sw{V za$ zC2>}jW|++5TvB+2v~Hnu#9ugf4BL}RF`s9i=NI=4&R=@W`quGG!`bt=pMNRnCwrdH zf&L)AR%(?VmYNl^)!67RL)UB$$>-t4!s`96^7n%IMrEzqltH)5J;U9l*$&OxZg!t} z)`2M+93&qJB~-w#pBi)kdjR zt=GtUeXG$7mdf?|lWK)jYURfBP3eGjF7-{B(sp_il#9O2x@LudOMJF;naf;R9>i7` z=ingKR>DD)5ZCBXLV_3}9jL$&dYq6K2Zl5CBuzLxgj1K>zI{l8DCCkI+o8XsQ?ZM) z{93VXdKPUw0+8b-p=LPTz&-*BEEv${!tK_?1QBs`!>8>aM3$|M(K}^lcUA- zaa24^RLDd`Z6(RB3vfe-1rWg;A(m$n!Xg5NxX*$VA%^REzWmrYxaLH3R0weau(*u0 zU7sZc6brK@A>{wu5}LRUq?iYV{|Vw^b3HV|g#j*%BPjj7(%a~NI>4JShj0&uO2hX4Qo literal 0 HcmV?d00001 diff --git a/backend/shared/__pycache__/crypto.cpython-311.pyc b/backend/shared/__pycache__/crypto.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8f093533bd1935d535d5e638f8f4d48fd821546e GIT binary patch literal 2590 zcmbVO%}*Ow5Z`Ybua`PdLxH5-w5G%a3?Y;xfTAFvX^=vs5~UKdWUal=VTt2KZ`V+e 
zUAby|%ORB_#cEU4hlo;9RUCTY81*mkqLqkNLW)}T&>K?JOD^fWwY@P;$)V5o%=?%( zZ{EE5&G;ut3Q(k7{8c;PDe6ymP$T{te~zK^kW#5dN~KjMLod=UXBHXYY=*tdEpjxa za)?)XB&0c2_?lbvsXiF{kpOdk#3MeG+F9SzASo5JfWrlx1*Az4MB3;P`( zR@M@C(7GdFWae--MSFHo6kM8O;rHAG@{lqc)|S0~o+>j9d%UhnZ-UIWCwFvICVF3M zn=rHO&B8UAjma5=wN)f5^7)a$WqIP*#nbAZWy6u{h|!NR7Mh)nhB`PC#|7B%Z_)s1S*+ejr8aFR|_3O+OkmZ z<@u&C@YPoudGlEX;TUbqavg`%|I+Kr3wd9f;EBn71r`KkXtPxeLByr1psIz|nr1G^ z4Va4sT=7p`+!*8Tfb2h$(pHubCU2NXj*Sk;+A85u`H$iQIjMt^q++14(ZOm2s`j%R)cp!$*PHJ++vcV9HFYwX@PURxJGV#^I1Kc zLQy|{3j}ed`X@515Ah%7B$<~HRQ11xU>lnoI#9-2Jkmzl#n^*N4 zeh((efyTE$iqt=?RO^X*pWR>h;e$s5meglUeZ`NTcAVVr7~1O?+G*JzzOpxbVC ze9a!dW_8Tj9dlpaD9+typLIvdhSl9);W`h(J$AUa^5G>bJZ^`__rp_r;i)HmR(Q@1 z&lTqnq;^|6RZ2g~SkhTrI=e58>`5a#7c6PQmL}jhv8yyz5l>gR)3_C^!lwv%7f6aY zyxu&dwY>>xO)*zsk4cbCiIAX#)QE2yaz4yJy=a^%2!J7CN3AV;_T?F~0inxYKhOQb zZi3XdCt2|2edPunbzKyd=kr2SM)2qTWe=yXBq^v}N3jKH;Wxf_gLS4_VdQ40tYEaN z={X~_Uav?Z$RmD^WWMrmd(JF-IwZoS$UqVKI2d+`YlMjcX@8SdWLS}dIh1-ZgJ6UO~9p^%8 z6_R;+9e2V4AkhnvDtcI`m{rZ)WOolCq3k{jKr6SG7s@xwHy?N1XNz+O zQmBrnEmV<4Eos!2Mj^^W9s8l^UMRZNzmu{;<92AgcNeVql^1!e(%+*dSAcSZz`1{pwuq^ z9X})hpUq(|rJ1q2fyN79&<18PyKfEbwml$iGN5g086W981Gnv>eT2E(W*+yL&;6du z3eTK&p#TM7)!bXcn=+@IaR21I4jfvn!iyN`_KIu)BTI}vfX3p+`QoD_?!}x1*YEur z2rh?dB6-kD#UNC{NX7kF#auRvUrf#h(d2qE*7R;HmG)PMDo&HI|JKsw+YeS(?$5$k z_UfxcE<$q~$Wat=DSM;7rYuyU_6B$i*SU5DS0j-`_CdP<6m4V+b{7zk8EXO)WM+$C zw`Ak&{X8NF1F6D|0a zC}q6!Cy&?C&RQ5f<_YU4E|u&wn;rRkC^+lPnQ0Ekx;jhUojXMpbD0wI6v9);T&OQ? zSL@qHmotx+-3<1@ad+_^zHC|mnwqNGV_b!CU+NN0q(-u9ypdsHE-o55BUjNdj4<19 zGO>r&IxwiA{D{68*JWML9&iwLQzL( zP*E}V0hijzWAM!}^lL9l6U7rHOtTFqr~5E^{|#MB*;qRGATT11n81ZR%^{J?=rmR3 z4Fn{z3iXrwMD8G?Cr)oZdG%A~kXkVV0|mc^=oh9bj|3p_PSk`M%}J<)p!^u>%tH$4C}%)s26)7u45QC=Np#7GkzkKz`r<- zRK9XeVw~CZEX?nkSIg(x^REt4HtO@1ID;2vHS!}uMl8$9Y}=w481TV5H-)qR_wB%i z>Gxmex)x51@bXfw{vo-V{`M%>|Ai&^xv_m>IV)7HYZ)MHSE?C7y;H1Yj&ZuMaD0=! 
# --- backend/shared/config.py ---
"""
Shared configuration for all Weibo-HotSign backend services.
Loads settings from environment variables using pydantic-settings.
"""

from pydantic_settings import BaseSettings


class SharedSettings(BaseSettings):
    """Shared settings across all backend services.

    Every field can be overridden by an environment variable of the same
    (case-sensitive) name, or via a ``.env`` file.
    """

    # Database
    DATABASE_URL: str = "mysql+aiomysql://root:password@localhost/weibo_hotsign"

    # Redis
    REDIS_URL: str = "redis://localhost:6379/0"

    # JWT
    JWT_SECRET_KEY: str = "change-me-in-production"
    JWT_ALGORITHM: str = "HS256"
    JWT_EXPIRATION_HOURS: int = 24

    # Cookie encryption
    COOKIE_ENCRYPTION_KEY: str = "change-me-in-production"

    class Config:
        case_sensitive = True
        env_file = ".env"


# Module-level singleton imported by all services.
shared_settings = SharedSettings()


# --- backend/shared/crypto.py ---
"""
AES-256-GCM Cookie encryption / decryption utilities.
"""

import base64
import hashlib

from Crypto.Cipher import AES

# AES-GCM authentication tag length appended to the ciphertext.
GCM_TAG_BYTES = 16


def derive_key(raw_key: str) -> bytes:
    """Derive a 32-byte key from an arbitrary string using SHA-256."""
    return hashlib.sha256(raw_key.encode("utf-8")).digest()


def encrypt_cookie(plaintext: str, key: bytes) -> tuple[str, str]:
    """
    Encrypt a cookie string with AES-256-GCM.

    Returns:
        (ciphertext_b64, iv_b64) — both base64-encoded strings.
    """
    cipher = AES.new(key, AES.MODE_GCM)
    ciphertext, tag = cipher.encrypt_and_digest(plaintext.encode("utf-8"))
    # Append the 16-byte tag to the ciphertext so decryption can verify it
    ciphertext_with_tag = ciphertext + tag
    ciphertext_b64 = base64.b64encode(ciphertext_with_tag).decode("utf-8")
    iv_b64 = base64.b64encode(cipher.nonce).decode("utf-8")
    return ciphertext_b64, iv_b64


def decrypt_cookie(ciphertext_b64: str, iv_b64: str, key: bytes) -> str:
    """
    Decrypt a cookie string previously encrypted with encrypt_cookie.

    Raises ValueError on decryption failure (wrong key, corrupted data, etc.).
    """
    raw = base64.b64decode(ciphertext_b64)
    # Fix: guard against truncated/corrupt input before slicing — the
    # original split raw[:-16]/raw[-16:] unconditionally, which silently
    # produced a wrong partition for payloads shorter than the tag.
    if len(raw) < GCM_TAG_BYTES:
        raise ValueError("Ciphertext shorter than GCM authentication tag")
    nonce = base64.b64decode(iv_b64)
    # Last 16 bytes are the GCM tag
    ciphertext, tag = raw[:-GCM_TAG_BYTES], raw[-GCM_TAG_BYTES:]
    cipher = AES.new(key, AES.MODE_GCM, nonce=nonce)
    plaintext = cipher.decrypt_and_verify(ciphertext, tag)
    return plaintext.decode("utf-8")


# --- backend/shared/models/__init__.py ---
"""Shared ORM models for Weibo-HotSign."""

from .base import Base, get_db, engine, AsyncSessionLocal
from .user import User
from .account import Account
from .task import Task
from .signin_log import SigninLog

__all__ = [
    "Base",
    "get_db",
    "engine",
    "AsyncSessionLocal",
    "User",
    "Account",
    "Task",
    "SigninLog",
]
literal 0 HcmV?d00001 diff --git a/backend/shared/models/__pycache__/account.cpython-311.pyc b/backend/shared/models/__pycache__/account.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3d114785585520cea169825eac768ced83987be3 GIT binary patch literal 2430 zcma)7O-vg{6rQ!$KkGkY#~6d!)M+CnPACUTNQ)>EzmXups3a6^Ev**M01mreXLg+w zw+A0`;AkYIxN0hKqEtZ@t#ZhvQY)qRwd8}fC#3emjf$Rf>YKH3VkeP0JNrEEXWz_w z?|b7PgFzpH@%N3Nvl@@kUyL~$uIA!22aCrDBN1T^I}}dj;Mb`*RHx`vd68FLqDys) zZq*}tIA+5uUR4kUd+kzus$cZO+KoL*Kn;pP4mr?GguRat7D#ZH88~Jp^GTQGp460rnzOu9vO(@-Rbu&OG)l5-xjC|FxfczZ z&8=Dd9rCr2FkB}J0y#TyE z%hLp&<&_%<)Q>O0S?;uf1bPKA%$`mFd5nw}6c936SVrhxtF-MQwI>(0<>r%4%c&dG z;tPc=wp@%3{e(`zJ$iX#Y9yV37DzuN*%d9lB4;*84yO&G8+v+RAg!;^q3ND2Lx`Sa3?tAF&r=&z2YYGbJqqTQfKIxK&l zW>uMPN*mBu9!yr{f?^cC1)WfR0FLRI9ABIqpPGh7@@hHgQDTr8#^ef$BCjaqnC) z6Ep_Y@=6jVc`8Y-YI7!eAM|}1g(V;Kp+t1FPkQtPQlFO+T{30+&UvKt<{ z?c4F~^X71hO<|!bEYyUB(nLKJ-C8X#m6s|L+v1M6+qc`dKl^*Hd2z0Kajtf8&RqJ+ z45h1~bS;!F-Ki5VZN#)oS0A(QW}x1VeGt|?rr3fF4FH8@er&*1j#&g{Nm1_!Ib z!CG(-oW3h~^hDWR6(OP)4G<)Rl z*&405_lu@~r0O53`A14q^+0^gy7%{sblw$D2Hz-*sKC}y_L6L*AT8GAe3!vENsf8U|EGx{HUB6EWP literal 0 HcmV?d00001 diff --git a/backend/shared/models/__pycache__/base.cpython-311.pyc b/backend/shared/models/__pycache__/base.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aa1acf73343de79b30a90ebc3006b895f215e58e GIT binary patch literal 1893 zcmbVM&2QsG6rZsjJ6}!um2R_J7Prgp5~-VNr3xrofl?JX5caU$g4T+#T+gI6^+&MX zrYRsQ6(LS2AVlGUazI6>E3`+r(f`0jk%)YXgplYhs2rj_AfA(^N~N+zjGxDE-p6m= zjDIuV4GcsPv@e%_&}u9~zXgL{3!Ny(2qUzMmJmS_BA75bHW-P)fzIlz!AYDEl0uy^ zr}Ks&2}W27!#G5EJz_+qDChzSYgmfWI2|C7I{;0hH2O}WH`xZZ$7!sV?2Y@sBu3(7 z;9+79D%^*++c07f4fSIbv4&{uTl^`O2EixLLCTTv!{pgKX$XAbJpkG7K_O|F3{nQ7 zhl))4)LLZ7@JF09LWUMmW@KwFx}dnqvf@yYnk$+~r$vW4j%Jx+QBhsXu1t%HNkl?b zU9lBc+n{0)osAv^kQwYJa!$omU!x`kw`FG-Ki;t{c47Px)uxI|WhDrcyORV#C({_p z8nykwK72XAKVrJ*NoQ5DDUluOx|+G-_#9PNtylfT36i{4R%|fy&bzwi(xXvGJGu(p 
z{S50zN|vR|jfcEI*yV}OHt(u{dD?i;Sg7#$Z%|$bD(xZ{_|$7>|HCZW!34j@zKM3(kGT&}1ig>mLNLM@ zv|3+YFCU?^2s^;aUOqZPagd zxH8j4z~}+2IXD}@A!h6L0xeOKP*bgl?yBO76^-h|0p5t6pDvOf#ZLZt_AYGsP(MrZ zma6D}NYyQe0;hzNKquH7@$u}!iv`spv{0tnvQ=1C)HRr|;8MqR3fXL-%aMWsA-V%@ zpl7M#^DESq$+CSJFs{JsWB~0Cc(&2+N%1f_RZmW}5Er_jtF5#cHZs*1PJi-7_h}BN zcW-@u>$90JX1w_1xzeuQyfCEQhSXN%OaXre_55{lQU9UtCx^7#?I#B~*I~G zEZuao9WrQ^&4JSnE@D5?7h&0k9h@s$2t2%MnMG~Ijs%uiVBv_Sx_(lY71Oj_xQQ&& zff?fqx%;{K+~O+o^-4iIKtr!U#h z!Z2W(GYNf*#Td6326HWR+JzCG^3Y_zG|_^0o;1*$cb+uSGu~Ngj$f^XsvmAAs+@;z zfE=P54K`MNxjr)Ev4J$W;a|D*FI>7d|LES&uE(VhxqO|=dtAO5neZYLU&-~S=DbKA z^jOo^FsC2k6e zV2X;!F`uZBFeOC_c-}~wDJ2Db495-GOe<*)MbJHjh35#1B)tvi?!pr;C4&?5s35K9 z=JjPuw?4L)#~yw9aLlwZF^WgbsUY~meaj`wg!=KaZ8S~G7iTq>Ea)cjr8%1t2z@}- ze7-XOsOrb(U8-BlKEFVoxdqNoQDSJWZd=ZZ-T;|jYFhOpj%5bnuI7+>c$bjIPlb~T zxV%6FDIBySf;lA`L4+qU9Kn&j=(#fD%m$Bu`E7{XW$A+jIuXYPbvV8!oQqHNJxxb)|#%ZTXjlI zV!21-aPoRmtJm$Op)=gxv$V;;KKDC!V!@B{vb!ck3m6vbv=EjU($6Q zPp?Zi#vE63o6hvadUT>N1C>Q|?8ieT8#jj-OAgC5_WJhh?OGkiytYd8MZ3oCM=V@} z%ehXiSgbiK8YQ?Eya`V2O~ZdNbg()(n7sVd5ix+@G@oC z`?n6JLXJ_fe8LK>7Ot>tmSz%F^(9q>^O^?ZQ>yy3sTrphSyf%u?@**Xgi_WsI?9O1 z2zzsU=@cjx>Q1_LL$9l@?iz%?&0GZLlB8r+qgd6gC7ULgMkbWnR5gj?Xz-ddJZO>` zqL#htCs^$StC~yM*UJ}9kD#o^pAa2`Kl%&iPE)2=-@LqPZ zlb!5lC&52A0=~C)#=TsrlPh&|CAip7z8!Br+ZXrw1MwH>K=OvBIzv<4p{dsW!}Q38 zvH8LN7hbyDNte6na_inP&&6iBV{|bMTf(|gEe!dBs$#p&&~=^~no(aN=30ndfMU>X zhFyS82xWju`i*kwf%}_y2bfO?ZR5hpu{q`w->5!F?Q;Io7(1X`BBuu>AEO_MohP065j; A;Q#;t literal 0 HcmV?d00001 diff --git a/backend/shared/models/__pycache__/task.cpython-311.pyc b/backend/shared/models/__pycache__/task.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..04bd74cd4c4f7f0db37cbcbb914c75a88b8cbcce GIT binary patch literal 1921 zcma)6O=ufO6rRfeA%{8O707QwRG8OO5sN10tU zwP-;HAAEG7V28Tklj{;u+>&ddh4j7x9mGPxP$>B3;+}Hqn_XE%B7x4%Jk6W;=9_un zd!rwcNeMyw`|eMr7?03jOu261_Tl!xVO=ufavbIn9Muy>QciLxgzh0M?jjr`$rCvDDU6`WDI8iuIjNdn zSMBYg<m`GD(t<@v>7n^4+3_N4 
z4lS7vbDWo;L|2`XW!jsiGU&oa#ndi1fMd#xY7;FuJCM<~ay0}l2Z$gD7s(;a$$SVA z0Y(^yPQdn*LHG^xSlC7Na8D?a*ZISGl+jTaPeOnLqm2`*OFbB32P58dCiCsuEytKI zkuYCR^jPA)B>^(ImAXFd1SU_f6;x05>`8<74V=PUJ>6r;;2XP0?!y^KRG%lXM0sHc zqZ|shz@`u7xLm|S~66+qijRfMKRe> zE4ovStD0t2Ovlc{9iy|;YqQgH^H6W1WkNL*hb%C~tmwL0)XBvM42z?0`afRECiDwJ zl{T0coescYONxc0l&x|FYMR*EOLQXkV126ZzyuyF_Z8ZLSiI0SmfA9VivFc2y% zRbvw@>^mhqSrtczG-{cXBUOGRHwEE^;Qnd0q>wMml-M>rB=&A>qHov?ZHQ(;dZMCG zQl^U1=I~m*2}XMeL=Cml^M_+ccir?n^wYWK=qJIHNparib1C-*Lo!u^D2gX33S_FH zvw1>M9#vGmZAmJM!-xx1%8HD#hfCjOA_Bsb+8Izvn7+qW88x0%vK80`TQOyP(HL9h zP3y5IX_Nq`SWz7+vejrCf!=10tPcD;`ti~vQ<+G>u>S;6>wK;Lw`%v#W9hx@egr=H zjr6T_D!3mmvmRS%#8#TImD)@zb8|m?7=e#Ik&dL}#pA`(uYcQdhgTcJtIgq6x1hS2 zVk1**W{S1NR`%`vjl&0r4~}P^$Vc+oZ8tmK$c{I&^`Ia0gI91rvLx| literal 0 HcmV?d00001 diff --git a/backend/shared/models/__pycache__/user.cpython-311.pyc b/backend/shared/models/__pycache__/user.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9ebac97f106c61c32dba3aff4d5c7e6e9daa6a0a GIT binary patch literal 1976 zcma)7O-vg{6rT02?X|I4n}8dINZFt)1yDkBSS+oUouMq*A7^$q z6qLgra^Og$NTF&f`@4KTi(vdV_JcmnBlJ5HPDk>DIbH$g3BpK5n8OK!lR0?vM#AJ}-V|iPOv*_! zC8s#17mTzi%3{PPjf|O0^Q$BEmhoL!f` za0u<2y>+wCv@tP;51D?ZEQINbZ5u?j!qhd}@JuTdud6PZ*G&?pZo5>szASTLjuJz4 zb=z{5^%}^+l4og$9JpXYMRkZ4pBK~AiEumtCQlGSkRFl~n3MSgA_A-=PHcm2hq;Ib z=CSY)?IzEJpK95s{$-!W{I0kyfEQTtkkc*u%>S|%-)f)U&2A^aH>?xi8D>keKs#1) zr(d^$**O)^ZpWED`DjlkM4CIJkw8%p;CVWRgMp5GkB-4T zdNp$Wld1+csJcq@MZ3DFYIli+t1fX|r#d`bb(U31a5cJQPL;(xTw8l3jvK1Eh}DVp zJWFQyRFuG_(+$K&KWo4Czx?2r!Iy*eiP^@)tdA&DloMe`jq0XK*Oa?tElgvwqGwPmik4>>>Y_oyq;6sI)#2Z4H%EJ;SJ(;1-X~N!*$#^Hpa|2SYMSj? 
zF60UZ>s_j04E8~P5SI+uw6?5T5I`r(QmNTB51N2DVOmoiO~pie4Qj+%J1WB^V4$_R z2Z%L!Jib9V0dmV8Wd7SEfGy0dXT}*Mmvnq}JvZ3L!mw16Tpd{F2g(z0dID-7o_=QS z<`84aVXpdU571Cl6iRATQBJ@;6P9FntIjgPN=&D(vFewMpLS0&DNDKbdk-#OdaYyLQG=_r#Q0@%V zih^woieCgd%sTfB)zFrSxfY=tS1Kw2D;M-bOFwK=Ge-RNkD!l>B`m_vJ7xcAG3M=c zfYy$0$^;z)6Lu|~T_8t1$8k;66QG_0)Zs_Z0s1)juh~QcLHlf?i$VK5K&7C49;Nae TgXzt5W8{xW^VeJS5sd!;tXcIo literal 0 HcmV?d00001 diff --git a/backend/shared/models/account.py b/backend/shared/models/account.py new file mode 100644 index 0000000..35a62cc --- /dev/null +++ b/backend/shared/models/account.py @@ -0,0 +1,30 @@ +"""Account ORM model.""" + +import uuid + +from sqlalchemy import Column, DateTime, ForeignKey, String, Text +from sqlalchemy.orm import relationship +from sqlalchemy.sql import func + +from .base import Base + + +class Account(Base): + __tablename__ = "accounts" + + id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) + user_id = Column(String(36), ForeignKey("users.id", ondelete="CASCADE"), nullable=False) + weibo_user_id = Column(String(20), nullable=False) + remark = Column(String(100)) + encrypted_cookies = Column(Text, nullable=False) + iv = Column(String(32), nullable=False) + status = Column(String(20), default="pending") + last_checked_at = Column(DateTime, nullable=True) + created_at = Column(DateTime, server_default=func.now()) + + user = relationship("User", back_populates="accounts") + tasks = relationship("Task", back_populates="account", cascade="all, delete-orphan") + signin_logs = relationship("SigninLog", back_populates="account") + + def __repr__(self): + return f"" diff --git a/backend/shared/models/base.py b/backend/shared/models/base.py new file mode 100644 index 0000000..22c2085 --- /dev/null +++ b/backend/shared/models/base.py @@ -0,0 +1,33 @@ +""" +Database engine, session factory, and declarative base. 
+""" + +from typing import AsyncGenerator + +from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine +from sqlalchemy.orm import sessionmaker, declarative_base + +from ..config import shared_settings + +_engine_kwargs: dict = {"echo": False} +if "sqlite" not in shared_settings.DATABASE_URL: + _engine_kwargs.update(pool_size=20, max_overflow=30, pool_pre_ping=True) + +engine = create_async_engine(shared_settings.DATABASE_URL, **_engine_kwargs) + +AsyncSessionLocal = sessionmaker( + engine, + class_=AsyncSession, + expire_on_commit=False, +) + +Base = declarative_base() + + +async def get_db() -> AsyncGenerator[AsyncSession, None]: + """Dependency that yields an async database session.""" + async with AsyncSessionLocal() as session: + try: + yield session + finally: + await session.close() diff --git a/backend/shared/models/signin_log.py b/backend/shared/models/signin_log.py new file mode 100644 index 0000000..4f6a5ea --- /dev/null +++ b/backend/shared/models/signin_log.py @@ -0,0 +1,23 @@ +"""SigninLog ORM model.""" + +from sqlalchemy import Integer, Column, DateTime, ForeignKey, JSON, String, Text +from sqlalchemy.orm import relationship +from sqlalchemy.sql import func + +from .base import Base + + +class SigninLog(Base): + __tablename__ = "signin_logs" + id = Column(Integer, primary_key=True, autoincrement=True) + account_id = Column(String(36), ForeignKey("accounts.id"), nullable=False) + topic_title = Column(String(100)) + status = Column(String(20), nullable=False) + reward_info = Column(JSON, nullable=True) + error_message = Column(Text, nullable=True) + signed_at = Column(DateTime, server_default=func.now()) + + account = relationship("Account", back_populates="signin_logs") + + def __repr__(self): + return f"" diff --git a/backend/shared/models/task.py b/backend/shared/models/task.py new file mode 100644 index 0000000..fceee62 --- /dev/null +++ b/backend/shared/models/task.py @@ -0,0 +1,24 @@ +"""Task ORM model.""" + +import uuid + +from 
sqlalchemy import Boolean, Column, DateTime, ForeignKey, String +from sqlalchemy.orm import relationship +from sqlalchemy.sql import func + +from .base import Base + + +class Task(Base): + __tablename__ = "tasks" + + id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) + account_id = Column(String(36), ForeignKey("accounts.id", ondelete="CASCADE"), nullable=False) + cron_expression = Column(String(50), nullable=False) + is_enabled = Column(Boolean, default=True) + created_at = Column(DateTime, server_default=func.now()) + + account = relationship("Account", back_populates="tasks") + + def __repr__(self): + return f"" diff --git a/backend/shared/models/user.py b/backend/shared/models/user.py new file mode 100644 index 0000000..616d1aa --- /dev/null +++ b/backend/shared/models/user.py @@ -0,0 +1,25 @@ +"""User ORM model.""" + +import uuid + +from sqlalchemy import Boolean, Column, DateTime, String +from sqlalchemy.orm import relationship +from sqlalchemy.sql import func + +from .base import Base + + +class User(Base): + __tablename__ = "users" + + id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) + username = Column(String(50), unique=True, nullable=False, index=True) + email = Column(String(255), unique=True, nullable=False, index=True) + hashed_password = Column(String(255), nullable=False) + created_at = Column(DateTime, server_default=func.now()) + is_active = Column(Boolean, default=True) + + accounts = relationship("Account", back_populates="user", cascade="all, delete-orphan") + + def __repr__(self): + return f"" diff --git a/backend/shared/response.py b/backend/shared/response.py new file mode 100644 index 0000000..6cee047 --- /dev/null +++ b/backend/shared/response.py @@ -0,0 +1,35 @@ +""" +Unified API response format utilities. 
+""" + +from typing import Any, List, Optional + +from fastapi.responses import JSONResponse + + +def success_response(data: Any = None, message: str = "Operation successful") -> dict: + """Return a standardised success payload.""" + return { + "success": True, + "data": data, + "message": message, + } + + +def error_response( + message: str, + code: str, + details: Optional[List[dict]] = None, + status_code: int = 400, +) -> JSONResponse: + """Return a standardised error JSONResponse.""" + body: dict = { + "success": False, + "data": None, + "message": message, + "error": { + "code": code, + "details": details or [], + }, + } + return JSONResponse(status_code=status_code, content=body) diff --git a/backend/signin_executor/Dockerfile b/backend/signin_executor/Dockerfile new file mode 100644 index 0000000..0858081 --- /dev/null +++ b/backend/signin_executor/Dockerfile @@ -0,0 +1,34 @@ +# Weibo-HotSign Sign-in Executor Service Dockerfile +FROM python:3.11-slim + +# Set working directory +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + gcc \ + default-libmysqlclient-dev \ + && rm -rf /var/lib/apt/lists/* + +# Copy requirements first for better caching +COPY requirements.txt . 
+ +# Install Python dependencies +RUN pip install --no-cache-dir -r requirements.txt + +# Copy application code +COPY app/ ./app/ + +# Create non-root user for security +RUN groupadd -r appuser && useradd -r -g appuser appuser +USER appuser + +# Expose port +EXPOSE 8000 + +# Health check +HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \ + CMD curl -f http://localhost:8000/health || exit 1 + +# Start application +CMD ["python", "-m", "app.main"] diff --git a/backend/signin_executor/app/config.py b/backend/signin_executor/app/config.py new file mode 100644 index 0000000..91d1488 --- /dev/null +++ b/backend/signin_executor/app/config.py @@ -0,0 +1,56 @@ +""" +Configuration for Sign-in Executor Service +""" + +import os +from pydantic_settings import BaseSettings + +class Settings(BaseSettings): + """Sign-in Executor settings""" + + # Server settings + HOST: str = os.getenv("HOST", "0.0.0.0") + PORT: int = int(os.getenv("PORT", 8000)) + DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true" + + # Database settings + DATABASE_URL: str = os.getenv( + "DATABASE_URL", + "mysql+aiomysql://weibo:123456789@118.195.133.163/weibo" + ) + REDIS_URL: str = os.getenv("REDIS_URL", "redis://redis:6379") + + # External service URLs + PROXY_POOL_URL: str = os.getenv("PROXY_POOL_URL", "http://proxy-pool:8080") + BROWSER_AUTOMATION_URL: str = os.getenv("BROWSER_AUTOMATION_URL", "http://browser-automation:3001") + TASK_SCHEDULER_URL: str = os.getenv("TASK_SCHEDULER_URL", "http://task-scheduler:8000") + + # Weibo API settings + WEIBO_LOGIN_URL: str = "https://weibo.com/login.php" + WEIBO_SUPER_TOPIC_URL: str = "https://weibo.com/p/aj/general/button" + + # Anti-bot protection settings + RANDOM_DELAY_MIN: float = float(os.getenv("RANDOM_DELAY_MIN", "1.0")) + RANDOM_DELAY_MAX: float = float(os.getenv("RANDOM_DELAY_MAX", "3.0")) + USER_AGENT_ROTATION: bool = os.getenv("USER_AGENT_ROTATION", "True").lower() == "true" + + # Cookie and session settings + 
COOKIE_ENCRYPTION_KEY: str = os.getenv("COOKIE_ENCRYPTION_KEY", "your-cookie-encryption-key") + SESSION_TIMEOUT_MINUTES: int = int(os.getenv("SESSION_TIMEOUT_MINUTES", "30")) + + # Browser automation settings + BROWSER_HEADLESS: bool = os.getenv("BROWSER_HEADLESS", "True").lower() == "true" + BROWSER_TIMEOUT_SECONDS: int = int(os.getenv("BROWSER_TIMEOUT_SECONDS", "30")) + + # Task execution settings + MAX_CONCURRENT_SIGNIN: int = int(os.getenv("MAX_CONCURRENT_SIGNIN", "5")) + TASK_TIMEOUT_SECONDS: int = int(os.getenv("TASK_TIMEOUT_SECONDS", "300")) + + # Logging + LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO") + + class Config: + case_sensitive = True + env_file = ".env" + +settings = Settings() diff --git a/backend/signin_executor/app/main.py b/backend/signin_executor/app/main.py new file mode 100644 index 0000000..63e84b0 --- /dev/null +++ b/backend/signin_executor/app/main.py @@ -0,0 +1,226 @@ +""" +Weibo-HotSign Sign-in Executor Service +Core service that executes sign-in tasks and handles Weibo interactions +""" + +from fastapi import FastAPI, BackgroundTasks, HTTPException, status, Depends, Request +from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import JSONResponse +import uvicorn +import asyncio +import httpx +import logging +from datetime import datetime +from typing import Dict, Any, Optional +import os + +from app.config import settings +from app.services.signin_service import SignInService +from app.services.weibo_client import WeiboClient +from app.models.signin_models import SignInRequest, SignInResult, TaskStatus + +# Initialize FastAPI app +app = FastAPI( + title="Weibo-HotSign Sign-in Executor", + description="Core service for executing Weibo super topic sign-in tasks", + version="1.0.0", + docs_url="/docs", + redoc_url="/redoc" +) + +# CORS middleware +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], # In production, specify actual origins + allow_credentials=True, + allow_methods=["*"], + 
allow_headers=["*"], +) + +# Initialize services +signin_service = SignInService() +weibo_client = WeiboClient() + +@app.on_event("startup") +async def startup_event(): + """Initialize executor service on startup""" + print("🚀 Weibo-HotSign Sign-in Executor starting up...") + print(f"📡 Service Documentation: http://{settings.HOST}:{settings.PORT}/docs") + print("🔧 Ready to process sign-in tasks...") + +@app.on_event("shutdown") +async def shutdown_event(): + """Cleanup on shutdown""" + print("👋 Weibo-HotSign Sign-in Executor shutting down...") + +@app.get("/") +async def root(): + return { + "service": "Weibo-HotSign Sign-in Executor", + "status": "running", + "version": "1.0.0", + "description": "Core sign-in execution service for Weibo super topics", + "capabilities": [ + "Weibo login and verification", + "Super topic sign-in automation", + "Anti-bot protection handling", + "Proxy integration", + "Browser fingerprint simulation" + ] + } + +@app.get("/health") +async def health_check(): + """Health check endpoint""" + return { + "status": "healthy", + "service": "signin-executor", + "timestamp": datetime.now().isoformat(), + "dependencies": { + "database": "connected", + "redis": "connected", + "proxy_pool": f"{settings.PROXY_POOL_URL}", + "browser_automation": f"{settings.BROWSER_AUTOMATION_URL}" + } + } + +@app.post("/api/v1/signin/execute", response_model=SignInResult) +async def execute_signin_task( + signin_request: SignInRequest, + background_tasks: BackgroundTasks +): + """ + Execute sign-in task for specified account + This endpoint is called by the task scheduler + """ + try: + logger.info(f"🎯 Received sign-in request for account: {signin_request.account_id}") + + # Execute sign-in in background to avoid timeout + background_tasks.add_task( + signin_service.execute_signin_task, + signin_request.account_id, + signin_request.task_id + ) + + # Return immediate response + return SignInResult( + task_id=signin_request.task_id, + 
account_id=signin_request.account_id, + status="accepted", + message="Sign-in task accepted and queued for execution", + started_at=datetime.now(), + estimated_completion=None + ) + + except Exception as e: + logger.error(f"❌ Failed to accept sign-in task: {e}") + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"Failed to accept sign-in task: {str(e)}" + ) + +@app.get("/api/v1/signin/status/{task_id}", response_model=TaskStatus) +async def get_task_status(task_id: str): + """Get status of a sign-in task""" + try: + status_info = await signin_service.get_task_status(task_id) + if not status_info: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"Task {task_id} not found" + ) + return status_info + except HTTPException: + raise + except Exception as e: + logger.error(f"❌ Error getting task status: {e}") + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Internal server error" + ) + +@app.post("/api/v1/signin/test") +async def test_signin_capability(): + """Test sign-in service capabilities (for debugging)""" + try: + # Test basic service connectivity + tests = { + "weibo_connectivity": await _test_weibo_connectivity(), + "proxy_pool_access": await _test_proxy_pool(), + "browser_automation": await _test_browser_automation(), + "database_connection": await _test_database_connection() + } + + return { + "test_timestamp": datetime.now().isoformat(), + "tests": tests, + "overall_status": "operational" if all(tests.values()) else "degraded" + } + except Exception as e: + logger.error(f"❌ Capability test failed: {e}") + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"Capability test failed: {str(e)}" + ) + +async def _test_weibo_connectivity() -> bool: + """Test connectivity to Weibo""" + try: + async with httpx.AsyncClient(timeout=10.0) as client: + response = await client.get("https://weibo.com", follow_redirects=True) + return 
response.status_code == 200 + except: + return False + +async def _test_proxy_pool() -> bool: + """Test proxy pool service availability""" + try: + async with httpx.AsyncClient(timeout=5.0) as client: + response = await client.get(f"{settings.PROXY_POOL_URL}/health", timeout=5.0) + return response.status_code == 200 + except: + return False + +async def _test_browser_automation() -> bool: + """Test browser automation service availability""" + try: + async with httpx.AsyncClient(timeout=5.0) as client: + response = await client.get(f"{settings.BROWSER_AUTOMATION_URL}/health", timeout=5.0) + return response.status_code == 200 + except: + return False + +async def _test_database_connection() -> bool: + """Test database connectivity""" + try: + # Simple database ping test + return True # Simplified for demo + except: + return False + +@app.exception_handler(HTTPException) +async def http_exception_handler(request: Request, exc: HTTPException): + """Global HTTP exception handler""" + return JSONResponse( + status_code=exc.status_code, + content={ + "success": False, + "data": None, + "message": exc.detail, + "error": { + "code": f"HTTP_{exc.status_code}", + "details": [] + } + } + ) + +if __name__ == "__main__": + host = os.getenv("HOST", settings.HOST) + port = int(os.getenv("PORT", settings.PORT)) + uvicorn.run( + app, + host=host, + port=port, + log_level="info" if not settings.DEBUG else "debug" + ) diff --git a/backend/signin_executor/app/models/signin_models.py b/backend/signin_executor/app/models/signin_models.py new file mode 100644 index 0000000..b576ac9 --- /dev/null +++ b/backend/signin_executor/app/models/signin_models.py @@ -0,0 +1,89 @@ +""" +Data models for Sign-in Executor Service +""" + +from pydantic import BaseModel, Field +from typing import Optional, Dict, Any, List +from datetime import datetime +from uuid import UUID + +class SignInRequest(BaseModel): + """Request model for sign-in task execution""" + task_id: str = Field(..., description="Unique 
task identifier") + account_id: str = Field(..., description="Weibo account identifier") + timestamp: Optional[datetime] = Field(default_factory=datetime.now, description="Request timestamp") + requested_by: Optional[str] = Field(default="task_scheduler", description="Request source") + +class SignInResult(BaseModel): + """Result model for sign-in task execution""" + task_id: str = Field(..., description="Task identifier") + account_id: str = Field(..., description="Account identifier") + status: str = Field(..., description="Task status: accepted, running, success, failed") + message: str = Field(..., description="Human readable result message") + started_at: datetime = Field(..., description="Task start timestamp") + completed_at: Optional[datetime] = Field(None, description="Task completion timestamp") + estimated_completion: Optional[datetime] = Field(None, description="Estimated completion time") + reward_info: Optional[Dict[str, Any]] = Field(None, description="Reward details like exp, credits") + error_message: Optional[str] = Field(None, description="Error details if failed") + signed_topics: Optional[List[str]] = Field(None, description="List of successfully signed topics") + total_topics: Optional[int] = Field(None, description="Total number of topics attempted") + +class TaskStatus(BaseModel): + """Status model for tracking sign-in task progress""" + task_id: str = Field(..., description="Task identifier") + account_id: str = Field(..., description="Account identifier") + status: str = Field(..., description="Current status: pending, running, success, failed") + progress_percentage: int = Field(default=0, ge=0, le=100, description="Progress percentage") + current_step: Optional[str] = Field(None, description="Current execution step") + steps_completed: List[str] = Field(default_factory=list, description="Completed steps") + steps_remaining: List[str] = Field(default_factory=list, description="Remaining steps") + started_at: datetime = Field(..., 
description="Start timestamp") + updated_at: datetime = Field(default_factory=datetime.now, description="Last update timestamp") + estimated_completion: Optional[datetime] = Field(None, description="Estimated completion") + +class WeiboAccount(BaseModel): + """Weibo account information for sign-in""" + id: UUID = Field(..., description="Account UUID") + user_id: UUID = Field(..., description="Owner user UUID") + weibo_user_id: str = Field(..., description="Weibo user ID") + remark: Optional[str] = Field(None, description="User remark") + encrypted_cookies: str = Field(..., description="Encrypted Weibo cookies") + iv: str = Field(..., description="Encryption initialization vector") + status: str = Field(default="active", description="Account status: active, invalid_cookie, banned") + last_checked_at: Optional[datetime] = Field(None, description="Last validation timestamp") + +class SignInLog(BaseModel): + """Sign-in operation log entry""" + id: Optional[int] = Field(None, description="Log entry ID") + account_id: UUID = Field(..., description="Account UUID") + topic_title: Optional[str] = Field(None, description="Signed topic title") + status: str = Field(..., description="Sign-in status") + reward_info: Optional[Dict[str, Any]] = Field(None, description="Reward information") + error_message: Optional[str] = Field(None, description="Error details") + signed_at: datetime = Field(default_factory=datetime.now, description="Sign-in timestamp") + execution_time_ms: Optional[int] = Field(None, description="Execution time in milliseconds") + +class WeiboSuperTopic(BaseModel): + """Weibo super topic information""" + id: str = Field(..., description="Topic ID") + title: str = Field(..., description="Topic title") + url: str = Field(..., description="Topic URL") + is_signed: bool = Field(default=False, description="Whether already signed") + sign_url: Optional[str] = Field(None, description="Sign-in API URL") + reward_exp: Optional[int] = Field(None, description="Experience 
points reward") + reward_credit: Optional[int] = Field(None, description="Credit points reward") + +class AntiBotConfig(BaseModel): + """Anti-bot protection configuration""" + random_delay_min: float = Field(default=1.0, description="Minimum random delay seconds") + random_delay_max: float = Field(default=3.0, description="Maximum random delay seconds") + user_agent_rotation: bool = Field(default=True, description="Enable user agent rotation") + proxy_enabled: bool = Field(default=True, description="Enable proxy usage") + fingerprint_simulation: bool = Field(default=True, description="Enable browser fingerprint simulation") + +class BrowserAutomationRequest(BaseModel): + """Request for browser automation service""" + target_url: str = Field(..., description="Target URL to automate") + action_type: str = Field(..., description="Action type: signin, extract, click") + context_data: Optional[Dict[str, Any]] = Field(None, description="Additional context data") + timeout_seconds: int = Field(default=30, description="Operation timeout") diff --git a/backend/signin_executor/app/services/signin_service.py b/backend/signin_executor/app/services/signin_service.py new file mode 100644 index 0000000..4e996af --- /dev/null +++ b/backend/signin_executor/app/services/signin_service.py @@ -0,0 +1,271 @@ +""" +Core sign-in business logic service +Handles Weibo super topic sign-in operations +""" + +import asyncio +import httpx +import logging +import random +from datetime import datetime, timedelta +from typing import Dict, Any, List, Optional +from uuid import UUID + +from app.config import settings +from app.models.signin_models import SignInRequest, SignInResult, TaskStatus, WeiboAccount, WeiboSuperTopic, AntiBotConfig +from app.services.weibo_client import WeiboClient + +logger = logging.getLogger(__name__) + +class SignInService: + """Main service for handling sign-in operations""" + + def __init__(self): + self.weibo_client = WeiboClient() + self.active_tasks: Dict[str, 
TaskStatus] = {} + self.antibot_config = AntiBotConfig( + random_delay_min=settings.RANDOM_DELAY_MIN, + random_delay_max=settings.RANDOM_DELAY_MAX, + user_agent_rotation=settings.USER_AGENT_ROTATION, + proxy_enabled=True, + fingerprint_simulation=True + ) + + async def execute_signin_task(self, account_id: str, task_id: str): + """ + Execute complete sign-in workflow for an account + This is the main business logic method + """ + logger.info(f"🎯 Starting sign-in execution for account {account_id}, task {task_id}") + + # Initialize task status + task_status = TaskStatus( + task_id=task_id, + account_id=account_id, + status="running", + progress_percentage=0, + current_step="initializing", + steps_completed=[], + steps_remaining=[ + "validate_account", + "setup_session", + "get_super_topics", + "execute_signin", + "record_results" + ], + started_at=datetime.now() + ) + self.active_tasks[task_id] = task_status + + try: + # Step 1: Validate account + task_status.current_step = "validate_account" + await self._update_task_progress(task_id, 10) + + account = await self._get_account_info(account_id) + if not account or account.status != "active": + raise Exception(f"Account {account_id} not found or inactive") + + task_status.steps_completed.append("validate_account") + task_status.steps_remaining.remove("validate_account") + task_status.progress_percentage = 20 + + # Step 2: Setup session with proxy and fingerprint + task_status.current_step = "setup_session" + await self._apply_anti_bot_protection() + + task_status.steps_completed.append("setup_session") + task_status.steps_remaining.remove("setup_session") + task_status.progress_percentage = 30 + + # Step 3: Get super topics list + task_status.current_step = "get_super_topics" + await self._update_task_progress(task_id, 40) + + super_topics = await self._get_super_topics_list(account) + if not super_topics: + logger.warning(f"No super topics found for account {account_id}") + + 
task_status.steps_completed.append("get_super_topics") + task_status.steps_remaining.remove("get_super_topics") + task_status.progress_percentage = 50 + + # Step 4: Execute signin for each topic + task_status.current_step = "execute_signin" + signin_results = await self._execute_topic_signin(account, super_topics, task_id) + + task_status.steps_completed.append("execute_signin") + task_status.steps_remaining.remove("execute_signin") + task_status.progress_percentage = 80 + + # Step 5: Record results + task_status.current_step = "record_results" + await self._update_task_progress(task_id, 90) + + result = SignInResult( + task_id=task_id, + account_id=account_id, + status="success", + message=f"Successfully processed {len(signin_results['signed'])} topics", + started_at=task_status.started_at, + completed_at=datetime.now(), + signed_topics=signin_results['signed'], + total_topics=len(super_topics) if super_topics else 0, + reward_info={ + "topics_signed": len(signin_results['signed']), + "topics_already_signed": len(signin_results['already_signed']), + "errors": len(signin_results['errors']) + } + ) + + task_status.status = "success" + task_status.progress_percentage = 100 + task_status.current_step = "completed" + + logger.info(f"✅ Sign-in task {task_id} completed successfully") + return result + + except Exception as e: + logger.error(f"❌ Sign-in task {task_id} failed: {e}") + + # Update task status to failed + if task_id in self.active_tasks: + task_status = self.active_tasks[task_id] + task_status.status = "failed" + task_status.error_message = str(e) + + # Return failed result + return SignInResult( + task_id=task_id, + account_id=account_id, + status="failed", + message=f"Sign-in failed: {str(e)}", + started_at=task_status.started_at if task_id in self.active_tasks else datetime.now(), + completed_at=datetime.now(), + error_message=str(e) + ) + + async def get_task_status(self, task_id: str) -> Optional[TaskStatus]: + """Get current status of a sign-in task""" 
+ return self.active_tasks.get(task_id) + + async def _update_task_progress(self, task_id: str, percentage: int): + """Update task progress percentage""" + if task_id in self.active_tasks: + self.active_tasks[task_id].progress_percentage = percentage + self.active_tasks[task_id].updated_at = datetime.now() + + async def _get_account_info(self, account_id: str) -> Optional[WeiboAccount]: + """Get Weibo account information from database""" + try: + # Mock implementation - in real system, query database + # For demo, return mock account + return WeiboAccount( + id=UUID(account_id), + user_id=UUID("12345678-1234-5678-9012-123456789012"), + weibo_user_id="1234567890", + remark="Demo Account", + encrypted_cookies="mock_encrypted_cookies", + iv="mock_iv_16_bytes", + status="active", + last_checked_at=datetime.now() - timedelta(hours=1) + ) + except Exception as e: + logger.error(f"Error fetching account {account_id}: {e}") + return None + + async def _apply_anti_bot_protection(self): + """Apply anti-bot protection measures""" + # Random delay to mimic human behavior + delay = random.uniform( + self.antibot_config.random_delay_min, + self.antibot_config.random_delay_max + ) + logger.debug(f"Applying random delay: {delay:.2f}s") + await asyncio.sleep(delay) + + # Additional anti-bot measures would go here: + # - User agent rotation + # - Proxy selection + # - Browser fingerprint simulation + # - Request header randomization + + async def _get_super_topics_list(self, account: WeiboAccount) -> List[WeiboSuperTopic]: + """Get list of super topics for account""" + try: + # Mock implementation - in real system, fetch from Weibo API + # Simulate API call delay + await asyncio.sleep(1) + + # Return mock super topics + return [ + WeiboSuperTopic( + id="topic_001", + title="Python编程", + url="https://weibo.com/p/100808xxx", + is_signed=False, + sign_url="https://weibo.com/p/aj/general/button", + reward_exp=2, + reward_credit=1 + ), + WeiboSuperTopic( + id="topic_002", + title="人工智能", 
+ url="https://weibo.com/p/100808yyy", + is_signed=False, + sign_url="https://weibo.com/p/aj/general/button", + reward_exp=2, + reward_credit=1 + ), + WeiboSuperTopic( + id="topic_003", + title="机器学习", + url="https://weibo.com/p/100808zzz", + is_signed=True, # Already signed + sign_url="https://weibo.com/p/aj/general/button", + reward_exp=2, + reward_credit=1 + ) + ] + except Exception as e: + logger.error(f"Error fetching super topics: {e}") + return [] + + async def _execute_topic_signin(self, account: WeiboAccount, topics: List[WeiboSuperTopic], task_id: str) -> Dict[str, List[str]]: + """Execute sign-in for each super topic""" + signed = [] + already_signed = [] + errors = [] + + for topic in topics: + try: + # Add small delay between requests + await asyncio.sleep(random.uniform(0.5, 1.5)) + + if topic.is_signed: + already_signed.append(topic.title) + continue + + # Execute signin for this topic + success = await self.weibo_client.sign_super_topic( + account=account, + topic=topic, + task_id=task_id + ) + + if success: + signed.append(topic.title) + logger.info(f"✅ Successfully signed topic: {topic.title}") + else: + errors.append(f"Failed to sign topic: {topic.title}") + + except Exception as e: + error_msg = f"Error signing topic {topic.title}: {str(e)}" + logger.error(error_msg) + errors.append(error_msg) + + return { + "signed": signed, + "already_signed": already_signed, + "errors": errors + } diff --git a/backend/signin_executor/app/services/weibo_client.py b/backend/signin_executor/app/services/weibo_client.py new file mode 100644 index 0000000..041324c --- /dev/null +++ b/backend/signin_executor/app/services/weibo_client.py @@ -0,0 +1,167 @@ +""" +Weibo API Client +Handles all interactions with Weibo.com, including login, sign-in, and data fetching +""" + +import httpx +import asyncio +import logging +import random +from typing import Dict, Any, Optional, List + +from app.config import settings +from app.models.signin_models import WeiboAccount, 
WeiboSuperTopic + +logger = logging.getLogger(__name__) + +class WeiboClient: + """Client for interacting with Weibo API""" + + def __init__(self): + self.base_headers = { + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36", + "Accept": "application/json, text/plain, */*", + "Accept-Language": "en-US,en;q=0.9,zh-CN;q=0.8,zh;q=0.7", + "Connection": "keep-alive", + "Referer": "https://weibo.com/" + } + + async def verify_cookies(self, account: WeiboAccount) -> bool: + """Verify if Weibo cookies are still valid""" + try: + # Decrypt cookies + cookies = self._decrypt_cookies(account.encrypted_cookies, account.iv) + + async with httpx.AsyncClient(cookies=cookies, headers=self.base_headers) as client: + response = await client.get("https://weibo.com/mygroups", follow_redirects=True) + + if response.status_code == 200 and "我的首页" in response.text: + logger.info(f"Cookies for account {account.weibo_user_id} are valid") + return True + else: + logger.warning(f"Cookies for account {account.weibo_user_id} are invalid") + return False + except Exception as e: + logger.error(f"Error verifying cookies: {e}") + return False + + async def get_super_topics(self, account: WeiboAccount) -> List[WeiboSuperTopic]: + """Get list of super topics for an account""" + try: + # Mock implementation - in real system, this would involve complex API calls + # Simulate API call delay + await asyncio.sleep(random.uniform(1.0, 2.0)) + + # Return mock data + return [ + WeiboSuperTopic(id="topic_001", title="Python编程", url="...", is_signed=False), + WeiboSuperTopic(id="topic_002", title="人工智能", url="...", is_signed=False), + WeiboSuperTopic(id="topic_003", title="机器学习", url="...", is_signed=True) + ] + except Exception as e: + logger.error(f"Error fetching super topics: {e}") + return [] + + async def sign_super_topic(self, account: WeiboAccount, topic: WeiboSuperTopic, task_id: str) -> bool: + """ + Execute sign-in for 
a single super topic + """ + try: + # Decrypt cookies + cookies = self._decrypt_cookies(account.encrypted_cookies, account.iv) + + # Prepare request payload + payload = { + "ajwvr": "6", + "api": "http://i.huati.weibo.com/aj/super/checkin", + "id": topic.id, + "location": "page_100808_super_index", + "refer_flag": "100808_-_1", + "refer_lflag": "100808_-_1", + "ua": self.base_headers["User-Agent"], + "is_new": "1", + "is_from_ad": "0", + "ext": "mi_898_1_0_0" + } + + # In a real scenario, we might need to call browser automation service + # to get signed parameters or handle JS challenges + + # Simulate API call + await asyncio.sleep(random.uniform(0.5, 1.5)) + + # Mock response - assume success + response_data = { + "code": "100000", + "msg": "签到成功", + "data": { + "tip": "签到成功", + "alert_title": "签到成功", + "alert_subtitle": "恭喜你成为今天第12345位签到的人", + "reward": {"exp": 2, "credit": 1} + } + } + + if response_data.get("code") == "100000": + logger.info(f"Successfully signed topic: {topic.title}") + return True + elif response_data.get("code") == "382004": + logger.info(f"Topic {topic.title} already signed today") + return True # Treat as success + else: + logger.error(f"Failed to sign topic {topic.title}: {response_data.get('msg')}") + return False + + except Exception as e: + logger.error(f"Exception signing topic {topic.title}: {e}") + return False + + def _decrypt_cookies(self, encrypted_cookies: str, iv: str) -> Dict[str, str]: + """ + Decrypt cookies using AES-256-GCM + In a real system, this would use a proper crypto library + """ + try: + # Mock implementation - return dummy cookies + return { + "SUB": "_2A25z...", + "SUBP": "0033Wr...", + "ALF": "16...", + "SSOLoginState": "16...", + "SCF": "...", + "UN": "testuser" + } + except Exception as e: + logger.error(f"Failed to decrypt cookies: {e}") + return {} + + async def get_proxy(self) -> Optional[Dict[str, str]]: + """Get a proxy from the proxy pool service""" + try: + async with httpx.AsyncClient(timeout=5.0) 
as client: + response = await client.get(f"{settings.PROXY_POOL_URL}/get") + if response.status_code == 200: + proxy_info = response.json() + return { + "http://": f"http://{proxy_info['proxy']}", + "https://": f"https://{proxy_info['proxy']}" + } + else: + return None + except Exception as e: + logger.error(f"Failed to get proxy: {e}") + return None + + async def get_browser_fingerprint(self) -> Dict[str, Any]: + """Get a browser fingerprint from the generator service""" + try: + # Mock implementation + return { + "user_agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36", + "screen_resolution": "1920x1080", + "timezone": "Asia/Shanghai", + "plugins": ["PDF Viewer", "Chrome PDF Viewer", "Native Client"] + } + except Exception as e: + logger.error(f"Failed to get browser fingerprint: {e}") + return {} diff --git a/backend/signin_executor/requirements.txt b/backend/signin_executor/requirements.txt new file mode 100644 index 0000000..b94e148 --- /dev/null +++ b/backend/signin_executor/requirements.txt @@ -0,0 +1,23 @@ +# Weibo-HotSign Sign-in Executor Service Requirements +# Web Framework +fastapi==0.104.1 +uvicorn[standard]==0.24.0 + +# Database +sqlalchemy==2.0.23 +aiomysql==0.2.0 +PyMySQL==1.1.0 +redis==5.0.1 + +# Configuration +pydantic-settings==2.0.3 +pydantic==2.5.0 + +# HTTP Client +httpx==0.25.2 + +# Utilities +python-dotenv==1.0.0 + +# Security (for cookie decryption) +pycryptodome==3.19.0 diff --git a/backend/task_scheduler/Dockerfile b/backend/task_scheduler/Dockerfile new file mode 100644 index 0000000..9404bf1 --- /dev/null +++ b/backend/task_scheduler/Dockerfile @@ -0,0 +1,30 @@ +# Weibo-HotSign Task Scheduler Service Dockerfile +FROM python:3.11-slim + +# Set working directory +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + gcc \ + default-libmysqlclient-dev \ + && rm -rf /var/lib/apt/lists/* + +# Copy requirements first for better 
caching +COPY requirements.txt . + +# Install Python dependencies +RUN pip install --no-cache-dir -r requirements.txt + +# Copy application code +COPY app/ ./app/ + +# Create non-root user for security +RUN groupadd -r appuser && useradd -r -g appuser appuser +USER appuser + +# Expose port (optional, as scheduler doesn't need external access) +# EXPOSE 8000 + +# Start Celery Beat scheduler +CMD ["celery", "-A", "app.celery_app", "beat", "--loglevel=info"] diff --git a/backend/task_scheduler/app/celery_app.py b/backend/task_scheduler/app/celery_app.py new file mode 100644 index 0000000..e463d0c --- /dev/null +++ b/backend/task_scheduler/app/celery_app.py @@ -0,0 +1,97 @@ +""" +Weibo-HotSign Task Scheduler Service +Celery Beat configuration for scheduled sign-in tasks +""" + +import os +from celery import Celery +from celery.schedules import crontab +from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine +from sqlalchemy.orm import sessionmaker +from sqlalchemy import select +import asyncio +from datetime import datetime + +from ..config import settings + +# Create Celery app +celery_app = Celery( + "weibo_hot_sign_scheduler", + broker=settings.CELERY_BROKER_URL, + backend=settings.CELERY_RESULT_BACKEND, + include=["app.tasks.signin_tasks"] +) + +# Celery configuration +celery_app.conf.update( + task_serializer="json", + accept_content=["json"], + result_serializer="json", + timezone="Asia/Shanghai", + enable_utc=True, + beat_schedule_filename="celerybeat-schedule", + beat_max_loop_interval=5, +) + +# Database configuration for task scheduler +engine = create_async_engine( + settings.DATABASE_URL, + echo=settings.DEBUG, + pool_size=10, + max_overflow=20 +) + +AsyncSessionLocal = sessionmaker( + engine, + class_=AsyncSession, + expire_on_commit=False +) + +async def get_db(): + """Get database session for task scheduler""" + async with AsyncSessionLocal() as session: + try: + yield session + finally: + await session.close() + +class TaskSchedulerService: 
+ """Service to manage scheduled tasks from database""" + + def __init__(self): + self.engine = engine + + async def load_scheduled_tasks(self): + """Load enabled tasks from database and schedule them""" + from app.models.task_models import Task + + try: + async with AsyncSessionLocal() as session: + # Query all enabled tasks + stmt = select(Task).where(Task.is_enabled == True) + result = await session.execute(stmt) + tasks = result.scalars().all() + + print(f"📅 Loaded {len(tasks)} enabled tasks from database") + + # Here we would dynamically add tasks to Celery Beat + # For now, we'll use static configuration in celery_config.py + return tasks + + except Exception as e: + print(f"❌ Error loading tasks from database: {e}") + return [] + +# Synchronous wrapper for async function +def sync_load_tasks(): + """Synchronous wrapper to load tasks""" + service = TaskSchedulerService() + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + try: + return loop.run_until_complete(service.load_scheduled_tasks()) + finally: + loop.close() + +# Import task modules to register them +from app.tasks import signin_tasks diff --git a/backend/task_scheduler/app/config.py b/backend/task_scheduler/app/config.py new file mode 100644 index 0000000..6cd4a5a --- /dev/null +++ b/backend/task_scheduler/app/config.py @@ -0,0 +1,47 @@ +""" +Configuration for Task Scheduler Service +""" + +import os +from pydantic_settings import BaseSettings +from typing import List + +class Settings(BaseSettings): + """Task Scheduler settings""" + + # Database settings + DATABASE_URL: str = os.getenv( + "DATABASE_URL", + "mysql+aiomysql://weibo:123456789@43.134.68.207/weibo" + ) + + # Celery settings + CELERY_BROKER_URL: str = os.getenv("CELERY_BROKER_URL", "redis://redis:6379/0") + CELERY_RESULT_BACKEND: str = os.getenv("CELERY_RESULT_BACKEND", "redis://redis:6379/0") + + # Task execution settings + MAX_CONCURRENT_TASKS: int = int(os.getenv("MAX_CONCURRENT_TASKS", "10")) + TASK_TIMEOUT_SECONDS: int 
= int(os.getenv("TASK_TIMEOUT_SECONDS", "300")) + + # Scheduler settings + SCHEDULER_TIMEZONE: str = os.getenv("SCHEDULER_TIMEZONE", "Asia/Shanghai") + BEAT_SCHEDULE_FILE: str = os.getenv("BEAT_SCHEDULE_FILE", "/tmp/celerybeat-schedule") + + # Retry settings + MAX_RETRY_ATTEMPTS: int = int(os.getenv("MAX_RETRY_ATTEMPTS", "3")) + RETRY_DELAY_SECONDS: int = int(os.getenv("RETRY_DELAY_SECONDS", "60")) + + # Logging + LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO") + DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true" + + # Service URLs + SIGNIN_EXECUTOR_URL: str = os.getenv("SIGNIN_EXECUTOR_URL", "http://signin-executor:8000") + PROXY_POOL_URL: str = os.getenv("PROXY_POOL_URL", "http://proxy-pool:8080") + BROWSER_AUTOMATION_URL: str = os.getenv("BROWSER_AUTOMATION_URL", "http://browser-automation:3001") + + class Config: + case_sensitive = True + env_file = ".env" + +settings = Settings() diff --git a/backend/task_scheduler/app/tasks/signin_tasks.py b/backend/task_scheduler/app/tasks/signin_tasks.py new file mode 100644 index 0000000..7484089 --- /dev/null +++ b/backend/task_scheduler/app/tasks/signin_tasks.py @@ -0,0 +1,196 @@ +""" +Weibo-HotSign Sign-in Task Definitions +Celery tasks for scheduled sign-in operations +""" + +import asyncio +import httpx +import json +import logging +from datetime import datetime +from typing import Dict, Any, Optional + +from celery import current_task +from ..celery_app import celery_app +from ..config import settings + +# Configure logger +logger = logging.getLogger(__name__) + +@celery_app.task(bind=True, max_retries=3, default_retry_delay=60) +def execute_signin_task(self, task_id: str, account_id: str, cron_expression: str): + """ + Execute scheduled sign-in task for a specific account + This task is triggered by Celery Beat based on cron schedule + """ + logger.info(f"🎯 Starting sign-in task {task_id} for account {account_id}") + + try: + # Update task status + current_task.update_state( + state="PROGRESS", + meta={ + 
"current": 10, + "total": 100, + "status": "Initializing sign-in process...", + "account_id": account_id + } + ) + + # Call signin executor service + result = _call_signin_executor(account_id, task_id) + + # Update task status + current_task.update_state( + state="SUCCESS", + meta={ + "current": 100, + "total": 100, + "status": "Sign-in completed successfully", + "result": result, + "account_id": account_id + } + ) + + logger.info(f"✅ Sign-in task {task_id} completed successfully for account {account_id}") + return result + + except Exception as exc: + logger.error(f"❌ Sign-in task {task_id} failed for account {account_id}: {exc}") + + # Retry logic + if self.request.retries < settings.MAX_RETRY_ATTEMPTS: + logger.info(f"🔄 Retrying task {task_id} (attempt {self.request.retries + 1})") + raise self.retry(exc=exc, countdown=settings.RETRY_DELAY_SECONDS) + + # Final failure + current_task.update_state( + state="FAILURE", + meta={ + "current": 100, + "total": 100, + "status": f"Task failed after {settings.MAX_RETRY_ATTEMPTS} attempts", + "error": str(exc), + "account_id": account_id + } + ) + raise exc + +@celery_app.task +def schedule_daily_signin(): + """ + Daily sign-in task - example of scheduled task + Can be configured in Celery Beat schedule + """ + logger.info("📅 Executing daily sign-in schedule") + + # This would typically query database for accounts that need daily sign-in + # For demo purposes, we'll simulate processing multiple accounts + + accounts = ["account_1", "account_2", "account_3"] # Mock account IDs + results = [] + + for account_id in accounts: + try: + # Submit individual sign-in task for each account + task = execute_signin_task.delay( + task_id=f"daily_{datetime.now().strftime('%Y%m%d_%H%M%S')}", + account_id=account_id, + cron_expression="0 8 * * *" # Daily at 8 AM + ) + results.append({ + "account_id": account_id, + "task_id": task.id, + "status": "submitted" + }) + except Exception as e: + logger.error(f"Failed to submit task for account 
{account_id}: {e}") + results.append({ + "account_id": account_id, + "status": "failed", + "error": str(e) + }) + + return { + "scheduled_date": datetime.now().isoformat(), + "accounts_processed": len(accounts), + "results": results + } + +@celery_app.task +def process_pending_tasks(): + """ + Process pending sign-in tasks from database + This can be called manually or via external trigger + """ + logger.info("🔄 Processing pending sign-in tasks from database") + + # In real implementation, this would: + # 1. Query database for tasks that need to be executed + # 2. Check if they're due based on cron expressions + # 3. Submit them to Celery for execution + + try: + # Mock implementation - query enabled tasks + result = { + "processed_at": datetime.now().isoformat(), + "tasks_found": 5, # Mock number + "tasks_submitted": 3, + "tasks_skipped": 2, + "status": "completed" + } + + logger.info(f"✅ Processed pending tasks: {result}") + return result + + except Exception as e: + logger.error(f"❌ Failed to process pending tasks: {e}") + raise + +def _call_signin_executor(account_id: str, task_id: str) -> Dict[str, Any]: + """ + Call the signin executor service to perform actual sign-in + """ + try: + signin_data = { + "task_id": task_id, + "account_id": account_id, + "timestamp": datetime.now().isoformat(), + "requested_by": "task_scheduler" + } + + # Call signin executor service + with httpx.Client(timeout=30.0) as client: + response = client.post( + f"{settings.SIGNIN_EXECUTOR_URL}/api/v1/signin/execute", + json=signin_data + ) + + if response.status_code == 200: + result = response.json() + logger.info(f"Sign-in executor response: {result}") + return result + else: + raise Exception(f"Sign-in executor returned error: {response.status_code} - {response.text}") + + except httpx.RequestError as e: + logger.error(f"Network error calling signin executor: {e}") + raise Exception(f"Failed to connect to signin executor: {e}") + except Exception as e: + logger.error(f"Error calling 
signin executor: {e}") + raise + +# Periodic task definitions for Celery Beat +celery_app.conf.beat_schedule = { + "daily-signin-at-8am": { + "task": "app.tasks.signin_tasks.schedule_daily_signin", + "schedule": { + "hour": 8, + "minute": 0, + }, + }, + "process-pending-every-15-minutes": { + "task": "app.tasks.signin_tasks.process_pending_tasks", + "schedule": 900.0, # Every 15 minutes + }, +} diff --git a/backend/task_scheduler/requirements.txt b/backend/task_scheduler/requirements.txt new file mode 100644 index 0000000..03ac96a --- /dev/null +++ b/backend/task_scheduler/requirements.txt @@ -0,0 +1,18 @@ +# Weibo-HotSign Task Scheduler Service Requirements +# Task Queue +celery==5.3.6 +redis==5.0.1 + +# Database +sqlalchemy==2.0.23 +aiomysql==0.2.0 +PyMySQL==1.1.0 + +# Configuration +pydantic-settings==2.0.3 + +# HTTP Client +httpx==0.25.2 + +# Utilities +python-dotenv==1.0.0 diff --git a/backend/tests/__init__.py b/backend/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/tests/__pycache__/__init__.cpython-311.pyc b/backend/tests/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ed1b0dad39a0620c78d6e01e72752c70327beae3 GIT binary patch literal 148 zcmZ3^%ge<81dl`3WrArC0RxOs#%DGlV>&|$LokCTqu)w~A|@dJGf486x{Fmza(+r` zOnGW%QhrQQVsdtBUP??!YH>+%OniK1US>&ryk0@&FAkgB{FKt1RJ$Tppgxev#r#0x R12ZEd;|B&9QN#=s0{|hsA$R}) literal 0 HcmV?d00001 diff --git a/backend/tests/__pycache__/conftest.cpython-311-pytest-8.3.3.pyc b/backend/tests/__pycache__/conftest.cpython-311-pytest-8.3.3.pyc new file mode 100644 index 0000000000000000000000000000000000000000..28196b1901b5fd7d88ec2402b8c400c3ccc40ecc GIT binary patch literal 5547 zcmcf_ZEO?Cb@s#GiEUyCkmQn04B=de;UEX%Lx_;%C|}`;_1V z)44}CC8VlS`_Cv>3Ei*0ne{q$61eMC z>g@W>+xOnQc{B6o%^QEw-X2C!KDzJ+Z4t1)(+{r%>&Wsm9-#+FMOmbBDz9@{p4CEL z%<&w``?7w{VE{IeF8tXw0?n& z0noX{zM*q!fT^Z!=17v^g#?v?vI3ETx=lJ^DD9eQ`ZKikjdIH3>Zli4Ik`)<3kts%AX 
z>84e4(Yc5o0sDz3g0kD8O?y+D%}=+iYO{4so57YAT{r?`;%E4+4Xrc0+Px^Q>S+g$ zP9U{s5vgyjnwhs-tnOXYUrOaODAl*{Je-+T2v#KHlZ80=`P7KCBu~P4I2|f(IpF$Yg!Ka1&bvE zV->6E@W1F3O{6#{I%3Tmxf9sHM6peh;+-}|$zaRUOv6d!2v%$?D^yIz#%0aG(5wvx zLiu^Hhb-{v^g_b=K!;)NR5a7Y4(;CUw2h5SjvOD!jLR3!pIO)fQ>Ix!*Ug-w&zhEf zX#c(g2Y2srLg&ZFPG*=;jLzwhg`r=2=qjFfqNgrS%9-)e^W&59>G7!rS0HDaS2fH8 zHjSP;clzYGJU%gce(L?nljkO=+cBt zFtN0TwINQRJ}w~mtejcExrQ$RLicVGe#^fAa2jzCyX?mPM(9cdZ}qs3&`tDH^nl}_ zUTW-rP|+x@@^mkOpYaR(N7>{vPcSZnC*W(?uqE9z3(RTkSu9!oTUCNm%zI2_s@1?riUZPqpU>GwLOBd#y z@QNlvV^s=%)*b-=gr|u9?MH2iN~FIWNme4s;#f5tEuNxJRSbT1>XTEyJhOPF$ev|r zgC=2`1InCj&cS+9f>->22ryI2e+K*N6YcS!UTREq6LD2Q8Ai7_mHSYDi5ET<=@P6z z^)~IRUap7@%;#aPLFg7Nji-}^Jm{k}h}?oBY(>J@kVNmGW|}2l=Whs8=sLK=6u1!8 zacYM)=`S&<92=;_25N}&?SC^^QmfI9 zd-*&0-;MqDY^7(o9DTPEeYX@`=dB4X+FnxS-+XruZ|?ui+*hw&ttaq83>C-S=WnsD z)gvCJOB4(h)Z)q9Y!rBW|L-xvw9{tq=S4-B@(>Ki@ghCp_;9 zkE8<6cX0s!v~lNXhwybrc(f<*bsq=N@xy^MM~wC3)Tf|i?Sb$Js4FSX+KA-sdq|_c zBaZx6d#rCt!#d3$SaKk-lRz3f4w(QuvtdthPJ9v~dd4N3p~rEGBXPP1*n!~gkgz%} z*V~XCz&lL&4uF+C0=uKX66>dn)A?qwbg>%Ubi3Vss5dmpQ*Y>`-q1vQv$h||t$V}k zyRq6}cn|!0glFMHfj@GG0&A8V_-J{IN+FE09Nb+X;AaJuUqo3^6|hegv0wFJ-;7`N zFN$z)0UG3?0l$&tsV4*G78PaCV{YOx=lEp{ay^#ySXiHFUh|+V zgAukYFLX2vOMA2vpvY_mm+;Wp+*BG#n>;wwg1t2AoG{tFpL1=s?djVKfuLQ;QQ6EHcd}ATD zVk{$^qLMU0ab30AEm3RlkCua5O6+Au=bDqThK+}yS>47a3)er7Z*r|?XWjl3PliB? 
zBV5MzV}4~A7VWTZI@u5SLi>v0AbS=_Ru_P}*|Egk5AS_^=i`S~IhL%%lBHmBt>JV+ z*yJ7qdlnf5)+GE`Oe>;)L^l5F@ZG~Dv6IQCengG}(+N(`X}YZ$meY2uFfTX0 zw#tQhmi5EU2@@OcdTD~M6S(3zp$5qbc(gDyrsA?f2>p8nmTBH{c++zH_IyDzE)%-H z9KS2&L_KMm!oe-26Yh+Po}0z_`83>I)Aid6p}tLMF78B|ButVgG*;h!W z74=ES@7{Zz4I|Xt=2O$nKHFQAkr?eDQ2$$iU6WXf$<9!B)#%cY)}{M4c*J%8qlAWR z8r7|zgIB;&$#FH_$B8vWK@@S^iZ8FiqDA&pQMkySDhd_ZQ$_uy*7pT!FS4hKHkG`$ zipEOoy;ZcM)cV%^h!YospT$0j-RJJ_{mp>~M}B=|iT{1zi`eI}XTh>`up%A&YPg&@ zR!JNyqmc?4DWMT))w{3AFNSX)Eu*e#*D%8mm(k{G-yVvWBHd-wQyo0uehidR657ND zO40t(z=5xDDLPh;j#Z*##Yi>LcmK#zwKNlcYcoLUCf4zByYNc0v% z6%?=brfJ>%0wqhWZx!{HT3=Ae$BUhdz4x^eCzUy=!bvq=sj9t<8 literal 0 HcmV?d00001 diff --git a/backend/tests/__pycache__/test_api_accounts.cpython-311-pytest-8.3.3.pyc b/backend/tests/__pycache__/test_api_accounts.cpython-311-pytest-8.3.3.pyc new file mode 100644 index 0000000000000000000000000000000000000000..08b52ec7cb5dcfe0b0e9712853813e8c8b31dd85 GIT binary patch literal 27161 zcmeHQe~cSdexI?&_V~xz`_tJZY;Yhy)(|$?B%2UILIMc^E^Ijh$FT%l$KxapUVEAG zCd7Ev6zDZj+5?U@Q4f0C_KuPRUHPZdsnSVzSI}MmILTf)iaQb#QhQQSrPJmTDunt+ zKi~K4=b5p+*}&y~ag*7v-;dAxG4H*3@B8D;%RhB>1teVWJ@&oK6GM{ppX6y=zPjVm zFW@*MY0`wG$(k!GPq^5xds0ccWNFef;gxxeZ^Fl7)Cm>8?yP?@FcDyJN;Wtdng~sH zO>{B8CmWuOOhlO5o9&+Lndq79o#>tHo9LVDpXisRw0dlyQ4!7ey!0kKZ#M#@iKy16 z`O_gSkY1t%(@PF}wb1jPi9xN4`IjD6wD9xF#4?w(PtqdKOIr7vE~LjFzb2Nmm>$IR zI%2NVTnD6he{p}{p|nvjVu$m3EIE})7-{`@CY6pQQ>pxPt`NKH!H4(6(m8D^pUD-B zk-%4y*^HJfq>Wf1X*_AfHjj+Pk~s~(8%F{kQL*E3+4M{$3#p@Vml@n`oXn-}%4X8J zg4wnEz`lDQ(vvx3Dz6s+^BzeT6564-Vg?`Dd*GqO-tm3+j_)JvU0Dk~PdwPPm?SKPsJ&W$95oDROaP2>K?~8H6ymA8rg-O?h6Lai8`b zmNn1w>Worym*ivYHK4i`qKz6#((P~}*3~(-u(XNRyd~uwpS{*&s7GOW6H~xT*0@Tp zcii??(A0PQ_Vo7F_DeIKj=9M#xdpD6o44dCc@MiDk+{l!)<-Y@*WkF)tIvj+)OfwvRgEKQBZ&JhP-1qw21Apebj zr=$=1ZmjgJuJ)~-mt_C4g~?k>YQ0NO-_QTeyKVT3l>G9Q^H{iqMZ|vc$tMe=j1V5~ zjCX_k-N0?WAIP`)Yy*PY`5XAVM5w=(`H*-4-J*vPaE|&UK*KG8v70eix17?u5oJaa zES#E|#kIDy18@UU8C@Ws@TRUsmX_6}mk=No<4KI+#%el(Q9CkS$YhNXBb}PoGli2Q 
zq8J+0x%7!*kY2Fc)~~08SULTL#zJYThp_r3ruA$w(7;S(;@vt;<~p%2rsq)lNG4~h z`gATq3$E^`P<1k0NMf;0nt_y_#-g1_X0v9$(} zmpYkf3^qfjH&3TEQCEo2+Gl!F*>qBmtEQ65=W?cx4d$sSQ)L;W74#m$63b%-Sr{M8 zW`K~~t@k6=px1}qd|bPD_jv2kYY{_JpYanEUT^u2LsIYZSC+rF{M3Cvk1VT3VwK1Z z)yNGPN1iPUlUc16?*01Yi<58cdHw$Cb(<^Uv1)j%thTxFLRfRd3*p&<7s490a9LL$ z%io385>&~vxvXrm-Zf>cY`^Cfmj^GrAgWB>HpDBf+kR?aRf&BkT2Y2;@Z3{XR(@wq zMT!61AFipJYN6%x9(TZdQ3AP4WL^q*Zerx)fYg8e2hkhfQp?fNN_4au9sU0O^KMsv z;GzU_naI54@dJHD@?qED8#hy4zH)R!CAy&+-SGX}k=Js{YdPh$JTULMVm7=aRSFN* zdWX&y%f0b(sCmx^Z5dpoYF)l^-4;>(ju6(3wo=@%Gx<)ErwQAiI*F<@g!d8w{PA(?EcYk zVE0DfkH%!sAK$)a?<)7d-MD72&+}eD-LuN~UT-hCR|WQr1m7ERlY3Mq_eSO(>jnOO zUtn*K|NR~}x%*{u4>0#CW$&=>{hMV_Qy$eB6+0O9^QJku>ux8f3<*FL42TQdnUD=;+?t

B^I=7Bilb$EX&BOd(0fXtrgS7ASeZ*9XD1D~DZgqJD4FB+TPN1)BZqS5qdjbB4zy zqu-V{P3PNtuVIEIqIRYK;HlWEhF1~Zs7)hWcB$m<&TR^^zH&y9cd?|4bh$)CD*>^LVEH?1C? zI=L}!1fHooVmo5w+7JixTS%hh1PMTanb-~#6isAhf3sIA2 zWh#s_4)MlHFljR$GL@;kQ80s+nwo)KQzuD4OC)u(Cjpr&3;AG5M{7+b^|Tp6bmGu- zCR@nl4AaY{GIKCt9L-N>H3m%_$>t9wvkBIY{)CY{oTeO*8`N%-N#x9)Y0@~Nk5Yxh zyO|&Auf4jS*Ui2LF8%aWHknHn5EBou6-eOL8=JPY1Hc3Z1rs79$xMitLIjI z2jvoAjhHx;*G?KFU@#V(5zdD9;feU*OF(u{Ftq+<0B^;gAz^ai!qA7^%P(}t=DK6$ zm7}%(wP%l)``6;G^{hMVE%&VBcdh@1d7rWk>?BCJJ9bf0dVH6OASBqCoVsg1AbIx4 zRpqX-{hqUU&ntJe0yw_Xh~gOGMXX3A0$W2`VQn$U!HR_Yf^rvXC~`)KaC0z1cpxb9 zbYv^Q9A>4pp6f33teopvIeSZM8++Va(Z(RL$%@s&mt#|STCMJf^48qx}Di$M-nBrh6NmvhzN>awz0C)KLyVKYifuy+Wfg_r&= zk-7nv24Wf|T6{93a&wPEK@QnJuV;$v4KMocgfNZSUGD=PDy0YU!}Un~RSYGV2z@l2 z)SzB_7_s{aLB*evAhVT+MrNl$=nk$gE9+U!Qiv8;gWtV$2tjA0v@{{Z0OCr!j|qOV z$w6vzi-E%10qJT%`ZPBlQZai;xxf2?dk=6!11a{~bKRFww>sA?AB20xHRCS1XGPVm z4rp`H&^A50P||@LQY`$DM{~_EeQ=%2_LY3bcRTP$?h+w9BWtS#y+-VU3=~8kg$U`Cgls9A z?G&)rv;)e)??W1Kv33e+VJ%V$)!V6S5$)7vFVF6)w^IP^6rAyq`Z(S_Ui7WUOPwto z)yXuYZz3{AgjQ&ME6Ar|I~$~LMEgxaRMlI` z4GKrw`+R2r&p`m%dU>9Ra%9d!kk*l$XW{@`y?n95!XYl20-nR)C0d35u(XtZFP0W4 z%>s*DUINYKMGH3Fe3?1E@G=u(%S_ibUuME$nTbF_buE{fp7BrR^&J-&{ZYIqY*i@+ z!L7yW?3%t4!DQ5AyF}PBlZ#uInOtL;(Vw6Ul0*&>NfAkdbez8ZYq>dq?jS}6xu>G=lC8pz!|2LWj7 z6-Y;NfDwYUj^v_N4!rQP>3bZeL^Z+mCW(X@NF*ln+H^JzcPNo~YC4%EjS75TuAwoN z$)x@y%#LIO%|Nw6W6mH#5){jkaSHk)YCF#;;jNxl5b9Io;+%3|X+5w3@M0Os1 z!ruYuX+Z`$AiXWfU%4VinA3y-&ZG|l_ge?{A?U|)r4=tCywMh8X z!WM2Jq(x@bQmCXtFzqUJK`^z}q}fm1r7rYS4_a^I7Kb5_Z7JFS`GiyX!sohYD)MwV7Ma--{_MGfK+sxX{RDL&(f7-ns%ChyxN7ut3 zNjvWi(=)np)8?^a?}3N!+>tz#+8%@BfgMkuEN(A`NK|U<(TCx*i)|-OkgYb6(M)eD zpMNrwrd=(>5TA2#v!ah%3JiTCy0V>Cu_^TF6M#IVKSsdEi8!P=5(D*9Am=Q?U)`V! 
z9V7PDFjKGTqm6@^+!6g*V2i%`qr~M;y}5iL24k&U+}u!6@(?CJLKVsF$8gl`$D88D zFd>n9JTPOrjRFkVWTlKNdIFh~K68`)$8a%u)27+Hm1{O{jfa_~Qr|@=(g0q)j#GaD z$>P2GVM@*PoC+CX^rPe_1I@zvPW?rKWQZIiLi?WdEQnS2X;XJ@GkOtiS9dm5o_$Pa zZtU*lBd2(ygE{7UPdkXJU>wK%QrCa>FMGj%S~;EiAR2#bRVBK<8eL!RcHXBwA3{C) z)mOh-Ua_Gzw5k@psTN%Y1MaQZ;Ro`uD)q-^J(a#S)xI_Jekr;F_T1<&?77il0J=Mq z)5?5E@~{0us8u6l)<^s8Z08Tvcv+2~XSAZmtIUBA z#|=QID4t3HL?%2$czG$v1mPl(W2_;(+)il`>qu)25k)uxkpv;un}?tjUXZ|x%&lOn zyVW>BjLmcSyYvYx?+Ys}TE1PfsV%Huw`q=@k9(xYw_snXd$ys2LV?YTlo}-u+rA?kWg10=^P+m<+Igv7?GZmR{P5lJGRGm*!qDSJm+$j`jJxC2vz10*k31LWQrS z)`d@h$q&UzTO|ng%=#ga2MO(Dp|Nw-V(&gCSVFrhfGJL_c?+9%$)V-|Git>()e{aJ zn9)!Nq(ddr0p(N%H1^B}n3XzEXIX>R@AqsuiCQ|@>9#DZ56S>)Z|^LbU|TMGYiZq9 z;jeIg6Yf-W%ydD85H59aQL`xAbD~B71SEEpO3_g%!;@+MgdH zj%X>Xx~X#t^lY!zy~z65M~kSBvz5QiTDO?a^PEjbib($*A$@=r; z?zBn5BF>P!0$F1NNf3}ExNreSb8{*aF0fMq!bM|eD%%i|P3ItC(B?)aX6S!Gu0up# zBJ!6Y_!R5Q_%S{BV(UoSB3)>N*iGb4B6ktlLu40axanlh|o3){VYhlrzv9S ze?_4}!Z774Y^q>JIxb&)ol=r~p}!1b&?MI)O7P7Sy-bpzwillt7H?=PNZ2scMMe;d zKs!0&U2i42wHn=8?snd%Js*3e-X$+T^~&IDgOCmU>lit`yVg5!X86_N^7311D{q>8 zYIf=D(%P~$v!iF%on41-kjBE;v^W+<;=xt3dn-$BsxG-{-Y4}f$F9b{b=cL|hm71^ ziy{Fjx|H^Ml!w;;;JP2CE8F&0x9xxVk;=gR)q(rV5$FBIu^%=il(Sp#72Rp&19hmZ z4pr2lDs#Xs?m3J5d^-R~inJUfyolu?pdzp}glEzzEn*#Mts$Za5y*(VdrrhFde4h&U$n^fH*Mcernv`H<%u+axcv#4L|i`X^7~V;)k{>VJawjksa_ekvg6%l<)Iz8-!GLPdbF}{qPlN_*p|nt1CNy>&ijkQwy4`L zsN3h%?G<%Kkk*l0)Xjkx-UWDv|Gmb% z15`c2-q=RfbA{b3i&`Cg%@skja1r=Gpp&8q`&^hJDxhIq`^@$G1OH&%BlFwLKUhbI zWxp=?hu~H64}I61f9Mzd!vH?$ZPa&|FC2nYLF?d&OD5b6+NjP`K+e@4K45_ zSa6a5TjZK0Ld$(N7vxYlf81_=Y7`;YpO?z4&e|KvZjLIR2QvRCU({ zb=RD_tD^3zs=LZ|3a_|<5$^L1mu^>Q$xL@H|t(mIlhx;gN|yTGFVnsRLw z2EWaV{({SW;40Jw!9}YJ@EsvU&#t!Wg79xlT@dad_I@^XK}4tvx?xATR_X$Gdm9mL zT)v_zXraEvCOMlbsP|XWw|$?pZygGVj;bIB4yvHO-@U$NDvf@yBwD(m&)B+^SQ7Gg zWJwmOxyZZ*1t&}?&>d`yN1Xe$80!I5N;ZwtPp-hckcti z{j+uDM-Eo*d%Swz<1bHC2A-%6JW-A~?=OrZ0?gNvWoM4OdZfH!=esw7;Qm>t{P0BO zp2w>9JofUDO7vhgda&H>yia>-D)zvwE!*!oi}!px0LNDvQ5++@h!v?sU~5P#tStsP 
zII71(0CwO-Sq{ALF2MEt7v6-=V$nS0Z@yqccPA8KJ`@{zZm4($J3mH_f zS7lLZ5sNCsW>E2UREyxWgO*j)@$t?(3mH`K5s_sT#b-7UuKhwePv@}wpkYu6F@s9;BiyF;O0wl7%oxu7 z6>SYFei&4s%wVf%9}ENe6FID9_1`<|n;u3-N6A8vTzEbG=o?rpudtK*@6jLa7e3R+ z&ZtQw8;(PZT?IQXfN>_nLm`g?$P#&+3$3$F=|*EQq=T#uv4fzC+uDlsi%I$;JdyDW zJh@YGKX~(7d)^(c3~sLuZZG#b@6)~yt^~l;JronV{Tra$cPasVmA>I>-!S$Za3uha z$#f6lY*RP(A#@^FPy(RP#zq92YX7myzJt|$2T8U6cy-|Ma>RLmVOCVWb$NmH;-?r4 z&h-LwXMHBJ|9qCSf^ay%AI=H-HJlT~=0yKEn*-yn%q!?t{a?_1`coj-zqRx4)Op0{0 zI92bdX*y(3PajLC5KtUzJ3mfeA=w-%X2hCvqvKo~>~*RPY^V-wC`X((%_jAOUaZHc zSl!|X7Hc4Pi{n^0MtBh`Qi;IUkXBe*403Q3%|ifo;6+&uyzsKwWIW!b7pNZEP7-;A$Tx|6hsbw{yi4T!ME;eC<7|OvD2xb=5##?r+MO-XL&s->G>;CTkTuP- z1;U;c^AboCKq0Xvpv!anyadt&P{@iVAne&KQx?1BCWM7qGKqMgMh0mjDC7ofCTl5= zwM_s#)@r!vkOYt>pb^4?Su?ZdzyWr{XpH!f=}RQEd%PiqrKpKp=|qazp)O&`vS##zy6u0?nO<@4R2 zik3+g_*Z3*P#_xz_ObD)dOJVBVyFHSK=77I28k*-SzeasU2a)Hn}Gx*S#Ay*hpX!p z`&W|&%8k1wU0=4}HECVB-Tk2yE8FjybZ_}eH#ihmcU4|8?^-Qm*n*(9U4tUz0l7zB zgW)W%d22iS`JtQr{4~XWoMc#*H=G3mKeKpZ{5VNCEaM!FR&tTTE;l2F^RkTL*PdQv L;g>tavVr;kT|TY& literal 0 HcmV?d00001 diff --git a/backend/tests/__pycache__/test_api_signin_logs.cpython-311-pytest-8.3.3.pyc b/backend/tests/__pycache__/test_api_signin_logs.cpython-311-pytest-8.3.3.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a5984c727eec4a967baa8ce6380849d8f3e3c8e3 GIT binary patch literal 29991 zcmeHwYm6ILc4ify#Ygki-RzcHve}Z{5-q!>wj^7!EsrJZVI?!2naDc>cN{Pjt0d84 zlRZ^z$)bx{CH78QUhP`;cp@dSvD!uKm11L|{j+d>3=mId5a&l0Y%PT$3M;_CVu01J zZ9^Vl;2(?q&b?%@s>o`!0DCVaci|G2p0rdm=R=A_LRgK5Bv4b`U)OVnDx&DL>?2I39=YzM#8VR6q*gqgl5At zVHWQzMP{QjQReoS24-V3vDx@cocRN##B6dVIh&eE%?{2Cvan!jXm)sJcs4zgo*kJP znH`-O6$M3laaE^+vh=F(9z5@N0)&~>@+vv3#N>!FCP$UAqXBu~RsYNyImY~JkNV{J ztG=20GrTt%&By++`eQq9RnjNqO^p)wDQ&|y}QZ&!|GeMaQD{fD2+ z?Vmn)XnMbqpo$+>G_6=Ef4fr1mki(DyoO>A~mDbGvZ=Y>fo$YM+kF=;KmS;eXk02??UjXDEnWP>OS48i!ZW)MRlvDJ2e!9C*ee_$33=c zc5gwD1G?{npuN^RP>60 zD<_MxlF64d$E(%3mokHhf7|by`vY9c5DO#JHJ8i_<uRwWlT>2=}8btp)JE@pneJF}Qwl@PX#w151J!TD?5DJkm<7TKEqC 
zTk_iQKc(cK-9C@yOIShdZ+`Qe<*_uP?uEcs@73_*!Jmqc2W@?VNd05{-y*D^h`d*v zMa!ru1YDth6s8`RL-#FetYyxqLx?isITlW2W^uigb|9=rDs2GZH=NY1_^O7q>J|co zS~ibv+ zbMtDc7VhB86|)I7hJw@xfZ;!)94nR$Nu4j}=sm1btqf^aspj$C&Ku!^s^I;d%a=;V zkY%v4ip}PWWi>$whI=YaamJ9W%%P(y<-+M)r?Y7)z57&p>8Ugf)gWcyFO-zLnw1P+ zp;9gzK`IK*Zb&R+xYS|7r5B13VPU*88)3raZgmu~8s$!dAJ@~oc>?X|&4{7eYX9Ih zDgW!JkQ#eq?5(jgzw@j3>SjE%7+=?nuR}lbKe{})B)4Lz*JodweP`d>-)XMdz8Krl zjO}PheQpd0Yi<}2{zow&tYOQSb?32sEcBKj3I6R3-!|*r^6hBY?^LOFY|7C(YL7u&$CV6_`4H}zU;3;v6i&06&+jhd&7ZG1%S^8ED0h1gADvy z5{B--kzRjUYNV$Y(^Jj#)Q8_$@_L5Cp9%n<5m*xZAzM`ithAF-^4W^deNq)Z&L#u%ZI@6 zU-~v)^KC@eYWlK`&aIumfPR1To=x75Hih>*68va~2>7!nC-!gf{{8xi{XzfF!_vMD z!Jns6;Gb+;qMU$0to26NXFE~zY^X7J5Il{ z?;Wq~mjiEmoy;w%)ke8Fn6Qv(PxR_d5ftq1Kn*?oU1%F;_>-HU%e=W&9e8N*L0b!aEm~Y2 z(nBW&HKvE;p{`^r4|m;%1;p9>*>r8&vr1`BQ6JB!V70WW0=-IR9%>aP#PHdJ1f_hT z0G^-AFO#(xk{Cg7^_0l)!cmJpt-O?iIVQzrBZhCTqE(HEr4mN?>ABM+_~i1c zk<3AmDS_#pQ$dKiys8*cMCXpo7faP*Su+A$)GJC&n8)okS^#K}5B&GP0^&~m zYd^*0&&7#P5@WwiWUeJLjq$10(8R@~jiCwLt>otOCmYGl{B8}cTMGJifguBEBr=~0 zzGUz-0tkt?gEP-8g$4gQypS9A`f=4vy;a z5Fk77&9WT$@V1g`ewiGYfJSm0>}?9P13*aB9h}*>BnheXg;j5^ z;xb3e2SToE*zao=@AZBN9DlJB#c@n8V$D=$U~fn-T3;OG;HVxC0kQ+%EX#opZ!0)( zesv=_h8v>p#ffHo{L*AIKGl$>T9M)Na&oxM0|w=Y9Mywu9#ETm~K(bDDwXsN@>9~7}7b7SF|U5ujI(`UHV*5qO%wGXRFCC}*SU ze)7{8Q4bI}NZ?rlhY0)*0UAB(w+VcQz%&8&o(!ooS~cEEOwmV}^xh=B$1Mr@=tGTs{9a4C5) zdNJBsxBlJ2Zz|ys>Wk;=1ik!1VT| zkUtL9ApjJ7@$iCoDJp~_uLoWWENEvV=OPREYlWk)XJ5;n-F$9y!#C`9v(ZCzH!pAq z?79V00&p{mFeAbq!J~M)Z|Lrluq^i21q56$Jw2E~7ff#trsRT&#bA5qoC&#L`g<_L zE|`HH%!nLBCzp&ATfy(F?|+ZX4EpQUj$;~WPc7mLpl97Kv zhFvfndNmg;2j?Y6+B{rXj>-dixXr^ws>If}FI!HuaK|n^VxN5NKJaiJ@NYr+Per{4B3t!G>wgx)INrd?tMv`g4?visV!8vyNMc;={8+f@(R3^*KZUJrpv zNj;<|+f+*FDeWI@Ph+K03RFrHwv$To$O?P+DqG%W@2;-tv;}mmwimo=PhqxzdkXaU z5nCx{8mA`N1J>}82eXMOGun=3VN7`*WxPrDFwC7~^S;qz1H&suzZNj$D zyv)tMz+;dNSld2~X)T$o!b~>G6&5#1ddIoy>1iXxesf?QKTkR(mo1-1xA52g!<69_ zQT-z{jJk)w3WO7=399)LGw`?~)>5RdoXTn)1?BdvcEv-=$0-8aZEDF!xmxm3!w*Z6 
zSrlB`N~w{A>p$V>>OVKNHKN(B@)K%JwTxO`@u_42rS2y1Cj_1buobn77qu%JXKFeo zyck|A;NPV{#mdbN89q%Z9aT4@IKxM(42zgWtj6+U78mX;8q2L#CFLVlz2pw7=P@f< zTYbq@o;$eKl`A%PB(VhIhF_ZxZI<&Dlv~kHVq}M`@OoK-RfE<}{2Ma!E{z4m@(nQ04 zU$c0xOB1~a9DlJB#c@n8V$D=$U~fn-T3;OG;HVxC0kQ+%EX#opZ!13d`svqBpMCk< z%NHMoEyaSbC5<{g3@`QS&ikrK;xL)VZys;M>8!PT#B)B!KBH11^>6sL8|-e zcyX&zz|NsDE4?n)9 ze#uyoholgIq+sEsb*H4@Fpq^Iz$~jC)Pr5_zb?rVtq7YOXIKwIQs~R|Lq-ULmkJYh z4=)w5>O;GRymrbmNx_=8`3z1j2a1+xy6C+|WeJkeNjW8+l}??O~cr@*E69 z6m^n`1=DQ1PM##Tn?ya8Xso_WKqs&Q>8wt8e@x(C6ZlgC#3-n*68JX+NP1F#MBvW| zECAdT$#nuC3#*i;c4sXz-BaA?1aet6)s>@|sXS8_t7j?g^8^kMI7ncHOH{lrvq+I0 zQD38kn+g0|0^PN%0(pN(U@w8+BhY67i;+-0N3s8oz`rN(I)U>9-XQQM0Y(Z9E{#-K z775AfRA`l_91@@D1x~LF*c)P^!NJph#Zn~Y zKX_{Zb3!ux6Yb)TH)?OyF70{aj~9~@&Ey0PxC0`G3wv78{f+4UrUk;w@5O%M*E+Gl zx!yrAlb8;k)MM_)i`zQ_N(!F`OuY&VSRaSm=^Kjaxq>stt z-^_8x)P?J68Ky3r%VReY+dq-PR_T!>5$z@|#`UY= zBkdaAq!rC<@=H-Feuer(*yv&l-6MvdG=h)ZUZ*(5bc$B%3y`bb__awlg<^mFi=d!Z z!m8C*p-p_8Gz#vZt4%zznA+7$?P^5(+$6+X@*r*E~v(AR5XTJwh+$bd-e zNX|2HAan0M8xw1Twxmo$$}|~-!@RFq+}HadaLh#(Wg_(TnwVzh))1t1Bdagv~Fyry4}Li|CI ziS^<5a|4=zy&+~ovl<-7&5Ljz#T_QR3u5!MUoYc4Zlb6DCAg0Mn)W{eEAr1#(|!!w zMg9{u9rT!``C&u_KDB!ci`DGwOE*k|VH^z8g13>dC5K_0)i4bQm{<(cLO6;h472@? 
zfm1|}xEiK~^$1LNB81&zx)WWYR?=>8W}5Ce2XhW;#(9-J-+f%mvG#fi#bz ziL#?;ItDlcD=@%uZ2PdA$g~Y`LNLCGfZo!IbR0GvyB?F17Tr@^PSqoNT#vL(t`d5} z)i5orC(v$#gzaoM7plbVwRgdCY}%2B+JOUtML#Xy-yI|MxSaWB z(ue8H#%mKyXT~>AXO7~uAw?xqx*pA0fX7Ya=un14+Ow=*JqG@T0V8%4Cp2N>by=xW zVjei$4#Wm0B`=?58)Xz({c}o17J0QXo7Rn2V|RPw_g$#n{sU#)$7ZkR6qap3yfq2X zX0e1nOw>hey|>z7iu@JA4^2MD_mFwj7pVw(>}h@~i^uRxZZ&*t3l?eNG8ty?_Ic>Z zV@eqZE~!t|_N_z{#h*RLsoOo;*39LfkhiuKfa|9k(jn{JWZrA`K#st+JTObW+C!E@ zt!}$b3|r$%rL5PnH>|dGtq)sx?w*uzTTI)8z5P(Z$r>W@>LE+UH*KJF~@6rYH_)nXa(Vl~imc zI81x#QfS!JU>+n5=0Sen(_r3}2J`E+ORhVS@>z6II>a5<*>q}pt@ACSfMbo~A%JFJ zZwPYWfpBm%hnE+DD30lfHFM@@99lF@)6{dP`EBpHUtH7l^-@Yd)sUWQN=}GC^S)+r zU+*BBv8H2@MVSbFy&M@)W-4n4(mIm!OdQBouf{Dh*Y4Tr{b*-+uP693zX;f0PxKO) zjxVGqq74h}3&)Lkm(h^;dyX4fW-B626mmOm1Ye_QZz2qKO2+0X$H8ZP4x6=aBBV7@ zcDZaKG$m3T*B0pD-ewb_)m8xdH-7cmL}<<1JdmN!CPFJ7Ej6>tiN`h(_Q!N+b*!5% zyPF(!hb-*6iIDbav@csu=C-dp*+j_Xhj{HVCO=e<=d0{2Sgf|^%jF8reB-CUR`6{M zmB(zfe0oR2Qalbz*fY(3_8Jm-NThJ=)4BO$^v>l^QOFKhR=w|T0}Zlx4Rmqrm@$Iv z0LNUu<7l4@wJaaWK89d2hUDPP;Y5|4^vZ~mV?@c(@pc+JDTcrycuIA{j1{Ra;_d@W-?GUgRX3b_CGZ6f6wL3qF7m?eR zl99!sY;!1k@1OzasRY)=!``jwG&|Ccvs)jdS)Fq{W>-Y@UN|rc*xl3KtXg;+we9Pk zQ&;!PUd+;%jZX@+EgSl6$Qqr?a@4v{XJ1j`UlFqF_~n50jJob|Uw!6GYu@I3?A$sI z{k9d)d_OqxFlPo_Fdgp^7c9roP;#)X8N`Nkyd$KxjyAM{qt+d>ubujB9W5QT8N@N} z^l7qiZL4L@EYq{H`5>D-=k288GiRG#I8U0Bsv?2RS&Z$3uYZ zz&Fcs;G14HT}@}BhLmFmkmqtnIG3BP$nzxy?r1Le!})xPjwXjMz>hjNe6xA=1U`yG zU+5`T)E`q{`U?UyU#S0;04Y7xpAsN;g)6V<93Jhb*0?SBHCG56L$6O@?1z$vLe-kN$)cl~$7$w(mx_|j7x!AXw_=e{J z!lC0Uj37Qgf=`bq{2VARuoy>&lk|&)_5o8z2+xa;G=g8mrw90MY1Sd^o#b=rN!|mW zQvU~%U?2hGJqGeO^poMv3_O g5NP=M7zdelVl1vrdpsgWK!5lt3;)a|mW_@72R;v6>i_@% literal 0 HcmV?d00001 diff --git a/backend/tests/__pycache__/test_api_tasks.cpython-311-pytest-8.3.3.pyc b/backend/tests/__pycache__/test_api_tasks.cpython-311-pytest-8.3.3.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2b82c2e0395a989133c22a7d7d325c63fc93bd35 GIT binary patch literal 21562 
zcmeG^TZ|jmbu;AfHGD3~UF}0FQLmNgTDy8bY$=i~S#B+Plhkq?trasA??~c`kIf8M zkIk@MTW*>bDQX}EEI2Z<1ya?j)gnIy{L!MZoucg@DNrymKmY?pfclSyT)sWBbI-kh8;yz_9LK-=55*&69QU6XF^-TPxbv!qB1g%U^C1S;2Z729F+pIP&4e9U1@}9S&%L!#YtQmITIv_F52;B z(++geu39#2vWs^0vT0LYw3%horn_j@+=CYC3F>#Si)JnHyvSvT7Pg5eB&DWg&Q#^h zp>O;_?nOzyP%KE9T3$JyDd#Ksvr<{A)G|_q%vFn(nldSVij6Uy<+bo3<#MHPxKsoL zh(>7iMM+VL)k@Z@2`?#ha`{35P;<5Fd8v~1Xfdf$ zkT1{Ga)oO3d{NRwBFV)IQtrHT8N#P?3gfH!WvMnVR|>q*CE)6V{}uS(`4GS{j)&K# zX3NKygLrQW*S*kn;m?jbuX0N^EF+!cRQ@`ozh~z09ALuB*nK!(hASNLec$^n?k(>R z1WIGx%TDR$?xNzMS^NvB!?cnz`AiL(Co>NY`LUT|IR=Uw{@Pr?3bLg}MA(F^3&SnVh?5s*+TjE$#9B+x^4WY+LaaP_ES6)5Q6xX)IwGE-i*$&4SkKKW?aI|Oz zdz%I!^$Ps&3_-j1>O;;xQTNnrtxG(r=Q?nE&*XRtLUnIls9|O6hsw{IG{j2;)vF4` zchi5}50vm{N8Ja#O)z5itLk%>f%Y*Qe+##(Uj`ig(!7^N0lnVE9r~r6$_M>2xG?1G zm+%hbyFIxO$41}GUfs?3%F-UHzsot2ujMuW)@|E&?A)~wIRU5{V^j#tL0>9X&Sw2u z0Q*d?NVF*JXtWDq6yEr9UOul4=pzL5a&{v{^As;=ex;U&w_-XQkny7`Cxl!g zr!baAFbE*)r~R9@zUHMvOQAS-PLgZVrJB4Ji30jFDaay24|xSm-c#V0Tj(YYAfwVeG-)gO{7)hL*UYA@n%g;pF19 zJOU+m45QfT=qqp#0=-@Vm76sO#iDDFtkXeO<$uDx<0;}GCM;}vPO3;UjP;r4Up|q^ z&)3dn&PjO~*V*R=>r)@uyvqD(EqVwrs&a85UxN?G-M}+aUY6txj7j)z%Ig6@7{?UN zN52~S0Hrq%^3Un~@!57{tT8^hG!FpJ z@Ki&XqBTQXnDQyuU&a9NqjmH=gjgL>*^4Xlbr|#=ue(4$I*wss2X}?HJ`(V%`C(l&|)FJR8mGd)$bhsWG5bexai@=tMG7Vduc7q zfRke%te_K%#IJc5V05U9BtZsLvB=X{xP~v3emFRp_g2GBPR`it&@(Ra^#J@sP~ZNg zn8D3{b=ForqDDy4x^X>9QqVq8H8juFV`@w(IZ9%+Q(BEdI}IYHyPX^qR$WD%rR{?i za$=F8YaUuVak7F8t8u-q63eKogp-pqrq@*z>M91$8zCcB9gv_EhWZGx`q2CCz3L=F z>tyu*WSxvT>f~`BS!u5i`&bKaUep}vu($Ld>F^C2N9-^629Z?{IUahP??m76(1i!% z;i^7wuY06h{YXbT_Vxw|ik6v$EmSU~$|1;u8PI%C1-)1Vso_GtR3w>#T&-lJOLL&8 zpoTdqqP=$ zO)M%osggfkl87ei6$FAeU_W8p%6b6yu<#`M!)}L%EfTR#<)y4wK8`gH<=8GC!MciT zY19tkxzMaI2~9N#x@d(qkc^^_Cd^fp8kO#}u&vfK@!;HLCUVQ#K(3H4m2$E)Cxga1 z2Xdhnf$ZGr`C_S7tSFklR0Rx08_p@`s`Dj6QFCWY)zkS>j_SanoRU8yAqOpXh}JXo zp=V{eDr>16SU<}78g_ybm^@JYl%K-V4_NAgB@eRB?xo1j z;*H2|cD9r2mR@NLY;C7Aod7qw0rdV+5%m61U?`;fgC7PvUdLTO!$N$1|B~FhocGTC z;)^eOBE&e~#+ESAaGtkp(c8jAHv+?N8d(g-j6$wSWhQo~bffje0S>O>u@pdd;hXnz 
z;X||?PW|QnKi~gn2Yz&5@kpB*Q(C=DDyE&~wy?GzLX&2h2tA{W4`i7P?I}R(B4>XB)ZdDkfGQdz3)}iD8pX;zR1FzziSjlC+W}0= zJ2AKm!EOKx5j(ZK7m|u^g1J=Qj~E9K97J#k!LtaCB6tqKQTg{V_&kDR2&NHy9l;3% z-vw|lNpHMM($kB=WW5TIe-l}>Kx6vqWW84#13Mnedb-SI7qwhcShrZm#4#6|E6Q*l zg{-%(A*^c&2>YG4`y(`ImWj|a%Gg4di$6;NS{FHE;zG7Z}z=9m66!%ovKnDzbEtQ`rE zU`Z(Z>VBAHFb&FR6el5=5P-W|bB}bA!KAGR)WDKy__Y_0gzZlTlMIn}yfD2Gb#k+p zqaGxF{JAhZ_L7>@3uXy=OfLjyaZZOG^$>~GMKz>~x`{TdhQa8D6LxEcnd zTLQ7&MmI80!+AP`CSrt~wMwTK?7YpoR9(g|FsIq^Ok=zo56o!;eK1{LxjtB;(~L)w zy3s8{QY5WLifnozT3c;A5yv?=C5&{{WukZT1(UfSrxxHJhWZXJp%mM7IBN^15Xhj- zR2nCqdRT>4fa!&VntGyPX^qR;|XJrR{^|nm{4L`t-s886l(U zfL>S0Wz<#Dc|T`Nud6uJRRW$jM(pPEP;G-%ZEi`X9wsa6DK)I7^xm9S)5^a&p2n)B zxSFOWWWaXU5}0V0*PBNiyv^QRTW^)o`E0V2!fb*56x3jBZ)cOn{q&BqSXi!B>w}eY zmXoZ$CeW#)D9MmDYE*CA!DY1Vpp%m`W`7T$ebe<6@q+<+?ZR3r7nX_&SQO7v>oc3D z&Q~(JQFKy9li7(IUilBrf8tvCOaHB~0InfSPl?XQi6_GDg3R;3R?>1g{`Ch2T2~ zUPX{YfR5W#5#1q1|vgy0MUtRDF+f^!Ip2xbA; z4Xe5lmKkn$GLteh&u-nKp#P)IrfLf^^D1?D$zgY4zE5%;2=Io=5t#DUeGZabbl~do zYhQo=mBvda8ffm9*>moOA zZDRHeC`lMiJ~*)PnLXYQ_lSo*fscGVz}||^DVWFkGKx-r)y4VQd;qmYu+)~mu8e|{ zJ17}8D)3LY=hL8tOTl^v6|?gzgaQ?)SY@Jd3eU@3j*rA9;& zrv6kBr}Aen3-~?>_;cP9e5cGi+We`^v>pmi!18wjB`OL&DM2-)i-@8sg2IFTiTW{X zdfkv1C^|f-J%HUMB09WiW=+x0DZ*2Q$Ijal%Us@1pzzr7%z9T{cpwfY`e3?Ls6JRO zwTKMp3Qw3M35d?RpckpFG3o;p9(&15#YK%cdyz8+f}Q{ZUJ(8w)OTPBU#G6aSzGm} z8YO9)!V?39M^vE|K;en2aZq^dC9&FRP>n-74I!qxog5Tat;U?C?Sti34p-<3Pl61S z5jCOL)xa|9YQT9vXH2iF7}Ql9o;ONHtvVp06|Tp?7x4dGeGmZ@gt3JQsvykIfh&d0 zLuC_lotPp;1zjkzx{@HzL09Xk3*3zv&M4FYWs0g0N2$SBmVxM@wF0}a?s9;ZU&0&M zr1iP^FCndGlRX7>0iWlg1$6q=vq{@}kO|$)t(-Q2M8$6Tb$FmF@K?S7^+T7s6R^hM z>b`3S-apnj@$E+VWHWrS6+Vgc*s({pwqQBpx5JB}mwSOt{GPaa;@ZoN$ON42E^?Lvv@UYS#D#2+(lO&hKKsl*?}z)u!~VcWAs%3FN%>X(V@df3!h=Z4 zbj<;nnye{?uRul)FIz^AP#GEgaC^weF}tVvS1KdN`bczNPDVD?9E9nbgWtZ4Y+rM* zTp2khcBqM{4I*co(>#%wnv|O830K$lVcsMnQhE3ye%G^ul z3Tq=DuXcP(U$yJ5Olx%Va>g8QXG%>i*`^Y#d$}~9G!a0jX|;!eD?=+{VBW*{mC_n z;Vh}4Yu4jbS6H<$NOc7wl_Z<4a9`Cx{$pq!`FjW+Ruh1g)_w(UUx20A%w-(h(hZk! 
zaN5=)v~aMP5~X%nSVyMuo5hph>K`{)#Y#`wV}0W?whUMXKHoRi9z zCNT{cHDQ0fN3kNI+(Wd7r^7E@K-9<9Kr9Zw$j6p?lj*sZDsuJ@picj99$rN^{I$Itm7*_so8a62{racaY@)P|)qOK1N6#9x2}~HWi;2nHj?BWkjx_Pi zUhju{#lzmfM}8ilV-p0N<3(f2og@rT-LnaOHq!8b{KErP*8!U%K>8!wNW;Y)q#oP$ z8#Qx%F*r%_)@o*OU&1*AFc#Ui0`c~J0`DVz0vC7J6POl#I(y--f#AM-N(aBH%EV1{<{7Vazg8bC#Y5E1c0`Y^bNh*oxY?^@Ovg%;=Ph>HLYE zcfz1?JA#3 z`dApCLHjl~bc-4pYj~Qu+E6RH`~)P(zlY#S1Yn8a-2OFGaQ-G_FhhFr$1w2R+mN13 z(~WT4!mR1$!gif-+RNody!C^4w`60?RK5uaHIY6r+Z*RKObD|Cx|@X-Wf5Zwi)9M0 z3eM*_&E?EgzM%$7VU1mutQ%7~6c;?6E)pVD zagUxP?$MLlveWy^o#K%Lfq(Vz0JH0~KrTnBgv?j_$}}5pce~R6p-7AaGu~BV=X@2 z@eJ~N+liGOFI+!~r#e2kLfKQJodAY{TxvxpgdxrV$Z60Z?QF3)CHeJ~6I>}LxayoR z#5iGyGl0bjQ=YIm4fAU$C%95haMd|sh;hOYX8?;6rtGqFn$kH<>71raPE#hQDdfa} zozs+^(@H)=Il+~3f~(F6L##{~;tXJM!j!LBWm?N`q@3XTNphs)gKLK~fJJ4b&^-Eb F{4XiG)JOmT literal 0 HcmV?d00001 diff --git a/backend/tests/__pycache__/test_auth_service.cpython-311-pytest-8.3.3.pyc b/backend/tests/__pycache__/test_auth_service.cpython-311-pytest-8.3.3.pyc new file mode 100644 index 0000000000000000000000000000000000000000..188b0520120f6c6b9d1cda93b4718292f6db7ef7 GIT binary patch literal 43761 zcmeHw4Uinib>8g%?BDJE;0}kw-yU!{VhP}Y!#^nS2LuU-5_zHoP_&PvmW!JOu)yB! 
zd1j9wmU}6dNlPb{Qer48pk*pLDG3dTv`Vf@QYok8SdtSbm8y6bS(sOwr~=E8V>)r_ z5Wp!Fl5*vH-7`Hiy|ae{L5Z@2#o%4{>(@Wi)ARbh*ROkiCz*^X_#J)v+l8Smit^_a zv3@}_aDCXPC>In>8CNu)<}doj{p@dGGC1M)DU+e`u#cxi#v?369ar%;P>fE-#$%K5 z@i8FL}|`E=IE>n{{0 z^7o~U{KSl2D4$QylnX^;Yx)pUkMYEGv2?01u{E8W($a^Xd@P-x(xyv=sj@K=`*f~Y z&~oLxkuK+qvqpO7$Y}aUFMk<-ca6l}qx&4o_?w~WTzTSjGkVB4KQ-}0Y2s|g*Gzu& z`0*zneQ_c`T`rWSfCxT?wt%>kT6ipP7(hQ>n#dKK(IdID`A_Awf{_U|lc#gW>FjjQ zFwT{9t(kfuuNO|9w-Ey`m`xI>tWnnUQ>V(On>`bH9*xcBCMHmRwp==!pKA7K`H7O| zMqctYfj2|^ zsHy<|Uw<5<5ATa_)>n3)JPW$UIvdq(7O zH}wU@SXGW%rGZZ5DT{YOQQR%i-8n?3wa+nyw^|D>vpp-k+avdb?xqv1km*o&Pz&8* z-F~SD+SM)5q3)3DZtPYD+$Cz^H-iGQiYM(iaskiMUxc4Umrgc;3%!%$#n#>m8Gb>j5R zfSx@FVR+4dMTy3_Q#x#N-ho+Z}u?74j5 zMCsWRxe4$a?OAG`!C=dn3mpS< zfgg6Qd&8)AZESRHoD2U@-BM#$UER{K!W;v5g(00u90Mqjtr(uYU0$S0d7YR$%*!j4 z3Pg3lAW2FU&ES=Ccpbf^cr(c$W0Fn=smTly!iK~o{a^~nXHW0MBolbqszKvF-3DuN zGrpe|s|>ncdz)oQ%;k=mn$&N%!BB6PsfCuo)Ix4H=8h3ld(k)R$N#`=&|+@U!zEqM zPn1UoE0L%5($pyg4wy`Cs>B)H9*fZ-ATc`dH_qsG*^G`ckW5fti2Yj!bMpU{$hEyjF#{ddsD~6ZPiDfFFTc$5oau^KuF zen-?N{cl0_dfQUJ6bOJ0DfW!OzlD6ZtO@~eg{y4A)vA(+0{-B*CtFsQ=}cko&gzzU zUE8`PIxg(J>1U!;SPN^BYPjHIv51QYIq$?4tRil%HE191*Xze3kDxv%?|f)yX3bS%gqkAIHnKaxn!bx9Dnq60L`u}q^2ShkS0o# z(>Xn_uc5@X1W2IJ(*y5szv)+2j((@7-nXaGx99WmIrX){1vOJsGnW}e z0M`{Y({MyNlEb={#A!s-nM)u=IRe8Axs$vo0j2SB1mqYLE7d5~%jp^8@s?vgig(z} z31m5jaRSVFRUaf3QtI-g)Ip7Mj$OeCJl<;0H&Es`c(a^z_S-K>2lQr{Z&I+F#8WnZ zuH2%eWvM73;%rs_3(9P`8a6JtN)qp+hBR=v2vN!$&XvP%xg<$w>K*o&#ADj_m_&yj zQ?;nciy~S~i&rCN530-PLDkKD+%Z!g3eAQYKT3>Mf;5R$x+UocjEQp`;slk%{`AD@ zoSuVdp&Kxx_zP2)Ob!O6FI|{oY9FMbW?<4d1!mQcT>8BLE@lNuN{7);Fe{rW5!>?O zO!@&%zLCy-e|nNA?<|by{4^WMeDcY^+==k+!LqE16sI zF+sWyr5ew1QL2CEclOr%?`!nm_xU8)*v18QbIoX0bRP(y7s6!1eJc#QoNf7WT@Ejwe0wc0`Vu0iqGK#PhgJPw+oK;9!7=s+k z#F}cBDgW7QGnUOxmb95-9^phb`)f0~BAErUS^XXwGdlp9VQxog1}Ag+*=8J5BPhyx zp_13{C!&u5TvAx!Ov=`K2upzGPU97Tm&L!uU?8-0Q4t{K5A9u41W=W5;?-x~4XwEx zTCrK*Fv#y_=WjRkK<3k0*rm}5PVF&gi8PoZ!)8Y)Fma+&Y!fIru#?W7btF1);BXsCNw(D@ zCZ7%w&#s0{J{>N1S#3}hS4qNTS`E9~=8lwPrYa(Ofg!UYY`@&+pB|^aK?_@(z>9 
z*+g~=DLl@l@DN+U(4V2KM+wkur+<{dV+4*67$UHVz;*zGWPdT0xn*)|bp)12z9v#x zXTJU1-q?~(Gpz9qK7n;^y!f#{JX#xi99O;biALuWbDx#Oe;e`ir-Adv#B8nuDOCVb@o@BgOK<9-*II&cSDBMZBz|4%TM%2p=v#PK_I!%r| z%&n1vz%wc+rR*REW|d{s zXTTJ#L;A&n7}a$JcNKj`j1ma4w8&4K*AS#$hW9?LtZ8o zH-Z1+2XU1BuJ6YmTdvIqY-VGxKGVXyoAr!`nWsd6`yhNOe^SpIr;n4Dz=TyY6;Cha zEBJrC8==qnKCbvs*PIUmlV=|2qNq!q6Vj(|U2yH|d`ZOu`U58JvYZ z%db=enk4GIqG*wq0-shc1U?&_4WCkGBW0Sv&0jTKCfULKxod&wr_M&JDp;d~xyWo} zHl_t<~Wz_tg5iKm!x~}h|)bKFV9xEo-M|dt(>4e z@ir}&1@447;Uue(d1A&bziQI`WPA$7%wM=l@31my$!Y?lyo1I|%jBNA!~PMqEW3Z& z=0N|XKFqP=>OJw?tC9-ShAQrU9$KgQ1Us}Yt-IP`s!XZ2&okxD?^9g$2wqT)XhAo} zC@5?Zqf_hAR#ZF9db-Mk*3*Z2>Kvy+ZLM~>Yjnq;nvq%-#ee#%;(rYP6SHyfyP*80 zx%geT7M|^?c2|2$1vXNRz=Z2;gBSr?Z#9CE*+kqYU{N1)s<$SC701> z6326w>lm*+vwd2G{<+^1%-(Ri+o+5xM(~{f#lWvB=X^fpS23&eluUZ&<9KI|%oL01 zV&PI=**+!+l4(?)1Rd#KSE#&0S(}i!%StWc4&?M8A{I*cpf0rt&-8*9+|Ij+gZxfKcY`KJ#qP(dnfd>Q>nAZCh zB{-`V^il~R7WBNG9160f16TXE*7~+UdCzVJo-~$}XL}b&~7zp)G05l^x+U-&(>8}%tIJLtNR-z~YA~kja z*oLsHq4=xMG(!E0{*@tcW`Jt}CA`XoVQ5|+mEsZ*EmPI?KSheS5zNE%70QRt2Q-yLOyzRX3lo*KUdl79_( zj1>S}H2!gF;Ent0so_RyxTX%X$JgS+$y)qZL9Z6m1~x75PVg<<+1qq??1yz<-#iKV zEkD_763VqAjEPuFw^wauLs%;glZxn2wZG-PT7&g>QneYitl()b^Q67H?jAwRA5@Gr ztk;OJ?9(OrL5Wc*(?QjaxLRvdN}gJ6s3fziroI_q#t@i5wOCaxaR1I|xySBgS4qOq zS(TL87UhzB%tQDeL4OBrQz@Q+|2CgMoSxU+C%~V`nfLnygc$-)AYeWL_HNCRxvAYd z08a_L1IfEjz1NXiqvhn?HVqXRd8022wXk8|$chc>~O+As? z$~s`Vjw1TkD0@n(11={Uf!E(4aDga-*u$IC=PACGz%~L*on?L`XAw2%1+!IIuEq8{ zwODFh)|g>?Yb)D4=f!s9q}&_-4>PPaPp+CjP}}`z?aAkAsn66?pJ}8%GZ$RwUHfit z=5lZ5;!wSJd!u)IEhS$dvOKzlH#Gu3GjDLoVOxswCf%XGH!oyC9cic|HTQMd5xwF? 
z!0GE&5~mSSq>5Yu*@|f;^Q1uuQu8Xi7XhcQTS=To zM3E|T31lm#mCTa{B}mQV7!XVJqArPEM7ca@N$+?g*q)wO@b~)5z_v=TZRA1M=51TX zD~G8_{&Pzud&v9JHxHS!^(Xs9I%P$GUh>v&m3~yGgl5l39`a|(zE@iclO<-zpH4a8lLSkQXJ?&-3y6YzbY5oRZ_ z_g>xFP9QnzxMyV3pFK=HXwOdAi3LZBYPcFTtu(P}tgVI$r!Mqsf~fxv^s6+pxoj)8 zEl0AgU^jtfrJ>yf*8CTD$^fpCY#V{(WaHiYnEzt4Q8xE=jy1z43sc%;zS2p5oim^Q z+jt#xn(*{rAutZm44o{^V1e^lLOeo%Ex?kYSN}SJZxHw<0j63e_@u?As4iL*XUf!x z^I4``us%v5rrg=O9#iv3#g%Y8d)qlfE# zk2LxosddWNTxdZ}*SxMPYP#Wt2XWm>;xrlZQ^u(;w+!jcq5iOqq%f0GF9@)?Dr}3uLyX| zXr4M3pTJt$vD5_CLj3npp`pqX81$)ZE%kgo^?W1sJPeS_o4|&)zB}~5<)H^^4<4&M z^=y6UxyI0QwRQ4^o!N7d*9Lxpw$2UKRFCV5I@s{SgSc)baT*atlE@{Htr$--UM!&V zLLr=3Dnu0SB<5i@RgJ+)|8W|Mj}v$TpxI%aLB~A+nvs)*7qRas&n@m`nP4sg`j;tm zk-+Z}_!k7eOF;4#_zK|&&|1Io&jFm?0zHk;>P5dVbPu%c&^_cUuts3QY-c%h9SDW? zEGhuDGXY^boCKppFzO^2brWFoc?g@&g*yTwZh{R&u)#^dF$~SRi`~ZrZbAGk-SYac z!&qVaD`>v)Kt5<&3_Uy%!ug187qqv7XkMNO-jrUR2;P()o(OGf$GM#D$$C85hBDGZ z+0lklZJ|uHp^UarcDA9!k+f1fyRC@%%^ot`dM+}lp0(i>TKL+qQ-{%tCU_s{VqB4o& zXJAOCGkN{k3l(7DU@P^e98ab1+kY|LE)GeJr+7l(AO?2>6Sds_*PKv26GJ?P2z_ogd&(Ut0 z2=y- zH@&-Gq6&6^#h+HN%gpyd-I-Va_>?6U2E-lDlkuzN=YFo#N(|IwOyj^9c)Q1sKYHwV_R+DUkBvR5)7#PvKAbc1V=gus1|Iu3uo#4Z<@fJbKJRn) z4STk>n~zde#FO<{tikvcwCGjrA}~j-z_yNBqT;SnG)>{=B4&rNrgJ|jYr}!rh50P+ z5L?^=%NUtpPsiR^AQ68Hr5pbRz}i@_@}Antd+IAQjg@4mPLiYZ65!&~3w>)}OY(oP zRXa&xt4^+it=frOy0rJ1%8lJiRAUfB`pxjR!0*QPMSkD6FXDRUc#pmJdf5H18`Moi z$orf*$}io|(gu!iBd`7ffz1TY5qJ@x5+`1M-}db^;_;ZHWoyVb)e{(=Pz{Qi?q<@! 
zc~5z51Sjg`i-!IKdQ6?9kp9P% zLT`co?sp>ORS(2xkD$e1_k;6hWX8J|o2)pJAEl*ntuvPbb)*46#-^CEZ z{+pZ*F>NCi%61b_D_L{wcJO*xl|i&+O8*Z;L!Jpue{O=0ZJH&Ve*F(|H;6DF$(8Qo zX#XJ`oNkY9G7B2M00yz;qMuNrIPlG7)LKxt)!Y{rzQww*Gq9i@skOhZ_N;riXXtXz zP`zik(KC!O8@+F7aB*m%yYIrb*SF0F|4Fiu-e2!N(C9u;OL$zg+~X*XmV54_AG$|)m$oSsUhsA4Gtcl-S&sYu(I_da{*ZW=KJdE<{u}!+w6siL|4(lORE-DrGMjV zFD(q*^TxJ|hid~n>H|9(13UiZ{fhyAf5$Zi;06Jlxs*hXTY{@yJqvyPuc`dsV#JjJ zn=z8Buo+`nB`u>eFA~<39yLV89n)=JBXEOtz>qb5o5CAnhqp$5ur)y8?LG?c zVBrUYj|3ur6!rnOh?lRR?CZg(wHAgIDqMPB^zD zyGpYAD&g)Ocg*Z7+EyLI{i<58-3MBq9me{VpZAAj&9GgcsH{&`{(|&LJ08$aC!A`l z-t^xV2N<}^0h}u3T@E0f)@SpH4}}A0tLd;8Cl}yvJx;z|iVLn`Z~WR{j5l6ecef9i zx5E0dn;qLp1>3GecQSAkFLt9%7ntK-3IXR5(9MLyzkZatF<>u#@QhKKdhg$3y0u*E zcP-(=?r^5Fo%EP@Jq}QsmV6yxF5o`m$Ju^#{c}`_d;2+Gv~ZYAKUbt{R}h$vD; zE`e;tw32z!pajtvQ7_Lc(TlnydPEmeJzqNih4Y_(>5DJXW&$d)-Cgm_qUhGLcr}8+ zGs<&)FimGR1MV+>hhFoN>$()>28j>E^IZ(l`L*Wo3p0<&z;kI@OOA-X0QC<#`M1f8W@=DoxArdiOfhycgj);<}Yo!M`_O(PvxIN+;pcWN48?!cI5T7Nhx zDZoERa9fI1Y}twxHoP*IRxi^^@tWsL%Ps_Bzgv72f z{fBsn%?SI@)AYGYj~Q@Ehx&g73HNQiN}<01sHC2zPf5{!O!@@p=%D`Jfqm~Ez+T3L zMg7l-lF3EipwKr7oC3HM)Bh)hZ%5#|#6LKr-&)o(RFrKARL(u(-~j1&hOSJ+mI&uKieDHj^WJB#s;Pv>(SmG$t2s!@k}hlS>FD zm&9SBKPCGyV{$2z$py8U>Cs9Ga0%{)ZO}8*I}5*cOGc*uZ(?KDi8-_~owaV|3`OGG z1N7Bl=1<8#4`ZznE=fvk)cB$&^I0c!xjSS&Of$4gyQ>-A0R#GvcvK&ZjD-(!-CcgN zB`H}(`k5?0G4|0RWF*?#dQO9k!~-ra@-}e;TIkoW5_pHeE&}ZYp(XjpYBy7GZSLep z)_Zdg7vVKjZ#-c0j^5QyJCG^3+eioosZ@*F z%Yv>C1<|%p2v4m)EzTBr1HY|&&0p}n)8Y0abnbtFcoVe0|Bihysg~{h-|`k-pykkB zm@d!l{N2uv*xAQKx$lb|Cnc6mahw#*$?WH`7sg2^!ZICpadf*{Yg0;#J8h`s9WyDX10@NHezMtyOd)N)1vEeE+qf^)Grj>Y0g6(12a7mM*V04$)3 zyGP%`xXgceZsfhXlBio2(49F%xGHVBbd_sUNiG87Z0saE8#`$(pm!{z?>gK|=I)re zfF8#JdIIl$wp4+a~i%FssAN%++y++>q7FX5v>bh_ban8 z{29W{0K86b%i4g$BQ{b4$lB11af<+3!paeL$)z6suZiRcfujWMl^$|!w(Jt-Uv8f0 zWHUX!I|#qU=~7{;Tv>D6;Xo*--b6VD?e}jrMx+;dH{GEzV$FuHf8?tlnV&UF3Iz_5 z8UA4GjgP<#zlO~4Ysdo6Z1LV;E(SAv*Xj$Y*HiV*!A9rc{BWZ)b8)26d0;Md6%xq! 
z>*EWFzJ=bQ+VH;bgaGJTv41h7^rgwxPX`kBQLUZr!7s0^e^*W2)lhASsJJdW!dEOZ zktzapmLv!~qnsE?B9{|`yi#^vh(yez3+n!wy1(Ioh>Gj7BYedo6R9FlXGwyQ>-%>7;@&Td=4~b9caF84z zFY_N<21(-U@Rayccerm?(iPk8+m+}b@uk}A+m$6g=RMhyrY!Mw3W<-t+G0z5JytKk zeaKmoaCQ7CNqjvniDm_tXy78rB|K+K?B~?WB)mRP3D23s?gd%G^QMv|yp^VlWK!$b zR#j~Y@78^F8xo#2K;OA?O1``g;2^bBAFO75u;v$|58U_f(g9P|R5frG!i7S|_JoY@ zyGExyCe||Z^g5!JrG9Pw9lpELmy6oIyAmDV-Szm|lhB#KYr@uB=AkJ%$D{~XB|~=G zx+OZ)9n;e0`0dmNwGHCSNy%j#LvyPy(a^K-&09C)`nxP1-BsMkUplr0D>~ir`&7L2Fcol+3c!wGg`MFq>0yX+Rjk)Ey7Dgw34Ls^MIWlYHM-*^t2oAxumD?j;N0br9A`CYXE(8a=c{jTo%U77AyAB^8s-@qGtf3rCM zV54u>T%3Pu`?fx;&A*S0uXFPcYx9p_OJCa-Gzd7!1m1~UeqLT0pz~VhC%f?`tz3KI z?CWRew_C3wJapkw;$KOx;!1i8yV`@f5WR}nZ+B14eO-1$uc&b0l?ga~-Adv#B8pUz zOCVb@tz@1wC_!o-$ADO(7j;SWBD#cz{s&;mx6;tv5{PRhLtcCJbJq>%2}^DJ3oyAj z=;^i*a#^~C1>Hk76=_ZxRz89m@{*IzmSLsiI3X{E+E7YTY#S;`N7ll^Z6kthjz3Me zjp&&Ex0tu_85WW%3SXv)oE-ZhmMniC0=}Y1dls+{!OIy9CrkOL*xPYNYv=O8r3V;t z=>&<=OqqX}u+l0$$Rr~qJ@f&XCS7hMosZRfw={aU)Kc%o2EC>L_ISk|&x03YX$ zIE*M*mIXtg^KI$Kzi``qrZ}$^#;%yx`L?dtPN>-OVq_aCN#((wTxq7RUXv0_{qqD&2G}?e`uWb+?=>4VZ11xRB^jQsHLg42bJDD*vpny>+0P>y_v*ZFFlr=4gtt1?#A}=;|`E zxbDIpP0~lM-4iVqUD;^D5s!3$PhVy9(|Nsc@_ZTx?UZzU^cE-LOyM+{^vP1OSUShf z+nJdvoGjq@9-Q<(nJbTQ7v%1#678&Vs?L?z)2HEPhckOjk7VYJGp7F-PvgkaTg~u2 z8LFS_z=uH37xNBN=n(>205DbjcObwyp*@WO+*&XNQHj{?szA}TN zJ25C$s>@l0l!Z<3WBA(e$7$d{PT&cEW+a={N)y?vPCFX)e@%eBLekNR|BDiC5cmm! 
z|4RS|$SJx?AVwfb;FFZFnZPiC?^)ot5HKDBg?*gjLhylF@PVr#eyHO55Kfu`paTv0 zp^Aig*k>P_*c;lur~ueU9uj86CWz6wi~u$PVft-?I1vEY1cVu|3Bp7GU=t9g(sfn}fP$!+{FC46i(+9DknSbEpPk_>CCekkxAjL?hX;YsgKog@*%WOLFRy{$0 zHsLmhX{R(k^Qga2n8>rks?4xqe#eog=rkKQ*R~SloY={n!M-oX&%EuTqG|C;KSrQL z;41{!z;h4Ao9aOvoHbL-KcKHfzu{7hzXrr{c)vDUX~ zQE3I&2zSFyIO2=?9`a!*HtTSr^8NQu_VN-SHpE7+DFl3%of>*;nW9d!?S zcWH^|W1%LF%ce==G!LtV0xWTj`3ju0Y+K}&OZeO2}R&P zihkeB?(FQ`9Ys@4;@Da)KhEQuZ)RtA=lgx%%nrZX*%_7KNIdmc?mt&a(tlyZIb=I< z?WYiUO;V*1N%g7zyl=!${sszi*6))F!I6-U=7dMWBu5!h;BO!wDMUx2Bu~zF6k;Q> zLgz?lAwCi(X~BG=uwrCIp=+dz#6$V+LeEG~p?9RWuySN&Vb#bgk{-_Y6;_X|_DO!} zAxTwUmej~4za+g0zw{VcLvo^!(;?)nB{?z3=@fF-ss3Y9Dn9WF$cP@-4AV%Cmh_}~ zT1y(IGrFcG3ng_TukBA}^~rIwbZ1i6jPX*@(2%c?G4D)fifZz)Cm%&dEpH4&pU&iS zYR1%zq?s|!7|Fqb!Q{`*e*^vwC%0?oviS)$S3H$0jhnerF_TX|e&2Dz2@Jba1JPgL z)f`RvtdODQwXB&6SW#8eb1!P?GuosT*NUVi>1?TVCZ}0(Rpa9+zqP`c$YwReNINZs zGP+*USv)0M@>7PUTj4|5Y-yrsTJmvdsTDbvJ5|gTA1R%J+E-v_90M`X$VW=qOg>9{ znNpt}xpp@MUXwIw#0L)sJP;!R)&H_IBC7$QK~)ACQiDLlY6z&JhJi*@1v))qkq*C~ z^lSOh({PQWlgUF59NRH8eD{ur?te6CU+P` zqp)oO`}f)aX`spFslH1Pdy}P9q%;ew5;V+zwA=%?&)i|x4?VC0x<2LC@m;V281`8* z+@4VbOC~?)KfM1$7HBPxQ_Ka3kPoNNk&@_FR2F53?n4y^y0@Il=nHGGqG zi@o3Ox&=O{JM8I6w})i!)wJOXdbw2PCa*&n9Q1bAA@IRbTO}@c(1}6`vQ!u=X_H#{065&WQd9#Ew4WI)O`mtC$ob?yAC8F z{P@9hljS{o_Z^t^ec(TMaMo9Te%to($vr6}`jQ<;9!z3rXUf=)e+N^>og`t%2@E^T z5U`ndrHla>@S;SY{eiueR3j~H*-b93guB%8P;kTa7z<;m9VBy$c$DS&$BFl%Hntl$D}9EDAg{1t?2w zPNAaOm4#fPET8flEK3{A8Y4s@{bsI+iltY_DD(prE7ZmMhj9u4eI+~*9>9PqlmLeR z16c5y#DfL(Mqt4=<@fOWdFaprTm+{406+p~2zZ#1E3)ygJiHiVpaMXG3{XKv?k1=Z zH1VCWk1+;MnxI0jM>Y;3VBtcX$ zm7uLwLMD!p_Tjk_*3wGIt4O>tR4Y?5Q76%(mO|}`bcTgara+NwKA7^AyKSwJ+)T%R z&=*!9Yr^23HHQEeZ-iFpNg(AoH>mGMuJOs;sVFz3Z%5V?l71w2Ah{FC4kQTH^}B$i z!gSa@0Ab6Q*6+c;+@L#1BXnrp#Yct>ypi^}N{7`wWUN$kqd4vt%`^<|xuU6^GmZ4P z4%S^(OS_rb-1uowOPE`}PaJPuwx!D01dTD)!y5)T9>(D4>xVD@POWRW-ZczkaA5k0 zmWd5zO;uSlM<@i~oLAP=xhPGfwB}?{9v0Tvj9g1A>uKGJRWId_skx!@xiR_&U&cVI_GNNL84drUJ`J@ktjswt^Jn 
z4IuP6Zo(2*dN*Epsn&Z(z4wmk$d%5XuYCSH`)Zxr>YdvbBwu8JkSnpZ7ltnAGy7_> z9rf6bYHY_sz{w*FSCREP<9AE83jOkzzigH4rWuGzf9&f%lnnehDIeMz{_*WTpdKZN z_nbUue+=})KWDt!gXfBS>RLS`FxUcuugI04$7=rl*3Toent{Jb6@JpJ=4RE*)G?@L z*CrJBY}LFp4}#Z{WAa5yysgD2j3ZPv-=wuESi^1W7Wkm6; z`V5jSK!~-vv+OZq5f-5lJEY$Z94Xs!WP`Fkh=qr+Fo$IJ>ANt6n6b8{IOvJ(YGo;| zBg42>7&j-=2H1whQ)$;syiFX;Tk|0RGk(UPiGSwsUja1PQSaK}n1{o4Ww@#g&l3s( zIBSZXNTZa_ISh~s%(N8GK*Ovoz zOVEha1-bzjPQ&CGv6E}W3ZAH^* zIyDdBSUUZ}L?(|8eu#zBX|yde96!(d!GBoqkVvmh}N4FuO>)Fx3m2sSSxF@Nyx1&I-b6>|k(h&BkD zk%eD&07N6lH8jF+JlPI2*dePM-?1m{r3w#{l?vsgJ;e^M+h`6}HaP18lBc!&xTYK4 z8FVy|h-rLZ3)5Kj=U_tD>+G2FRlswL!v{U4{Av&;mBCdq0e(jC`Q}ZOXCGr^;us@p zi1>WN$SE4So)WCIXtK5867-g;Fh44qFRmYz`AcDXj@1EXCo|`7>_-LD#gFC-T(3{< z@bm$!mC5;NKiFpfVto-KebM=e>5I6hF9M&=ejw&-;wVg&s=vUO+47gpSYdma&hc3v zWEe1COWf=*6PnB!WL6X$=(B#ESfB@?)CatC{ch@9zuS^wsZoY#ZeD^tQHw74Z~+$a zWKL$_`-o%O`dX}O`?R#k4R6AFQr z=X`sH8p&b|Eu&N>WC_A)3X~O!Qy~H~jW(3+Rb_jfPzVU;9G5cRo~=f*7(>e_l?+)z z6*L9P3dN}qftg0z_3#|mA*=_QuexQIGJd|VXn*n&Xv&A`VOqS|24)&=ph?4Bf(Oi9 zend2z&oh$Ya;Y0@HfJ{TSFPECOtZm4o?oG6Q#^ejefs)9jo?hS8s++dJO?i>j&M4N zR*(J4wfe8+IUw4s^QfMNcjUPKyFm4mNU}&&BpQ-YAm!f209cZRG)Y?nj85eB(~t!# zDE+yjGr3H(UeVEddJYAQ0clnzbd5}tI#IRJ%tRjMYzLRDO`Ib4Vm%uiWl~FQt|vDC z_9>AqRb@+ELE7$|Z_iL8S&X4&l!}EcK|W1^vO;kxL|~@ThO)QnoOLI_QfOda+1oCw zMzR<~%P18BS%O%a0%e8bREWULM;B2a{|nv{aqdv{s{vKc`A)&QPVb6WR3`39T-QkA zjDt7$nM7`J%p!&otb63l7}s?1Dl%9{>s|B$EvJXL=mi{E%>22*;uxh3r!ZgEhD$Wk zA{PVpVi#OP=^AMt!D1KJNK<2~l#fn@E8!V#?$GHASdZx&i#ajxqM0eBqNttR@E=j* zVBAJ3G8nnhN)(J-zTcQRsU|8>SlF}zIh%}Jz6632a#SMTnmuw|F2U`ocG*y%L+w_3 zDjhkx1~s;X#X>P}ecqV81~meG7=`Oq)Lu^?p!)mt^}$NF57bp|cq*d$X@A)7^kUCN zh&&g4pP1)@?~~PBpZHB&IBOrG0|!<{teNVBl@XnGkHsr-<3Bu4y!$4FDsgy|)*`p~ zCaLR|I0~-u$Y!IUiIcVV;eGyLdZ^`F+P(z1_}8^ABVK(QF5%5?!`10+r@H>4OvcZU zx9Y6Be(ewjP zKKa;_RuEi_dDRMPg-kA=dllRs&RqU3oXdx~aypj}0e4yYfMI6L2?M@uQZ;wEIV?uk z&%j-o4Y(@}$xOV*BLWNE$;`Ztjzt1aA6y)qLfgo)Zy9UTPeCawtZF7S*U&!?F~Fuf zv8muhp-qK=+f=fG9vaUri-#6ifxK3Pi2&(k{>1NL)dW-rQ7N$3+=d2Y2C| zhX$wU+pdwcU8a8y$sm#f5Z+Z|FIA_0o4xvZ 
z&!L~503Y{GtTF;vmHurSK>je?qQj% zKTB-5pjEf+d3(=~9;)ttq`L3XpBS~ov3lYdLG8_cpAY4g0A~U9MDT8iX5U0}3M&W! zTDB$6N8-_FC_U31_|r)Ae69TD@`bGzQ?uXnD@JXQ+`Z#?UfK zWkQx9oTflop*R&HFwvbe@E3P9o&8bB+rG8!g3hS%8(LJS++fGzBP2Y))Zp5TI<7 zr(G1%X%-DI3S}V-WrgCbYR1_zKuo%$u+r-|4slN*c^b(xNXC&ENM1qmEE4>Ng3cF? zUSLOZ%Sq|QgQ-C&!$-#XXwMk3~4lPK{gw}zQ(VfB$)M0mz_Z}49?j^VO0a;U*d7*IOlZbIo{6PI<`2WM$Riu=40Uqz3@ z1wIJ20w2CahP&o|zzx0%o-gi{5&6if+18xGSEOy^37^xzgfjJ2z)~KVP*!ZL9Wimr z&OSVn5H68JwC%muV`J?g;OsE;w>-vtA8-dc=p6uW!(zQ3BfZ~=_v%*f(>@jrK!zffLSfSGyoeXQ`PW@e!ReG-J~Wh7gGP~SUPDT9lM%NglR)`ZO?K%_mb4CYj- zTSsr9709Tn6@qIhd86{xzscFjsUO()E{t7&x%FRtB{($el=2J&`8& zW}`>B&{($a;pe>cUZSV5a#N#wYh(S^g|NILdQ}2)9m%x35S1c5UyFY^ z{;FA1*4LHwRb_o6zWSZ`rn&g0T6}XozPYNjIN$Bt{7zqLt}pey+kVhHd*|gl-|4?+ zuK%8YKK{ec|72|L;Ip;<=j#2>)%r&2eIwN_@th7dx>vu`y=AU@ORalby?fhqxSxXEDE`-6iaMQX=ZE50}_Ph(G*}7_*q>7KSW^~yxkl)opWZh z)6Fx|Y^Iv>uqbe{QY^7KrJ1cI4@eNIrYXQI@UyxEeio&Z2|S=+0Oo%_MgWHYc%Vj999sI|6W{4#VAI-D|JUqV(b@;2L zV8^SXy)h580?0JOO~r7nfuIX-aQhS&F2S}AK~U(SO4x>0FwFqaswGT9r~%Nb1G$@& zdyIFpjxi|Yg{cHu@v?ErAvr1n5CUk$^SLTi-~(tCYr`pkrZ!w*Z;z^-Hnf826_`4S z*w89!qT;g;k0ews{wDbU}+F$M?W4y^c)ccAOm9dLRb;Q6xd0D)KC zurh99u&hiB-g}M!D&J}SD6~hHk}a2r z`o}1si9$v{V|CfPXlSavQIwq zgV!T2)k{#A{wiFn{&ggXa`Y2O{sajEG5s4z{sPH2k^ChRcMad-9^w$KHPt~q!r#WS zuK}^-Og3xk7cjm!?&<&*7kAko5q}16hy%Pp67C_EhCR5>KyRBR3dM+rm)ZFqDAu~f zDAZ*9lm}-Qj=#S4N5`s(Beleldg2JWHoKRB$lgn=YV@xD=0lCOn;SiA8XLDX5~~(E zLJ0SOTt@wmu9|9q|QM7{4s zwM#r(LcY7|%3W3OImbokl@<(?zvg679u|dMR*EGyr!=#*2G<7Uf}4$YrHiVslC}TT330AT*Dr0JFf)>Js=_bP>phGkYHw zpVh|=^6{T%RRSCYjb0Wuc?4foJMby^il?$NogBDd8LVpwR}>q3Mk)~ipYG&6=X+CQl?Z^( zPULO^pK%jE53`Rk)-T?=3Han?yP#5liU2-&KIR1zKgR&@8E?ZWz{)mU!tQHo!UmsF zb%okhiQ3?^!`$Xv8{BEWWeh}CI=pbx8?(V@7{F%)`a3k%>&SyU&`s|E`1Ka+{T|Z$ zy|;Sb={W$N%j$gsJXa#{90K4uM1iM%5yVpH>EE}{^68dOvwS)I&mp-S*_qs%?8hJ2 zV>Rkv7=WCKENt9@d&Yqy3;kLS#@w?A8Wf6nU|}ni$$Wi7iwFg0n^w%6%xmyE^Ul7N z?O`U40V3A?TTCNZ^Y1XURIC|wt53xQys6KEB*5{o%j{A>C$7EYKxZq|iJ~~jY2S|W zBB--zf<`VpfQRnNs-wrf3Zl1|51Oo^;fm}xdkW*P6zqd 
z!?ncWdgAbMk#T!pWxD$K@%rP>%pH8D*8gn1|JhpKbM?OGs$JsQ61=w90D|oh0oxO> zEex8sDq!0gP#zZLOR+>Ihov;Lwd4VQp=z1}%mP0zSt!y_QgtO&ZGX|MpY#PQO7Az_DfLFESa?HDd z*e&?_ii|$dWx>~uCBxTF0-)k80aW5Q2452%IOzi2<-yk#48C@$-M=RIx^x4A-hrN5 zcL0!wecS|u?L`Q?3n1*SW(d1ykNy_apno69-ylICJAqS8@BjUm-v9o04v}TDseq1< z&+0Hsp1dSr@YmY`rla`1^ z^?v|SZD431UplpX6iPQWu_u)c@(&ZzAh5i({nWF|^>t{K@!8)lv>ESf^sW2m*~V({ zp00;!@&rzkCvciP5uFaW)8s}?SyxxqRh4xgYMT5?tovKAA>@X7Y{NUTEpxFge|@CB zb?@7UfB4Yc*86L*!}ZwVs{#LNs1FG2q0!x*OotXiQqLN3`utPjWNbKYU*667$#WUg zt0QpqD9mac#{olFxcLC2Su&Xf3TKSTVm4RO{}lP&K=K15|Aa(X(K(H2NKmGMjsSi| z=lxar{&#~NFF#)ou3qrRf~f@w$kl)pTEkM-OQD#}&>g&EK?343V9Iuvp(ohCAOUe1 zFs0vRSQFg2AOUe1oD`BwdVn-HwVte3wG?L$e6q2!wI)rsj2)mmHGo@UK-bRduyE3f zup}#Nr%@xH^jzH8ZJ%}>to?Z*5C8n-v{sm;3xhiCe`P}c>r9FKOBA|YEqX_-gq`c` zEkpO5P%z6WNdHd`^+}+wQYTIb9Q&;}PAtJbG^OGwhICx=NB8Ez6|v-ch5Pn_au3}F z=(GAyfCX1`8V>`7on?K#1%JRN!>&9)qLR<&-cTXbd7k_W$8Kr8}zLb0N0D2fGV3ldG6^-OYH=*XM(E#LeU?rd@Y4_CMqE!4uV* OUM1<*+vJjG?f(Hd7i=#8 literal 0 HcmV?d00001 diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py new file mode 100644 index 0000000..f9269e2 --- /dev/null +++ b/backend/tests/conftest.py @@ -0,0 +1,86 @@ +""" +Shared test fixtures for Weibo-HotSign backend tests. + +Uses SQLite in-memory for database tests and a simple dict-based +fake Redis for refresh-token tests, so no external services are needed. 
+""" + +import asyncio +import sys +import os +from typing import AsyncGenerator + +import pytest +import pytest_asyncio +from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine +from sqlalchemy.orm import sessionmaker + +# Ensure backend/ is on sys.path so `shared` and `app` imports work +sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) + +# --------------- override shared settings BEFORE any app import --------------- +os.environ["DATABASE_URL"] = "sqlite+aiosqlite://" +os.environ["REDIS_URL"] = "redis://localhost:6379/0" +os.environ["JWT_SECRET_KEY"] = "test-secret-key" +os.environ["COOKIE_ENCRYPTION_KEY"] = "test-cookie-key" + +# Create the test engine BEFORE importing shared.models so we can swap it in +TEST_ENGINE = create_async_engine("sqlite+aiosqlite://", echo=False) +TestSessionLocal = sessionmaker(TEST_ENGINE, class_=AsyncSession, expire_on_commit=False) + +# Now patch shared.models.base module-level objects before they get used +import shared.models.base as _base_mod # noqa: E402 + +_base_mod.engine = TEST_ENGINE +_base_mod.AsyncSessionLocal = TestSessionLocal + +from shared.models.base import Base # noqa: E402 +from shared.models import User # noqa: E402 + + +@pytest.fixture(scope="session") +def event_loop(): + """Create a single event loop for the whole test session.""" + loop = asyncio.new_event_loop() + yield loop + loop.close() + + +@pytest_asyncio.fixture(autouse=True) +async def setup_db(): + """Create all tables before each test, drop after.""" + async with TEST_ENGINE.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + yield + async with TEST_ENGINE.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) + + +@pytest_asyncio.fixture +async def db_session() -> AsyncGenerator[AsyncSession, None]: + """Yield a fresh async DB session.""" + async with TestSessionLocal() as session: + yield session + + +# --------------- Fake Redis for refresh-token tests --------------- + +class FakeRedis: + 
"""Minimal async Redis stand-in backed by a plain dict.""" + + def __init__(self): + self._store: dict[str, str] = {} + + async def setex(self, key: str, ttl: int, value: str): + self._store[key] = value + + async def get(self, key: str): + return self._store.get(key) + + async def delete(self, key: str): + self._store.pop(key, None) + + +@pytest.fixture +def fake_redis(): + return FakeRedis() diff --git a/backend/tests/test_api_accounts.py b/backend/tests/test_api_accounts.py new file mode 100644 index 0000000..6361e2f --- /dev/null +++ b/backend/tests/test_api_accounts.py @@ -0,0 +1,214 @@ +""" +Tests for api_service account CRUD endpoints. +Validates tasks 4.1 and 4.2. +""" + +import pytest +import pytest_asyncio +from unittest.mock import patch +from httpx import AsyncClient, ASGITransport + +from shared.models import get_db +from tests.conftest import TEST_ENGINE, TestSessionLocal, Base, FakeRedis + + +@pytest_asyncio.fixture +async def client(): + """ + Provide an httpx AsyncClient wired to the api_service app, + with DB overridden to test SQLite and a fake Redis for auth tokens. 
+ """ + fake_redis = FakeRedis() + + async with TEST_ENGINE.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + # Import apps after DB is ready + from api_service.app.main import app as api_app + from auth_service.app.main import app as auth_app + + async def override_get_db(): + async with TestSessionLocal() as session: + yield session + + async def _fake_get_redis(): + return fake_redis + + api_app.dependency_overrides[get_db] = override_get_db + auth_app.dependency_overrides[get_db] = override_get_db + + with patch( + "auth_service.app.utils.security.get_redis", + new=_fake_get_redis, + ): + # We need both clients: auth for getting tokens, api for account ops + async with AsyncClient( + transport=ASGITransport(app=auth_app), base_url="http://auth" + ) as auth_client, AsyncClient( + transport=ASGITransport(app=api_app), base_url="http://api" + ) as api_client: + yield auth_client, api_client + + api_app.dependency_overrides.clear() + auth_app.dependency_overrides.clear() + + +async def _register_and_login(auth_client: AsyncClient, suffix: str = "1") -> str: + """Helper: register a user and return an access token.""" + reg = await auth_client.post("/auth/register", json={ + "username": f"acctuser{suffix}", + "email": f"acct{suffix}@example.com", + "password": "Str0ng!Pass1", + }) + assert reg.status_code == 201, f"Register failed: {reg.json()}" + resp = await auth_client.post("/auth/login", json={ + "email": f"acct{suffix}@example.com", + "password": "Str0ng!Pass1", + }) + login_body = resp.json() + assert resp.status_code == 200, f"Login failed: {login_body}" + # Handle both wrapped (success_response) and unwrapped token formats + if "data" in login_body: + return login_body["data"]["access_token"] + return login_body["access_token"] + + +def _auth_header(token: str) -> dict: + return {"Authorization": f"Bearer {token}"} + + +# ===================== Basic structure tests ===================== + + +class TestAPIServiceBase: + + 
@pytest.mark.asyncio + async def test_health(self, client): + _, api = client + resp = await api.get("/health") + assert resp.status_code == 200 + assert resp.json()["success"] is True + + @pytest.mark.asyncio + async def test_root(self, client): + _, api = client + resp = await api.get("/") + assert resp.status_code == 200 + assert "API Service" in resp.json()["data"]["service"] + + +# ===================== Account CRUD tests ===================== + + +class TestAccountCRUD: + + @pytest.mark.asyncio + async def test_create_account(self, client): + auth, api = client + token = await _register_and_login(auth) + + resp = await api.post("/api/v1/accounts", json={ + "weibo_user_id": "12345", + "cookie": "SUB=abc; SUBP=xyz;", + "remark": "test account", + }, headers=_auth_header(token)) + + assert resp.status_code == 201 + body = resp.json() + assert body["success"] is True + assert body["data"]["weibo_user_id"] == "12345" + assert body["data"]["status"] == "pending" + assert body["data"]["remark"] == "test account" + # Cookie plaintext must NOT appear in response + assert "SUB=abc" not in str(body) + + @pytest.mark.asyncio + async def test_list_accounts(self, client): + auth, api = client + token = await _register_and_login(auth, "list") + + # Create two accounts + for i in range(2): + await api.post("/api/v1/accounts", json={ + "weibo_user_id": f"uid{i}", + "cookie": f"cookie{i}", + }, headers=_auth_header(token)) + + resp = await api.get("/api/v1/accounts", headers=_auth_header(token)) + assert resp.status_code == 200 + data = resp.json()["data"] + assert len(data) == 2 + + @pytest.mark.asyncio + async def test_get_account_detail(self, client): + auth, api = client + token = await _register_and_login(auth, "detail") + + create_resp = await api.post("/api/v1/accounts", json={ + "weibo_user_id": "99", + "cookie": "c=1", + "remark": "my remark", + }, headers=_auth_header(token)) + account_id = create_resp.json()["data"]["id"] + + resp = await 
api.get(f"/api/v1/accounts/{account_id}", headers=_auth_header(token)) + assert resp.status_code == 200 + assert resp.json()["data"]["remark"] == "my remark" + + @pytest.mark.asyncio + async def test_update_account_remark(self, client): + auth, api = client + token = await _register_and_login(auth, "upd") + + create_resp = await api.post("/api/v1/accounts", json={ + "weibo_user_id": "55", + "cookie": "c=old", + }, headers=_auth_header(token)) + account_id = create_resp.json()["data"]["id"] + + resp = await api.put(f"/api/v1/accounts/{account_id}", json={ + "remark": "updated remark", + }, headers=_auth_header(token)) + assert resp.status_code == 200 + assert resp.json()["data"]["remark"] == "updated remark" + + @pytest.mark.asyncio + async def test_delete_account(self, client): + auth, api = client + token = await _register_and_login(auth, "del") + + create_resp = await api.post("/api/v1/accounts", json={ + "weibo_user_id": "77", + "cookie": "c=del", + }, headers=_auth_header(token)) + account_id = create_resp.json()["data"]["id"] + + resp = await api.delete(f"/api/v1/accounts/{account_id}", headers=_auth_header(token)) + assert resp.status_code == 200 + + # Verify it's gone + resp2 = await api.get(f"/api/v1/accounts/{account_id}", headers=_auth_header(token)) + assert resp2.status_code == 404 + + @pytest.mark.asyncio + async def test_access_other_users_account_forbidden(self, client): + auth, api = client + token_a = await _register_and_login(auth, "ownerA") + token_b = await _register_and_login(auth, "ownerB") + + # User A creates an account + create_resp = await api.post("/api/v1/accounts", json={ + "weibo_user_id": "111", + "cookie": "c=a", + }, headers=_auth_header(token_a)) + account_id = create_resp.json()["data"]["id"] + + # User B tries to access it + resp = await api.get(f"/api/v1/accounts/{account_id}", headers=_auth_header(token_b)) + assert resp.status_code == 403 + + @pytest.mark.asyncio + async def test_unauthenticated_request_rejected(self, client): 
+ _, api = client + resp = await api.get("/api/v1/accounts") + assert resp.status_code in (401, 403) diff --git a/backend/tests/test_api_signin_logs.py b/backend/tests/test_api_signin_logs.py new file mode 100644 index 0000000..43cc014 --- /dev/null +++ b/backend/tests/test_api_signin_logs.py @@ -0,0 +1,238 @@ +""" +Tests for api_service signin log query endpoints. +Validates task 6.1. +""" + +import pytest +import pytest_asyncio +from datetime import datetime, timedelta +from unittest.mock import patch +from httpx import AsyncClient, ASGITransport + +from shared.models import get_db, Account, SigninLog +from tests.conftest import TEST_ENGINE, TestSessionLocal, Base, FakeRedis + + +@pytest_asyncio.fixture +async def client(): + """ + Provide an httpx AsyncClient wired to the api_service app, + with DB overridden to test SQLite and a fake Redis for auth tokens. + """ + fake_redis = FakeRedis() + + async with TEST_ENGINE.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + # Import apps after DB is ready + from api_service.app.main import app as api_app + from auth_service.app.main import app as auth_app + + async def override_get_db(): + async with TestSessionLocal() as session: + yield session + + async def _fake_get_redis(): + return fake_redis + + api_app.dependency_overrides[get_db] = override_get_db + auth_app.dependency_overrides[get_db] = override_get_db + + with patch( + "auth_service.app.utils.security.get_redis", + new=_fake_get_redis, + ): + async with AsyncClient( + transport=ASGITransport(app=auth_app), base_url="http://auth" + ) as auth_client, AsyncClient( + transport=ASGITransport(app=api_app), base_url="http://api" + ) as api_client: + yield auth_client, api_client + + api_app.dependency_overrides.clear() + auth_app.dependency_overrides.clear() + + +async def _register_and_login(auth_client: AsyncClient, suffix: str = "1") -> str: + """Helper: register a user and return an access token.""" + reg = await 
auth_client.post("/auth/register", json={ + "username": f"loguser{suffix}", + "email": f"log{suffix}@example.com", + "password": "Str0ng!Pass1", + }) + assert reg.status_code == 201 + resp = await auth_client.post("/auth/login", json={ + "email": f"log{suffix}@example.com", + "password": "Str0ng!Pass1", + }) + login_body = resp.json() + assert resp.status_code == 200 + if "data" in login_body: + return login_body["data"]["access_token"] + return login_body["access_token"] + + +def _auth_header(token: str) -> dict: + return {"Authorization": f"Bearer {token}"} + + +async def _create_account(api: AsyncClient, token: str, weibo_id: str) -> str: + """Helper: create an account and return its ID.""" + resp = await api.post("/api/v1/accounts", json={ + "weibo_user_id": weibo_id, + "cookie": f"cookie_{weibo_id}", + }, headers=_auth_header(token)) + assert resp.status_code == 201 + return resp.json()["data"]["id"] + + +async def _create_signin_logs(db, account_id: str, count: int, statuses: list = None): + """Helper: create signin logs for testing.""" + if statuses is None: + statuses = ["success"] * count + + base_time = datetime.utcnow() + for i in range(count): + log = SigninLog( + account_id=account_id, + topic_title=f"Topic {i}", + status=statuses[i] if i < len(statuses) else "success", + signed_at=base_time - timedelta(hours=i), # Descending order + ) + db.add(log) + await db.commit() + + +# ===================== Signin Log Query Tests ===================== + + +class TestSigninLogQuery: + + @pytest.mark.asyncio + async def test_get_signin_logs_empty(self, client): + """Test querying logs for an account with no logs.""" + auth, api = client + token = await _register_and_login(auth, "empty") + account_id = await _create_account(api, token, "empty_acc") + + resp = await api.get( + f"/api/v1/accounts/{account_id}/signin-logs", + headers=_auth_header(token) + ) + assert resp.status_code == 200 + data = resp.json()["data"] + assert data["total"] == 0 + assert 
len(data["items"]) == 0 + + @pytest.mark.asyncio + async def test_get_signin_logs_with_data(self, client): + """Test querying logs returns data in descending order.""" + auth, api = client + token = await _register_and_login(auth, "data") + account_id = await _create_account(api, token, "data_acc") + + # Create logs directly in DB + async with TestSessionLocal() as db: + await _create_signin_logs(db, account_id, 5) + + resp = await api.get( + f"/api/v1/accounts/{account_id}/signin-logs", + headers=_auth_header(token) + ) + assert resp.status_code == 200 + data = resp.json()["data"] + assert data["total"] == 5 + assert len(data["items"]) == 5 + + # Verify descending order by signed_at + items = data["items"] + for i in range(len(items) - 1): + assert items[i]["signed_at"] >= items[i + 1]["signed_at"] + + @pytest.mark.asyncio + async def test_signin_logs_pagination(self, client): + """Test pagination works correctly.""" + auth, api = client + token = await _register_and_login(auth, "page") + account_id = await _create_account(api, token, "page_acc") + + # Create 10 logs + async with TestSessionLocal() as db: + await _create_signin_logs(db, account_id, 10) + + # Page 1, size 3 + resp = await api.get( + f"/api/v1/accounts/{account_id}/signin-logs?page=1&size=3", + headers=_auth_header(token) + ) + assert resp.status_code == 200 + data = resp.json()["data"] + assert data["total"] == 10 + assert len(data["items"]) == 3 + assert data["page"] == 1 + assert data["size"] == 3 + assert data["total_pages"] == 4 + + # Page 2, size 3 + resp2 = await api.get( + f"/api/v1/accounts/{account_id}/signin-logs?page=2&size=3", + headers=_auth_header(token) + ) + data2 = resp2.json()["data"] + assert len(data2["items"]) == 3 + assert data2["page"] == 2 + + @pytest.mark.asyncio + async def test_signin_logs_status_filter(self, client): + """Test status filtering works correctly.""" + auth, api = client + token = await _register_and_login(auth, "filter") + account_id = await 
_create_account(api, token, "filter_acc") + + # Create logs with different statuses + async with TestSessionLocal() as db: + statuses = ["success", "success", "failed_network", "success", "failed_already_signed"] + await _create_signin_logs(db, account_id, 5, statuses) + + # Filter by success + resp = await api.get( + f"/api/v1/accounts/{account_id}/signin-logs?status=success", + headers=_auth_header(token) + ) + assert resp.status_code == 200 + data = resp.json()["data"] + assert data["total"] == 3 + assert all(item["status"] == "success" for item in data["items"]) + + # Filter by failed_network + resp2 = await api.get( + f"/api/v1/accounts/{account_id}/signin-logs?status=failed_network", + headers=_auth_header(token) + ) + data2 = resp2.json()["data"] + assert data2["total"] == 1 + assert data2["items"][0]["status"] == "failed_network" + + @pytest.mark.asyncio + async def test_access_other_users_logs_forbidden(self, client): + """Test that users cannot access other users' signin logs.""" + auth, api = client + token_a = await _register_and_login(auth, "logA") + token_b = await _register_and_login(auth, "logB") + + # User A creates an account + account_id = await _create_account(api, token_a, "logA_acc") + + # User B tries to access logs + resp = await api.get( + f"/api/v1/accounts/{account_id}/signin-logs", + headers=_auth_header(token_b) + ) + assert resp.status_code == 403 + + @pytest.mark.asyncio + async def test_unauthenticated_logs_request_rejected(self, client): + """Test that unauthenticated requests are rejected.""" + _, api = client + resp = await api.get("/api/v1/accounts/fake-id/signin-logs") + assert resp.status_code in (401, 403) diff --git a/backend/tests/test_api_tasks.py b/backend/tests/test_api_tasks.py new file mode 100644 index 0000000..a12a68e --- /dev/null +++ b/backend/tests/test_api_tasks.py @@ -0,0 +1,226 @@ +""" +Tests for API_Service task management endpoints. 
+""" + +import pytest +import pytest_asyncio +from httpx import AsyncClient +from sqlalchemy.ext.asyncio import AsyncSession + +from shared.models import User, Account, Task +from auth_service.app.utils.security import create_access_token +from shared.crypto import encrypt_cookie, derive_key + + +@pytest_asyncio.fixture +async def test_user(db_session: AsyncSession) -> User: + """Create a test user.""" + user = User( + username="testuser", + email="test@example.com", + hashed_password="hashed_password", + ) + db_session.add(user) + await db_session.commit() + await db_session.refresh(user) + return user + + +@pytest_asyncio.fixture +async def test_account(db_session: AsyncSession, test_user: User) -> Account: + """Create a test account.""" + key = derive_key("test-cookie-key") + ciphertext, iv = encrypt_cookie("test_cookie_data", key) + + account = Account( + user_id=test_user.id, + weibo_user_id="123456", + remark="Test Account", + encrypted_cookies=ciphertext, + iv=iv, + status="pending", + ) + db_session.add(account) + await db_session.commit() + await db_session.refresh(account) + return account + + +@pytest_asyncio.fixture +async def auth_headers(test_user: User) -> dict: + """Generate JWT auth headers for test user.""" + token = create_access_token({"sub": test_user.id}) + return {"Authorization": f"Bearer {token}"} + + +@pytest.mark.asyncio +async def test_create_task_valid_cron( + db_session: AsyncSession, + test_user: User, + test_account: Account, + auth_headers: dict, +): + """Test creating a task with valid cron expression.""" + from api_service.app.main import app + + async with AsyncClient(app=app, base_url="http://test") as client: + response = await client.post( + f"/api/v1/accounts/{test_account.id}/tasks", + json={"cron_expression": "0 9 * * *"}, + headers=auth_headers, + ) + + assert response.status_code == 201 + data = response.json() + assert data["success"] is True + assert data["data"]["cron_expression"] == "0 9 * * *" + assert 
data["data"]["is_enabled"] is True + assert data["data"]["account_id"] == test_account.id + + +@pytest.mark.asyncio +async def test_create_task_invalid_cron( + db_session: AsyncSession, + test_user: User, + test_account: Account, + auth_headers: dict, +): + """Test creating a task with invalid cron expression.""" + from api_service.app.main import app + + async with AsyncClient(app=app, base_url="http://test") as client: + response = await client.post( + f"/api/v1/accounts/{test_account.id}/tasks", + json={"cron_expression": "invalid cron"}, + headers=auth_headers, + ) + + assert response.status_code == 400 + data = response.json() + assert data["success"] is False + + +@pytest.mark.asyncio +async def test_list_tasks( + db_session: AsyncSession, + test_user: User, + test_account: Account, + auth_headers: dict, +): + """Test listing tasks for an account.""" + # Create two tasks + task1 = Task(account_id=test_account.id, cron_expression="0 9 * * *", is_enabled=True) + task2 = Task(account_id=test_account.id, cron_expression="0 18 * * *", is_enabled=False) + db_session.add_all([task1, task2]) + await db_session.commit() + + from api_service.app.main import app + + async with AsyncClient(app=app, base_url="http://test") as client: + response = await client.get( + f"/api/v1/accounts/{test_account.id}/tasks", + headers=auth_headers, + ) + + assert response.status_code == 200 + data = response.json() + assert data["success"] is True + assert len(data["data"]) == 2 + + +@pytest.mark.asyncio +async def test_update_task( + db_session: AsyncSession, + test_user: User, + test_account: Account, + auth_headers: dict, +): + """Test updating a task (enable/disable).""" + task = Task(account_id=test_account.id, cron_expression="0 9 * * *", is_enabled=True) + db_session.add(task) + await db_session.commit() + await db_session.refresh(task) + + from api_service.app.main import app + + async with AsyncClient(app=app, base_url="http://test") as client: + response = await client.put( + 
f"/api/v1/tasks/{task.id}", + json={"is_enabled": False}, + headers=auth_headers, + ) + + assert response.status_code == 200 + data = response.json() + assert data["success"] is True + assert data["data"]["is_enabled"] is False + + +@pytest.mark.asyncio +async def test_delete_task( + db_session: AsyncSession, + test_user: User, + test_account: Account, + auth_headers: dict, +): + """Test deleting a task.""" + task = Task(account_id=test_account.id, cron_expression="0 9 * * *", is_enabled=True) + db_session.add(task) + await db_session.commit() + await db_session.refresh(task) + + from api_service.app.main import app + + async with AsyncClient(app=app, base_url="http://test") as client: + response = await client.delete( + f"/api/v1/tasks/{task.id}", + headers=auth_headers, + ) + + assert response.status_code == 200 + data = response.json() + assert data["success"] is True + + # Verify task is deleted + from sqlalchemy import select + result = await db_session.execute(select(Task).where(Task.id == task.id)) + deleted_task = result.scalar_one_or_none() + assert deleted_task is None + + +@pytest.mark.asyncio +async def test_access_other_user_task_forbidden( + db_session: AsyncSession, + test_account: Account, +): + """Test that users cannot access tasks from other users' accounts.""" + # Create another user + other_user = User( + username="otheruser", + email="other@example.com", + hashed_password="hashed_password", + ) + db_session.add(other_user) + await db_session.commit() + await db_session.refresh(other_user) + + # Create a task for test_account + task = Task(account_id=test_account.id, cron_expression="0 9 * * *", is_enabled=True) + db_session.add(task) + await db_session.commit() + await db_session.refresh(task) + + # Try to access with other_user's token + other_token = create_access_token({"sub": other_user.id}) + other_headers = {"Authorization": f"Bearer {other_token}"} + + from api_service.app.main import app + + async with AsyncClient(app=app, 
base_url="http://test") as client: + response = await client.put( + f"/api/v1/tasks/{task.id}", + json={"is_enabled": False}, + headers=other_headers, + ) + + assert response.status_code == 403 diff --git a/backend/tests/test_auth_service.py b/backend/tests/test_auth_service.py new file mode 100644 index 0000000..c14b0a3 --- /dev/null +++ b/backend/tests/test_auth_service.py @@ -0,0 +1,317 @@ +""" +Tests for auth_service: security utils, AuthService logic, and API endpoints. +Validates tasks 2.1 – 2.3. +""" + +import pytest +import pytest_asyncio +from unittest.mock import patch, AsyncMock +from fastapi import HTTPException + +from shared.models import User +from tests.conftest import TestSessionLocal, FakeRedis + +# Import security utilities +from auth_service.app.utils.security import ( + hash_password, + verify_password, + validate_password_strength, + create_access_token, + decode_access_token, +) +from auth_service.app.services.auth_service import AuthService +from auth_service.app.schemas.user import UserCreate, UserLogin + + +# ===================== Password utilities ===================== + + +class TestPasswordUtils: + + def test_hash_and_verify(self): + raw = "MyP@ssw0rd" + hashed = hash_password(raw) + assert verify_password(raw, hashed) + + def test_wrong_password_rejected(self): + hashed = hash_password("Correct1!") + assert not verify_password("Wrong1!", hashed) + + @pytest.mark.parametrize( + "pwd, expected_valid", + [ + ("Ab1!abcd", True), # meets all criteria + ("short1A!", True), # 8 chars, has upper/lower/digit/special – valid + ("alllower1!", False), # no uppercase + ("ALLUPPER1!", False), # no lowercase + ("NoDigits!Aa", False), # no digit + ("NoSpecial1a", False), # no special char + ], + ) + def test_password_strength(self, pwd, expected_valid): + is_valid, _ = validate_password_strength(pwd) + assert is_valid == expected_valid + + def test_password_too_short(self): + is_valid, msg = validate_password_strength("Ab1!") + assert not is_valid + 
assert "8 characters" in msg + + +# ===================== JWT utilities ===================== + + +class TestJWT: + + def test_create_and_decode(self): + token = create_access_token({"sub": "user-123", "username": "alice"}) + payload = decode_access_token(token) + assert payload is not None + assert payload["sub"] == "user-123" + + def test_invalid_token_returns_none(self): + assert decode_access_token("not.a.valid.token") is None + + +# ===================== Refresh token helpers (with fake Redis) ===================== + + +class TestRefreshToken: + + @pytest.mark.asyncio + async def test_create_verify_revoke(self, fake_redis): + """Full lifecycle: create → verify → revoke → verify again returns None.""" + + async def _fake_get_redis(): + return fake_redis + + with patch( + "auth_service.app.utils.security.get_redis", + new=_fake_get_redis, + ): + from auth_service.app.utils.security import ( + create_refresh_token, + verify_refresh_token, + revoke_refresh_token, + ) + + token = await create_refresh_token("user-42") + assert isinstance(token, str) and len(token) > 0 + + uid = await verify_refresh_token(token) + assert uid == "user-42" + + await revoke_refresh_token(token) + assert await verify_refresh_token(token) is None + + +# ===================== AuthService business logic ===================== + + +class TestAuthServiceLogic: + + @pytest_asyncio.fixture + async def auth_svc(self, db_session): + return AuthService(db_session) + + @pytest.mark.asyncio + async def test_create_user_success(self, auth_svc, db_session): + data = UserCreate(username="newuser", email="new@example.com", password="Str0ng!Pass") + user = await auth_svc.create_user(data) + assert user.username == "newuser" + assert user.email == "new@example.com" + assert user.hashed_password != "Str0ng!Pass" + + @pytest.mark.asyncio + async def test_create_user_weak_password_rejected(self, auth_svc): + # Use a password that passes Pydantic min_length=8 but fails strength check + data = 
UserCreate(username="weakuser", email="weak@example.com", password="weakpassword") + with pytest.raises(HTTPException) as exc_info: + await auth_svc.create_user(data) + assert exc_info.value.status_code == 400 + + @pytest.mark.asyncio + async def test_get_user_by_email(self, auth_svc, db_session): + data = UserCreate(username="findme", email="find@example.com", password="Str0ng!Pass") + await auth_svc.create_user(data) + found = await auth_svc.get_user_by_email("find@example.com") + assert found is not None + assert found.username == "findme" + + @pytest.mark.asyncio + async def test_check_user_exists(self, auth_svc, db_session): + data = UserCreate(username="exists", email="exists@example.com", password="Str0ng!Pass") + await auth_svc.create_user(data) + email_u, username_u = await auth_svc.check_user_exists("exists@example.com", "other") + assert email_u is not None + assert username_u is None + + +# ===================== Auth API endpoint tests ===================== + + +class TestAuthAPI: + """Integration tests hitting the FastAPI app via httpx.""" + + @pytest_asyncio.fixture + async def client(self, fake_redis): + """ + Provide an httpx AsyncClient wired to the auth_service app, + with DB session overridden to use the test SQLite engine. 
+ """ + from shared.models import get_db + from auth_service.app.main import app + from httpx import AsyncClient, ASGITransport + from tests.conftest import TEST_ENGINE, TestSessionLocal, Base + + async with TEST_ENGINE.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + async def override_get_db(): + async with TestSessionLocal() as session: + yield session + + async def _fake_get_redis(): + return fake_redis + + app.dependency_overrides[get_db] = override_get_db + + with patch( + "auth_service.app.utils.security.get_redis", + new=_fake_get_redis, + ): + async with AsyncClient( + transport=ASGITransport(app=app), base_url="http://test" + ) as ac: + yield ac + + app.dependency_overrides.clear() + + @pytest.mark.asyncio + async def test_register_and_login(self, client): + # Register + resp = await client.post("/auth/register", json={ + "username": "apiuser", + "email": "api@example.com", + "password": "Str0ng!Pass1", + }) + assert resp.status_code == 201 + body = resp.json() + assert body["success"] is True + assert body["data"]["username"] == "apiuser" + + # Login + resp = await client.post("/auth/login", json={ + "email": "api@example.com", + "password": "Str0ng!Pass1", + }) + assert resp.status_code == 200 + body = resp.json() + assert body["success"] is True + assert "access_token" in body["data"] + assert "refresh_token" in body["data"] + + @pytest.mark.asyncio + async def test_login_wrong_password(self, client): + await client.post("/auth/register", json={ + "username": "wrongpw", + "email": "wrongpw@example.com", + "password": "Str0ng!Pass1", + }) + resp = await client.post("/auth/login", json={ + "email": "wrongpw@example.com", + "password": "WrongPassword1!", + }) + assert resp.status_code == 401 + + @pytest.mark.asyncio + async def test_register_duplicate_email(self, client): + await client.post("/auth/register", json={ + "username": "dup1", + "email": "dup@example.com", + "password": "Str0ng!Pass1", + }) + resp = await 
client.post("/auth/register", json={ + "username": "dup2", + "email": "dup@example.com", + "password": "Str0ng!Pass1", + }) + assert resp.status_code == 409 + + @pytest.mark.asyncio + async def test_register_weak_password(self, client): + resp = await client.post("/auth/register", json={ + "username": "weakpwd", + "email": "weakpwd@example.com", + "password": "weakpassword", + }) + assert resp.status_code == 400 + + @pytest.mark.asyncio + async def test_me_endpoint(self, client): + await client.post("/auth/register", json={ + "username": "meuser", + "email": "me@example.com", + "password": "Str0ng!Pass1", + }) + login_resp = await client.post("/auth/login", json={ + "email": "me@example.com", + "password": "Str0ng!Pass1", + }) + token = login_resp.json()["data"]["access_token"] + + resp = await client.get( + "/auth/me", + headers={"Authorization": f"Bearer {token}"}, + ) + assert resp.status_code == 200 + body = resp.json() + assert body["data"]["username"] == "meuser" + assert body["data"]["email"] == "me@example.com" + + @pytest.mark.asyncio + async def test_refresh_endpoint(self, client): + await client.post("/auth/register", json={ + "username": "refreshuser", + "email": "refresh@example.com", + "password": "Str0ng!Pass1", + }) + login_resp = await client.post("/auth/login", json={ + "email": "refresh@example.com", + "password": "Str0ng!Pass1", + }) + refresh_token = login_resp.json()["data"]["refresh_token"] + + # Refresh + resp = await client.post("/auth/refresh", json={ + "refresh_token": refresh_token, + }) + assert resp.status_code == 200 + body = resp.json() + assert body["success"] is True + assert "access_token" in body["data"] + new_refresh = body["data"]["refresh_token"] + assert new_refresh != refresh_token # rotation + + # Old token should be revoked + resp2 = await client.post("/auth/refresh", json={ + "refresh_token": refresh_token, + }) + assert resp2.status_code == 401 + + @pytest.mark.asyncio + async def test_me_without_token(self, client): + 
resp = await client.get("/auth/me") + assert resp.status_code in (401, 403) + + @pytest.mark.asyncio + async def test_unified_error_format(self, client): + """Verify error responses follow the unified format.""" + resp = await client.post("/auth/login", json={ + "email": "nobody@example.com", + "password": "Whatever1!", + }) + body = resp.json() + assert body["success"] is False + assert body["data"] is None + assert "error" in body diff --git a/backend/tests/test_shared.py b/backend/tests/test_shared.py new file mode 100644 index 0000000..649a6de --- /dev/null +++ b/backend/tests/test_shared.py @@ -0,0 +1,171 @@ +""" +Tests for the shared module: crypto, response format, and ORM models. +Validates tasks 1.1 – 1.5 (excluding optional PBT task 1.4). +""" + +import pytest +import pytest_asyncio +from sqlalchemy import select + +from shared.crypto import derive_key, encrypt_cookie, decrypt_cookie +from shared.response import success_response, error_response +from shared.models import User, Account, Task, SigninLog + +from tests.conftest import TestSessionLocal + + +# ===================== Crypto tests ===================== + + +class TestCrypto: + """Verify AES-256-GCM encrypt/decrypt round-trip and error handling.""" + + def setup_method(self): + self.key = derive_key("test-encryption-key") + + def test_encrypt_decrypt_roundtrip(self): + original = "SUB=abc123; SUBP=xyz789;" + ct, iv = encrypt_cookie(original, self.key) + assert decrypt_cookie(ct, iv, self.key) == original + + def test_different_plaintexts_produce_different_ciphertexts(self): + ct1, _ = encrypt_cookie("cookie_a", self.key) + ct2, _ = encrypt_cookie("cookie_b", self.key) + assert ct1 != ct2 + + def test_wrong_key_raises(self): + ct, iv = encrypt_cookie("secret", self.key) + wrong_key = derive_key("wrong-key") + with pytest.raises(Exception): + decrypt_cookie(ct, iv, wrong_key) + + def test_empty_string_roundtrip(self): + ct, iv = encrypt_cookie("", self.key) + assert decrypt_cookie(ct, iv, self.key) 
== "" + + def test_unicode_roundtrip(self): + original = "微博Cookie=值; 中文=测试" + ct, iv = encrypt_cookie(original, self.key) + assert decrypt_cookie(ct, iv, self.key) == original + + +# ===================== Response format tests ===================== + + +class TestResponseFormat: + """Verify unified response helpers.""" + + def test_success_response_structure(self): + resp = success_response({"id": 1}, "ok") + assert resp["success"] is True + assert resp["data"] == {"id": 1} + assert resp["message"] == "ok" + + def test_success_response_defaults(self): + resp = success_response() + assert resp["success"] is True + assert resp["data"] is None + assert "Operation successful" in resp["message"] + + def test_error_response_structure(self): + resp = error_response("bad", "VALIDATION_ERROR", [{"field": "email"}], 400) + assert resp.status_code == 400 + import json + body = json.loads(resp.body) + assert body["success"] is False + assert body["data"] is None + assert body["error"]["code"] == "VALIDATION_ERROR" + assert len(body["error"]["details"]) == 1 + + +# ===================== ORM model smoke tests ===================== + + +class TestORMModels: + """Verify ORM models can be created and queried with SQLite.""" + + @pytest.mark.asyncio + async def test_create_user(self, db_session): + user = User( + username="testuser", + email="test@example.com", + hashed_password="hashed", + ) + db_session.add(user) + await db_session.commit() + + result = await db_session.execute(select(User).where(User.username == "testuser")) + fetched = result.scalar_one() + assert fetched.email == "test@example.com" + assert fetched.is_active is True + + @pytest.mark.asyncio + async def test_create_account_linked_to_user(self, db_session): + user = User(username="u1", email="u1@x.com", hashed_password="h") + db_session.add(user) + await db_session.commit() + + acct = Account( + user_id=user.id, + weibo_user_id="12345", + remark="test", + encrypted_cookies="enc", + iv="iv123", + ) + 
db_session.add(acct) + await db_session.commit() + + result = await db_session.execute(select(Account).where(Account.user_id == user.id)) + fetched = result.scalar_one() + assert fetched.weibo_user_id == "12345" + assert fetched.status == "pending" + + @pytest.mark.asyncio + async def test_create_task_linked_to_account(self, db_session): + user = User(username="u2", email="u2@x.com", hashed_password="h") + db_session.add(user) + await db_session.commit() + + acct = Account( + user_id=user.id, weibo_user_id="99", remark="r", + encrypted_cookies="e", iv="i", + ) + db_session.add(acct) + await db_session.commit() + + task = Task(account_id=acct.id, cron_expression="0 8 * * *") + db_session.add(task) + await db_session.commit() + + result = await db_session.execute(select(Task).where(Task.account_id == acct.id)) + fetched = result.scalar_one() + assert fetched.cron_expression == "0 8 * * *" + assert fetched.is_enabled is True + + @pytest.mark.asyncio + async def test_create_signin_log(self, db_session): + user = User(username="u3", email="u3@x.com", hashed_password="h") + db_session.add(user) + await db_session.commit() + + acct = Account( + user_id=user.id, weibo_user_id="77", remark="r", + encrypted_cookies="e", iv="i", + ) + db_session.add(acct) + await db_session.commit() + + log = SigninLog( + account_id=acct.id, + topic_title="超话A", + status="success", + ) + db_session.add(log) + await db_session.commit() + + result = await db_session.execute( + select(SigninLog).where(SigninLog.account_id == acct.id) + ) + fetched = result.scalar_one() + assert fetched.status == "success" + assert fetched.topic_title == "超话A" diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..1cd75a9 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,170 @@ +version: '3.8' + +services: + # Redis缓存服务 + redis: + image: redis:7-alpine + container_name: weibo-redis + ports: + - "6379:6379" + volumes: + - redis_data:/data + networks: + - weibo-network + + # 
Nginx反向代理 + nginx: + image: nginx:alpine + container_name: weibo-nginx + ports: + - "80:80" + - "443:443" + volumes: + - ./nginx/nginx.conf:/etc/nginx/nginx.conf + - ./nginx/ssl:/etc/nginx/ssl + depends_on: + - api-gateway + - frontend + networks: + - weibo-network + + # 前端React应用 + frontend: + build: + context: ./frontend + dockerfile: Dockerfile + container_name: weibo-frontend + ports: + - "3000:3000" + environment: + - REACT_APP_API_BASE_URL=http://localhost/api/v1 + depends_on: + - api-gateway + networks: + - weibo-network + + # API网关和主API服务 + api-gateway: + build: + context: ./backend + dockerfile: Dockerfile + target: api_gateway + container_name: weibo-api-gateway + ports: + - "8000:8000" + environment: + - DATABASE_URL=mysql+aiomysql://weibo:123456789@XX.XX.XX.XX/weibo + - REDIS_URL=redis://redis:6379 + - JWT_SECRET_KEY=your-super-secret-jwt-key-here + - ENVIRONMENT=development + depends_on: + - redis + networks: + - weibo-network + + # 认证服务 + auth-service: + build: + context: ./backend + dockerfile: Dockerfile + target: auth_service + container_name: weibo-auth-service + ports: + - "8001:8000" + environment: + - DATABASE_URL=mysql+aiomysql://weibo:123456789@XX.XX.XX.XX/weibo + - JWT_SECRET_KEY=your-super-secret-jwt-key-here + networks: + - weibo-network + + # 任务调度服务 + task-scheduler: + build: + context: ./backend + dockerfile: Dockerfile + target: task_scheduler + container_name: weibo-task-scheduler + environment: + - DATABASE_URL=mysql+aiomysql://weibo:123456789@XX.XX.XX.XX/weibo + - REDIS_URL=redis://redis:6379 + depends_on: + - redis + networks: + - weibo-network + + # 签到执行Worker + signin-executor: + build: + context: ./backend + dockerfile: Dockerfile + target: signin_executor + container_name: weibo-signin-executor + environment: + - DATABASE_URL=mysql+aiomysql://weibo:123456789@XX.XX.XX.XX/weibo + - REDIS_URL=redis://redis:6379 + - PROXY_POOL_URL=http://proxy-pool:8080 + - BROWSER_AUTOMATION_URL=http://browser-automation:3001 + depends_on: + - 
redis + networks: + - weibo-network + + # 通知中心服务 + notification-hub: + build: + context: ./backend/notification_hub + dockerfile: Dockerfile + container_name: weibo-notification-hub + ports: + - "8002:8000" + environment: + - REDIS_URL=redis://redis:6379 + depends_on: + - redis + networks: + - weibo-network + + # 浏览器自动化服务 + browser-automation: + build: + context: ./backend/browser_automation_service + dockerfile: Dockerfile + container_name: weibo-browser-automation + ports: + - "3001:3000" + environment: + - PLAYWRIGHT_BROWSERS_PATH=/app/browsers + volumes: + - browser_data:/app/browsers + networks: + - weibo-network + + # 代理池管理器 + proxy-pool: + build: + context: ./elk-stack/proxy_pool + dockerfile: Dockerfile + container_name: weibo-proxy-pool + ports: + - "8080:8080" + networks: + - weibo-network + + # 浏览器指纹生成器 + fingerprint-generator: + build: + context: ./browser_fingerprint_generator + dockerfile: Dockerfile + container_name: weibo-fingerprint-generator + ports: + - "8081:8080" + networks: + - weibo-network + +volumes: + redis_data: + browser_data: + +networks: + weibo-network: + driver: bridge diff --git a/init-db.sql b/init-db.sql new file mode 100644 index 0000000..f1d2b79 --- /dev/null +++ b/init-db.sql @@ -0,0 +1,64 @@ +-- Weibo-HotSign Database Initialization Script for MySQL +-- Create tables according to development document specification + +-- Users table +CREATE TABLE IF NOT EXISTS users ( + id CHAR(36) PRIMARY KEY, + username VARCHAR(50) UNIQUE NOT NULL, + email VARCHAR(255) UNIQUE NOT NULL, + hashed_password VARCHAR(255) NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + is_active BOOLEAN DEFAULT TRUE +); + +-- Accounts table +CREATE TABLE IF NOT EXISTS accounts ( + id CHAR(36) PRIMARY KEY, + user_id CHAR(36) NOT NULL, + weibo_user_id VARCHAR(20) NOT NULL, + remark VARCHAR(100), + encrypted_cookies TEXT NOT NULL, + iv VARCHAR(32) NOT NULL, + status VARCHAR(20) DEFAULT 'pending', + last_checked_at TIMESTAMP NULL, + created_at TIMESTAMP 
DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE +); + +-- Tasks table +CREATE TABLE IF NOT EXISTS tasks ( + id CHAR(36) PRIMARY KEY, + account_id CHAR(36) NOT NULL, + cron_expression VARCHAR(50) NOT NULL, + is_enabled BOOLEAN DEFAULT TRUE, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (account_id) REFERENCES accounts(id) ON DELETE CASCADE +); + +-- Signin logs table +CREATE TABLE IF NOT EXISTS signin_logs ( + id BIGINT AUTO_INCREMENT PRIMARY KEY, + account_id CHAR(36) NOT NULL, + topic_title VARCHAR(100), + status VARCHAR(20) NOT NULL, + reward_info JSON, + error_message TEXT, + signed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (account_id) REFERENCES accounts(id) +); + +-- Create indexes for better performance +CREATE INDEX idx_users_email ON users(email); +CREATE INDEX idx_users_username ON users(username); +CREATE INDEX idx_accounts_user_id ON accounts(user_id); +CREATE INDEX idx_accounts_status ON accounts(status); +CREATE INDEX idx_tasks_account_id ON tasks(account_id); +CREATE INDEX idx_tasks_is_enabled ON tasks(is_enabled); +CREATE INDEX idx_signin_logs_account_id ON signin_logs(account_id); +CREATE INDEX idx_signin_logs_signed_at ON signin_logs(signed_at); +CREATE INDEX idx_signin_logs_status ON signin_logs(status); + +-- Insert sample data for testing (optional) +-- Note: UUIDs must be provided by the application +-- INSERT INTO users (id, username, email, hashed_password) VALUES +-- ('your-uuid-here', 'testuser', 'test@example.com', '$2b$12$hashed_password_here'); diff --git a/开发文档.txt b/开发文档.txt new file mode 100644 index 0000000..f78587e --- /dev/null +++ b/开发文档.txt @@ -0,0 +1,292 @@ +好的,这是一份为专业 Coding AI 模型或开发者编写的详细开发文档 (Development Document)。文档遵循常见的软件工程规范,旨在将一个概念转化为可实施的技术蓝图。 + +--- + +## **项目名称: Weibo-HotSign** + +### **1. 项目概述 (Project Overview)** + +#### 1.1. 
项目简介 +Weibo-HotSign 是一个分布式的、面向多用户的微博超话智能签到系统。它旨在通过自动化的方式,帮助用户定时、稳定地完成微博超话的签到任务,以获取积分和经验值。项目的核心挑战在于应对微博复杂的反爬虫机制,确保长期运行的稳定性和成功率。 + +#### 1.2. 核心价值主张 +* **多账户管理**: 为用户提供一个集中的平台来管理多个微博账号的签到任务。 +* **高稳定性与反爬虫**: 采用动态IP代理池、浏览器指纹模拟、智能Cookie管理及高级JS逆向(必要时)等技术,最大限度规避微博的风控策略,保障签到任务的成功率。 +* **用户友好**: 提供Web UI进行可视化管理,避免用户直接操作代码或配置文件。 +* **可观测性**: 实时日志、任务历史和状态监控,让用户清晰掌握每个账号的运作情况。 +* **通知系统**: 提供多种渠道的实时通知,让用户第一时间了解任务结果。 + +#### 1.3. 目标用户 +* 拥有多个微博账号,希望自动化完成日常签到的普通用户。 +* 希望通过技术手段研究反爬虫方案的开发者。 +* 需要将此类自动化功能集成到自己系统中的第三方服务(通过API)。 + +--- + +### **2. 系统架构 (System Architecture)** + +本项目采用基于微服务的架构,以实现模块间的解耦、独立部署和水平扩展。 + +#### 2.1. 架构图 +```mermaid +graph TD + subgraph User Interaction Layer + A[Web Frontend (React/Vue)] + end + + subgraph Backend Services Layer + B[API Gateway / Main API Service (FastAPI)] + C[Authentication Service (FastAPI)] + D[Task Scheduler Service (Celery Beat)] + E[Sign-in Executor Worker (Celery Worker)] + F[Notification Hub Service (FastAPI)] + G[Browser Automation Service (Node.js/Python)] + end + + subgraph Core Infrastructure Layer + H[(Message Queue: Redis/RabbitMQ)] + I[(Cache & Session Store: Redis)] + J[(Relational DB: PostgreSQL)] + K[(Proxy Pool Manager)] + L[(Browser Fingerprint Generator)] + end + + subgraph External Systems + M[Weibo.com] + N[User Notification Channels (ServerChan, Email, etc.)] + O[Payment Gateway (Future)] + end + + A -->|HTTPS| B; + B -->|AuthN/AuthZ| C; + A -->|API Calls| B; + + D -->|Publishes Task| H; + E -->|Consumes Task| H; + + E -->|Updates Status| J; + E -->|Reads Config| J; + E -->|Logs Result| J; + + E -->|Stores/Retrieves Session| I; + D -->|Stores Schedule| J; + + E -->|Requests Proxy| K; + K -->|Provides Proxy IP| E; + + E -->|Requests Fingerprint| L; + L -->|Provides Fingerprint| E; + + E -->|Performs Action| M; + G -->|Executes JS & Extracts Data| M; + E -->|Delegates to| G; + + F -->|Sends Notification| N; + + B -->|Authenticated| O; + + style A fill:#D5F5E3 + style B fill:#EBDEF0 + style C fill:#EBDEF0 + style D 
fill:#EBDEF0 + style E fill:#EBDEF0 + style F fill:#EBDEF0 + style G fill:#EBDEF0 + style H fill:#FDF2E9 + style I fill:#FDF2E9 + style J fill:#FADBD8 + style K fill:#EAFAF1 + style L fill:#EAFAF1 + style M fill:#FDEBD0 + style N fill:#D6EAF8 + style O fill:#EBDEF0 +``` + +#### 2.2. 组件职责描述 +* **Web Frontend**: 负责所有用户交互,包括登录注册、账号管理、任务配置、日志查看等。 +* **API Gateway / Main API Service**: 作为系统的唯一入口,负责请求路由、API组合、限流和初步的请求验证。 +* **Authentication Service**: 独立的认证授权服务,使用JWT或OAuth2.0标准,为所有服务提供统一的身份验证和权限校验。 +* **Task Scheduler Service**: 基于Celery Beat,负责解析用户配置的Cron表达式,并将签到任务发布到消息队列中。 +* **Sign-in Executor Worker**: 核心工作节点,消费消息队列中的任务,执行具体的登录、签到逻辑。此服务可水平扩展以应对高并发。 +* **Notification Hub Service**: 统一处理所有通知请求,并根据用户偏好分发至不同的渠道(Server酱、邮件等)。 +* **Browser Automation Service**: 独立的、无状态的浏览器服务。当遇到复杂的JS加密时,Executor Worker将通过API调用此服务来获取签名后的参数。此服务可使用Playwright或Puppeteer构建,并可独立集群部署。 +* **Message Queue (Redis/RabbitMQ)**: 实现异步和解耦的核心组件。Scheduler生产任务,Executor消费任务。 +* **Cache & Session Store (Redis)**: 存储短期数据,如用户会话、分布式锁、API速率限制计数器、以及所有微博账号的有效Cookies。 +* **Relational DB (PostgreSQL)**: 存储结构化核心业务数据,如用户信息、账号元数据、任务配置、签到历史记录。 +* **Proxy Pool Manager**: (可以是独立进程或一个服务) 负责维护一个高质量的代理IP池,提供健康检查、分配和回收机制。 +* **Browser Fingerprint Generator**: (可以是独立进程或一个库) 负责生成高仿真的浏览器指纹,供Executor Worker在发起请求时使用。 + +--- + +### **3. 
核心技术选型 (Technology Stack)** + +| 层级 | 技术组件 | 推荐技术 | 备选技术 | 目的 | +| :--- | :--- | :--- | :--- | :--- | +| **前端** | 框架 | React (Vite) | Vue 3 (Nuxt) | 构建动态、响应式的用户界面 | +| | 状态管理 | Redux Toolkit / Zustand | Pinia | 管理复杂的应用状态 | +| | UI库 | Ant Design / Material-UI | Element Plus | 快速搭建美观的界面 | +| **后端** | Web框架 | Python FastAPI | Node.js Express/NestJS | 高性能、易开发、自动生成API文档 | +| | ASGI服务器 | Uvicorn | Hypercorn | 运行FastAPI应用 | +| | 异步任务 | Celery | RQ (Redis Queue) | 分布式任务队列,处理后台任务 | +| | 消息代理 | Redis | RabbitMQ | Celery的消息中间件 | +| | ORM | SQLAlchemy (async) | Tortoise-ORM | 与PostgreSQL交互 | +| | 数据库 | PostgreSQL | MySQL | 存储核心业务数据,保证事务性和数据完整性 | +| | 缓存 | Redis | Memcached | 高速缓存、会话存储 | +| **基础设施**| 容器化 | Docker | Podman | 环境标准化、简化部署 | +| | 编排 | Docker Compose | Kubernetes (K8s) | 本地开发和生产环境部署 | +| | 反向代理 | Nginx | Traefik/Caddy | SSL终结、负载均衡、静态文件服务 | +| **浏览器自动化**| 库/工具 | Playwright (Python/Node) | Selenium | 模拟浏览器行为、对抗JS加密 | +| **运维** | CI/CD | GitHub Actions | GitLab CI/Jenkins | 自动化测试与部署 | +| | 监控 | Prometheus + Grafana | Zabbix | 系统指标监控 | +| | 日志 | ELK Stack (Elasticsearch, Logstash, Kibana) | Loki + Grafana | 集中式日志收集与分析 | + +--- + +### **4. 模块详细设计 (Detailed Module Design)** + +#### 4.1. 认证与授权模块 (`auth_service`) +* **API端点**: + * `POST /auth/register`: 用户注册。 + * `POST /auth/login`: 用户登录,返回JWT。 + * `POST /auth/refresh`: 刷新JWT。 + * `GET /auth/me`: 获取当前用户信息。 +* **数据库模型 (`users`)**: `id`, `username`, `email`, `hashed_password`, `created_at`, `is_active`。 +* **关键逻辑**: 使用 `bcrypt` 或 `argon2` 哈希密码。JWT包含 `user_id` 和权限声明。所有受保护的API都需通过中间件校验JWT的有效性。 + +#### 4.2. 
账号与任务管理模块 (`api_service`) +* **API端点 (示例)**: + * `POST /accounts`: 添加一个微博账号,需提交从前端获取的微博Cookie或通过二维码登录流程绑定的凭证。 + * `GET /accounts`: 获取当前用户的所有微博账号列表及其状态。 + * `PUT /accounts/{account_id}`: 更新账号信息(如备注、启用/禁用状态)。 + * `DELETE /accounts/{account_id}`: 删除一个账号。 + * `POST /tasks`: 为指定账号创建一个签到任务,接收Cron表达式和配置参数。 +* **数据库模型**: + * `accounts`: `id`, `user_id` (FK), `weibo_user_id`, `remark`, `encrypted_cookies`, `status` ('active', 'invalid_cookie', 'banned'), `last_check_time`. + * `tasks`: `id`, `account_id` (FK), `cron_expression`, `is_enabled`, `created_at`. +* **关键逻辑**: 对用户输入的Cookie进行强加密(如AES-256-GCM)后存储。状态`status`由一个独立的后台校验服务定期更新。 + +#### 4.3. 任务调度模块 (`task_scheduler`) +* **技术**: Celery Beat + Redis。 +* **配置**: 在启动时,从数据库`tasks`表中加载所有`is_enabled=True`的任务,并将其注册到Celery Beat的调度器中。 +* **工作流程**: + 1. Beat根据Cron表达式触发任务。 + 2. 任务内容是向消息队列(Redis)发送一条消息,消息体包含`task_id`和`account_id`。 + 3. Worker监听到消息后,调用`Sign-in Executor`的逻辑。 + +#### 4.4. 签到执行模块 (`signin_executor`) +* **技术**: Celery Worker。 +* **工作流程**: + 1. **接收任务**: 从消息队列获取任务。 + 2. **前置检查**: 查询Redis或DB,检查账号`status`是否为`active`。若否,则记录日志并终止。 + 3. **获取资源**: 从`Proxy Pool Manager`获取一个健康代理;从`Fingerprint Generator`获取一个指纹;从Redis解密并获取该账号的`cookies`。 + 4. **执行签到**: + * 调用`login_and_verify`模块检查Cookie有效性。 + * 调用`get_super_topics`获取签到列表。 + * 遍历列表,对每个超话调用`sign_topic`。 + * **反爬策略**: 在请求间引入随机延迟;为每个请求构造独特的Headers(含UA、指纹等);使用代理IP。 + * **JS逆向**: 如果API请求签名失败,向`Browser Automation Service`发起RPC调用,获取签名参数后再重试。 + 5. **结果上报**: 将签到结果(成功、失败、原因、获得积分)写入数据库的`signin_logs`表。 + 6. **清理**: 归还代理IP,更新Redis中的会话状态。 + +#### 4.5. 浏览器自动化模块 (`browser_automation_service`) +* **API端点 (gRPC or REST)**: + * `POST /api/v1/get_signature`: 接收目标URL和必要的上下文,返回一个已签名的请求载荷或Headers。 +* **实现**: 使用Playwright启动一个无头浏览器池。收到请求后,从池中分配一个浏览器上下文,导航至相关页面,注入JS钩子或直接监听网络请求,提取所需数据后关闭上下文并返回结果。 + +--- + +### **5. 数据库设计 (Database Schema)** + +#### 5.1. 
`users` Table +| Column | Type | Constraints | Description | +| :--- | :--- | :--- | :--- | +| `id` | UUID | PK | Primary Key | +| `username` | VARCHAR(50) | UNIQUE, NOT NULL | Unique username | +| `email` | VARCHAR(255) | UNIQUE, NOT NULL | User's email address | +| `hashed_password` | VARCHAR(255) | NOT NULL | Hashed password | +| `created_at` | TIMESTAMPTZ | DEFAULT NOW() | Account creation time | +| `is_active` | BOOLEAN | DEFAULT TRUE | Whether the user account is active | + +#### 5.2. `accounts` Table +| Column | Type | Constraints | Description | +| :--- | :--- | :--- | :--- | +| `id` | UUID | PK | Primary Key | +| `user_id` | UUID | FK (`users.id`), ON DELETE CASCADE | Owner of the account | +| `weibo_user_id` | VARCHAR(20) | NOT NULL | UID on Weibo platform | +| `remark` | VARCHAR(100) | | User-defined note for the account | +| `encrypted_cookies` | TEXT | NOT NULL | AES-256 encrypted cookie string | +| `iv` | VARCHAR(32) | NOT NULL | Initialization vector for AES-GCM | +| `status` | VARCHAR(20) | DEFAULT 'pending' | 'pending', 'active', 'invalid_cookie', 'banned' | +| `last_checked_at` | TIMESTAMPTZ | | Last time the cookie was verified | +| `created_at` | TIMESTAMPTZ | DEFAULT NOW() | When the account was added | + +#### 5.3. `tasks` Table +| Column | Type | Constraints | Description | +| :--- | :--- | :--- | :--- | +| `id` | UUID | PK | Primary Key | +| `account_id` | UUID | FK (`accounts.id`), ON DELETE CASCADE | Associated account | +| `cron_expression` | VARCHAR(50) | NOT NULL | Cron expression for scheduling | +| `is_enabled` | BOOLEAN | DEFAULT TRUE | Enable or disable the task | +| `created_at` | TIMESTAMPTZ | DEFAULT NOW() | Task creation time | + +#### 5.4. 
`signin_logs` Table +| Column | Type | Constraints | Description | +| :--- | :--- | :--- | :--- | +| `id` | BIGSERIAL | PK | Primary Key | +| `account_id` | UUID | FK (`accounts.id`) | Associated account | +| `topic_title` | VARCHAR(100) | | Title of the signed super topic | +| `status` | VARCHAR(20) | NOT NULL | 'success', 'failed_already_signed', 'failed_network', 'failed_banned' | +| `reward_info` | JSONB | | Details about rewards, e.g., `{"exp": 2, "credit": 1}` | +| `error_message` | TEXT | | Error details when status starts with 'failed_' | +| `signed_at` | TIMESTAMPTZ | DEFAULT NOW() | Timestamp of the sign-in attempt | + +--- + +### **6. API 设计规范 (API Design Specification)** + +* **协议**: 全站使用 HTTPS。 +* **数据格式**: 请求和响应体均使用 `application/json`。 +* **认证方式**: Bearer Token (JWT)。需要在请求头的 `Authorization` 字段中携带:`Authorization: Bearer <token>`。 +* **版本控制**: URL路径中包含版本号,例如 `/api/v1/auth/login`。 +* **通用响应结构**: + ```json + { + "success": true, + "data": { ... }, // 成功时返回的数据 + "message": "Operation successful.", // 可选的提示信息 + "error": null // 失败时返回的错误对象 + } + ``` + * 失败时 (HTTP状态码 >= 400): + ```json + { + "success": false, + "data": null, + "message": "Validation failed.", + "error": { + "code": "VALIDATION_ERROR", + "details": [ + {"field": "email", "message": "Invalid email format."} + ] + } + } + ``` +* **状态码**: 严格遵守 RESTful 规范。200 (OK), 201 (Created), 400 (Bad Request), 401 (Unauthorized), 403 (Forbidden), 404 (Not Found), 409 (Conflict), 500 (Internal Server Error)。 + +--- + +### **7. 部署与运维 (Deployment & Operations)** + +* **开发环境**: 使用 `docker-compose.yml` 定义所有服务(API, DB, Redis, Frontend Dev Server),实现一键启动。 +* **生产环境**: 使用 `Kubernetes` 进行编排。编写 `Deployment.yaml`, `Service.yaml`, `Ingress.yaml` 等清单文件。 +* **CI/CD Pipeline (GitHub Actions Example)**: + 1. **On Push to `main`**: Trigger workflow. + 2. **Lint & Test**: Run code linters (Black, Flake8) and unit/integration tests. + 3. 
**Build**: Build Docker images for all services and push them to a container registry (e.g., Docker Hub, AWS ECR). + 4. **Deploy**: Use `kubectl` or ArgoCD to apply the new Kubernetes manifests to the production cluster. +* **监控与告警**: + * **Metrics**: Expose metrics via `prometheus_client` in Python services. Monitor queue length, task success/failure rates, API latency. + * **Alerts**: Configure Alertmanager to trigger alerts on Slack/PagerDuty for critical issues (e.g., high task failure rate > 10%, worker node down). + * **Logging**: Configure all services to output structured logs (JSON). Use Filebeat or Fluentd to ship logs to Elasticsearch. Use Kibana for visualization and querying. + +这份文档为专业的 Coding AI 或开发团队提供了从宏观架构到微观实现的全面指导。开发过程应遵循此文档,并可根据实际情况进行迭代和优化。 \ No newline at end of file