123
This commit is contained in:
91
backend/Dockerfile
Normal file
91
backend/Dockerfile
Normal file
@@ -0,0 +1,91 @@
|
||||
# Base stage for all Python services
|
||||
FROM python:3.11-slim AS base
|
||||
|
||||
# Set working directory
|
||||
WORKDIR /app
|
||||
|
||||
# Install common system dependencies for MySQL
|
||||
RUN apt-get update && apt-get install -y \
|
||||
gcc \
|
||||
default-libmysqlclient-dev \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Copy and install unified requirements
|
||||
COPY requirements.txt .
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
# Create non-root user for security
|
||||
RUN groupadd -r appuser && useradd -r -g appuser appuser
|
||||
|
||||
|
||||
# --- API Gateway Service Stage ---
|
||||
FROM base AS api_gateway
|
||||
|
||||
# Copy application code
|
||||
COPY api_service/app/ ./app/
|
||||
|
||||
# Switch to non-root user
|
||||
USER appuser
|
||||
|
||||
# Expose port
|
||||
EXPOSE 8000
|
||||
|
||||
# Health check
|
||||
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
|
||||
CMD curl -f http://localhost:8000/health || exit 1
|
||||
|
||||
# Start application
|
||||
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
|
||||
|
||||
|
||||
# --- Auth Service Stage ---
|
||||
FROM base AS auth_service
|
||||
|
||||
# Copy application code
|
||||
COPY auth_service/app/ ./app/
|
||||
|
||||
# Switch to non-root user
|
||||
USER appuser
|
||||
|
||||
# Expose port
|
||||
EXPOSE 8000
|
||||
|
||||
# Health check
|
||||
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
|
||||
CMD curl -f http://localhost:8000/health || exit 1
|
||||
|
||||
# Start application
|
||||
CMD ["python", "-m", "app.main"]
|
||||
|
||||
|
||||
# --- Task Scheduler Service Stage ---
|
||||
FROM base AS task_scheduler
|
||||
|
||||
# Copy application code
|
||||
COPY task_scheduler/app/ ./app/
|
||||
|
||||
# Switch to non-root user
|
||||
USER appuser
|
||||
|
||||
# Start Celery Beat scheduler
|
||||
CMD ["celery", "-A", "app.celery_app", "beat", "--loglevel=info"]
|
||||
|
||||
|
||||
# --- Sign-in Executor Service Stage ---
|
||||
FROM base AS signin_executor
|
||||
|
||||
# Copy application code
|
||||
COPY signin_executor/app/ ./app/
|
||||
|
||||
# Switch to non-root user
|
||||
USER appuser
|
||||
|
||||
# Expose port
|
||||
EXPOSE 8000
|
||||
|
||||
# Health check
|
||||
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
|
||||
CMD curl -f http://localhost:8000/health || exit 1
|
||||
|
||||
# Start application
|
||||
CMD ["python", "-m", "app.main"]
|
||||
0
backend/api_service/__init__.py
Normal file
0
backend/api_service/__init__.py
Normal file
BIN
backend/api_service/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
backend/api_service/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
0
backend/api_service/app/__init__.py
Normal file
0
backend/api_service/app/__init__.py
Normal file
BIN
backend/api_service/app/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
backend/api_service/app/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/api_service/app/__pycache__/dependencies.cpython-311.pyc
Normal file
BIN
backend/api_service/app/__pycache__/dependencies.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/api_service/app/__pycache__/main.cpython-311.pyc
Normal file
BIN
backend/api_service/app/__pycache__/main.cpython-311.pyc
Normal file
Binary file not shown.
9
backend/api_service/app/config.py
Normal file
9
backend/api_service/app/config.py
Normal file
@@ -0,0 +1,9 @@
|
||||
"""
|
||||
Configuration settings for API Service.
|
||||
Re-uses shared settings; add API-specific overrides here if needed.
|
||||
"""
|
||||
|
||||
from shared.config import shared_settings
|
||||
|
||||
APP_NAME = "Weibo-HotSign API Service"
|
||||
APP_VERSION = "1.0.0"
|
||||
50
backend/api_service/app/dependencies.py
Normal file
50
backend/api_service/app/dependencies.py
Normal file
@@ -0,0 +1,50 @@
|
||||
"""
|
||||
Shared dependencies for API Service routes.
|
||||
Provides JWT-based authentication via get_current_user.
|
||||
"""
|
||||
|
||||
from fastapi import Depends, HTTPException, Security, status
|
||||
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select
|
||||
|
||||
from shared.models import get_db, User
|
||||
from auth_service.app.utils.security import decode_access_token
|
||||
|
||||
security = HTTPBearer()
|
||||
|
||||
|
||||
async def get_current_user(
|
||||
credentials: HTTPAuthorizationCredentials = Security(security),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
) -> User:
|
||||
"""Validate JWT and return the current User ORM instance."""
|
||||
payload = decode_access_token(credentials.credentials)
|
||||
if payload is None:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="Invalid or expired token",
|
||||
)
|
||||
|
||||
user_id = payload.get("sub")
|
||||
if not user_id:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="Invalid token payload",
|
||||
)
|
||||
|
||||
result = await db.execute(select(User).where(User.id == user_id))
|
||||
user = result.scalar_one_or_none()
|
||||
|
||||
if user is None:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail="User not found",
|
||||
)
|
||||
if not user.is_active:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail="User account is deactivated",
|
||||
)
|
||||
|
||||
return user
|
||||
75
backend/api_service/app/main.py
Normal file
75
backend/api_service/app/main.py
Normal file
@@ -0,0 +1,75 @@
|
||||
"""
|
||||
Weibo-HotSign API Service
|
||||
Main FastAPI application entry point — account management, task config, signin logs.
|
||||
"""
|
||||
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.exceptions import RequestValidationError
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from starlette.exceptions import HTTPException as StarletteHTTPException
|
||||
|
||||
from shared.response import success_response, error_response
|
||||
from api_service.app.routers import accounts, tasks, signin_logs
|
||||
|
||||
app = FastAPI(
|
||||
title="Weibo-HotSign API Service",
|
||||
version="1.0.0",
|
||||
docs_url="/docs",
|
||||
redoc_url="/redoc",
|
||||
)
|
||||
|
||||
# CORS
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=["http://localhost:3000", "http://localhost:80"],
|
||||
allow_credentials=True,
|
||||
allow_methods=["*"],
|
||||
allow_headers=["*"],
|
||||
)
|
||||
|
||||
|
||||
# ---- Global exception handlers (unified response format) ----
|
||||
|
||||
@app.exception_handler(StarletteHTTPException)
|
||||
async def http_exception_handler(request: Request, exc: StarletteHTTPException):
|
||||
return error_response(
|
||||
exc.detail,
|
||||
f"HTTP_{exc.status_code}",
|
||||
status_code=exc.status_code,
|
||||
)
|
||||
|
||||
|
||||
@app.exception_handler(RequestValidationError)
|
||||
async def validation_exception_handler(request: Request, exc: RequestValidationError):
|
||||
details = [
|
||||
{"field": e["loc"][-1] if e["loc"] else "unknown", "message": e["msg"]}
|
||||
for e in exc.errors()
|
||||
]
|
||||
return error_response(
|
||||
"Validation failed",
|
||||
"VALIDATION_ERROR",
|
||||
details=details,
|
||||
status_code=400,
|
||||
)
|
||||
|
||||
|
||||
# ---- Routers ----
|
||||
|
||||
app.include_router(accounts.router)
|
||||
app.include_router(tasks.router)
|
||||
app.include_router(signin_logs.router)
|
||||
|
||||
|
||||
# ---- Health / root ----
|
||||
|
||||
@app.get("/")
|
||||
async def root():
|
||||
return success_response(
|
||||
{"service": "Weibo-HotSign API Service", "version": "1.0.0"},
|
||||
"Service is running",
|
||||
)
|
||||
|
||||
|
||||
@app.get("/health")
|
||||
async def health_check():
|
||||
return success_response({"status": "healthy"})
|
||||
0
backend/api_service/app/routers/__init__.py
Normal file
0
backend/api_service/app/routers/__init__.py
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
139
backend/api_service/app/routers/accounts.py
Normal file
139
backend/api_service/app/routers/accounts.py
Normal file
@@ -0,0 +1,139 @@
|
||||
"""
|
||||
Weibo Account CRUD router.
|
||||
All endpoints require JWT authentication and enforce resource ownership.
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from shared.models import get_db, Account, User
|
||||
from shared.crypto import encrypt_cookie, decrypt_cookie, derive_key
|
||||
from shared.config import shared_settings
|
||||
from shared.response import success_response, error_response
|
||||
from api_service.app.dependencies import get_current_user
|
||||
from api_service.app.schemas.account import (
|
||||
AccountCreate,
|
||||
AccountUpdate,
|
||||
AccountResponse,
|
||||
)
|
||||
|
||||
router = APIRouter(prefix="/api/v1/accounts", tags=["accounts"])
|
||||
|
||||
|
||||
def _encryption_key() -> bytes:
|
||||
return derive_key(shared_settings.COOKIE_ENCRYPTION_KEY)
|
||||
|
||||
|
||||
def _account_to_dict(account: Account) -> dict:
|
||||
return AccountResponse.model_validate(account).model_dump(mode="json")
|
||||
|
||||
|
||||
async def _get_owned_account(
|
||||
account_id: str,
|
||||
user: User,
|
||||
db: AsyncSession,
|
||||
) -> Account:
|
||||
"""Fetch an account and verify it belongs to the current user."""
|
||||
result = await db.execute(select(Account).where(Account.id == account_id))
|
||||
account = result.scalar_one_or_none()
|
||||
if account is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Account not found")
|
||||
if account.user_id != user.id:
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access denied")
|
||||
return account
|
||||
|
||||
|
||||
# ---- CREATE ----
|
||||
|
||||
@router.post("", status_code=status.HTTP_201_CREATED)
|
||||
async def create_account(
|
||||
body: AccountCreate,
|
||||
user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
key = _encryption_key()
|
||||
ciphertext, iv = encrypt_cookie(body.cookie, key)
|
||||
|
||||
account = Account(
|
||||
user_id=user.id,
|
||||
weibo_user_id=body.weibo_user_id,
|
||||
remark=body.remark,
|
||||
encrypted_cookies=ciphertext,
|
||||
iv=iv,
|
||||
status="pending",
|
||||
)
|
||||
db.add(account)
|
||||
await db.commit()
|
||||
await db.refresh(account)
|
||||
|
||||
return success_response(_account_to_dict(account), "Account created")
|
||||
|
||||
|
||||
# ---- LIST ----
|
||||
|
||||
@router.get("")
|
||||
async def list_accounts(
|
||||
user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
result = await db.execute(
|
||||
select(Account).where(Account.user_id == user.id)
|
||||
)
|
||||
accounts = result.scalars().all()
|
||||
return success_response(
|
||||
[_account_to_dict(a) for a in accounts],
|
||||
"Accounts retrieved",
|
||||
)
|
||||
|
||||
|
||||
# ---- DETAIL ----
|
||||
|
||||
@router.get("/{account_id}")
|
||||
async def get_account(
|
||||
account_id: str,
|
||||
user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
account = await _get_owned_account(account_id, user, db)
|
||||
return success_response(_account_to_dict(account), "Account retrieved")
|
||||
|
||||
|
||||
# ---- UPDATE ----
|
||||
|
||||
@router.put("/{account_id}")
|
||||
async def update_account(
|
||||
account_id: str,
|
||||
body: AccountUpdate,
|
||||
user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
account = await _get_owned_account(account_id, user, db)
|
||||
|
||||
if body.remark is not None:
|
||||
account.remark = body.remark
|
||||
|
||||
if body.cookie is not None:
|
||||
key = _encryption_key()
|
||||
ciphertext, iv = encrypt_cookie(body.cookie, key)
|
||||
account.encrypted_cookies = ciphertext
|
||||
account.iv = iv
|
||||
|
||||
await db.commit()
|
||||
await db.refresh(account)
|
||||
return success_response(_account_to_dict(account), "Account updated")
|
||||
|
||||
|
||||
# ---- DELETE ----
|
||||
|
||||
@router.delete("/{account_id}")
|
||||
async def delete_account(
|
||||
account_id: str,
|
||||
user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
account = await _get_owned_account(account_id, user, db)
|
||||
await db.delete(account)
|
||||
await db.commit()
|
||||
return success_response(None, "Account deleted")
|
||||
83
backend/api_service/app/routers/signin_logs.py
Normal file
83
backend/api_service/app/routers/signin_logs.py
Normal file
@@ -0,0 +1,83 @@
|
||||
"""
|
||||
Signin Log query router.
|
||||
All endpoints require JWT authentication and enforce resource ownership.
|
||||
"""
|
||||
|
||||
from typing import Optional
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy import select, func
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from shared.models import get_db, Account, SigninLog, User
|
||||
from shared.response import success_response
|
||||
from api_service.app.dependencies import get_current_user
|
||||
from api_service.app.schemas.signin_log import SigninLogResponse, PaginatedResponse
|
||||
|
||||
router = APIRouter(prefix="/api/v1/accounts", tags=["signin-logs"])
|
||||
|
||||
|
||||
async def _verify_account_ownership(
|
||||
account_id: str,
|
||||
user: User,
|
||||
db: AsyncSession,
|
||||
) -> Account:
|
||||
"""Verify that the account belongs to the current user."""
|
||||
result = await db.execute(select(Account).where(Account.id == account_id))
|
||||
account = result.scalar_one_or_none()
|
||||
if account is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Account not found")
|
||||
if account.user_id != user.id:
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access denied")
|
||||
return account
|
||||
|
||||
|
||||
@router.get("/{account_id}/signin-logs")
|
||||
async def get_signin_logs(
|
||||
account_id: str,
|
||||
page: int = Query(1, ge=1, description="Page number (starts from 1)"),
|
||||
size: int = Query(20, ge=1, le=100, description="Page size (max 100)"),
|
||||
status_filter: Optional[str] = Query(None, alias="status", description="Filter by status"),
|
||||
user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""
|
||||
Query signin logs for a specific account with pagination and status filtering.
|
||||
Returns logs sorted by signed_at in descending order (newest first).
|
||||
"""
|
||||
# Verify account ownership
|
||||
await _verify_account_ownership(account_id, user, db)
|
||||
|
||||
# Build base query
|
||||
query = select(SigninLog).where(SigninLog.account_id == account_id)
|
||||
|
||||
# Apply status filter if provided
|
||||
if status_filter:
|
||||
query = query.where(SigninLog.status == status_filter)
|
||||
|
||||
# Get total count
|
||||
count_query = select(func.count()).select_from(query.subquery())
|
||||
total_result = await db.execute(count_query)
|
||||
total = total_result.scalar()
|
||||
|
||||
# Apply ordering and pagination
|
||||
query = query.order_by(SigninLog.signed_at.desc())
|
||||
offset = (page - 1) * size
|
||||
query = query.offset(offset).limit(size)
|
||||
|
||||
# Execute query
|
||||
result = await db.execute(query)
|
||||
logs = result.scalars().all()
|
||||
|
||||
# Calculate total pages
|
||||
total_pages = (total + size - 1) // size if total > 0 else 0
|
||||
|
||||
# Build response
|
||||
paginated = PaginatedResponse(
|
||||
items=[SigninLogResponse.model_validate(log) for log in logs],
|
||||
total=total,
|
||||
page=page,
|
||||
size=size,
|
||||
total_pages=total_pages,
|
||||
)
|
||||
|
||||
return success_response(paginated.model_dump(mode="json"), "Signin logs retrieved")
|
||||
196
backend/api_service/app/routers/tasks.py
Normal file
196
backend/api_service/app/routers/tasks.py
Normal file
@@ -0,0 +1,196 @@
|
||||
"""
|
||||
Signin Task CRUD router.
|
||||
All endpoints require JWT authentication and enforce resource ownership.
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from croniter import croniter
|
||||
import redis.asyncio as aioredis
|
||||
import json
|
||||
|
||||
from shared.models import get_db, Account, Task, User
|
||||
from shared.config import shared_settings
|
||||
from shared.response import success_response
|
||||
from api_service.app.dependencies import get_current_user
|
||||
from api_service.app.schemas.task import (
|
||||
TaskCreate,
|
||||
TaskUpdate,
|
||||
TaskResponse,
|
||||
)
|
||||
|
||||
router = APIRouter(prefix="/api/v1", tags=["tasks"])
|
||||
|
||||
|
||||
def _task_to_dict(task: Task) -> dict:
|
||||
return TaskResponse.model_validate(task).model_dump(mode="json")
|
||||
|
||||
|
||||
async def _get_owned_account(
|
||||
account_id: str,
|
||||
user: User,
|
||||
db: AsyncSession,
|
||||
) -> Account:
|
||||
"""Fetch an account and verify it belongs to the current user."""
|
||||
result = await db.execute(select(Account).where(Account.id == account_id))
|
||||
account = result.scalar_one_or_none()
|
||||
if account is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Account not found")
|
||||
if account.user_id != user.id:
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access denied")
|
||||
return account
|
||||
|
||||
|
||||
async def _get_owned_task(
|
||||
task_id: str,
|
||||
user: User,
|
||||
db: AsyncSession,
|
||||
) -> Task:
|
||||
"""Fetch a task and verify it belongs to the current user."""
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
result = await db.execute(
|
||||
select(Task)
|
||||
.options(selectinload(Task.account))
|
||||
.where(Task.id == task_id)
|
||||
)
|
||||
task = result.scalar_one_or_none()
|
||||
if task is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Task not found")
|
||||
|
||||
# Verify ownership through account
|
||||
if task.account.user_id != user.id:
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access denied")
|
||||
return task
|
||||
|
||||
|
||||
def _validate_cron_expression(cron_expr: str) -> None:
|
||||
"""Validate cron expression format using croniter."""
|
||||
try:
|
||||
croniter(cron_expr)
|
||||
except (ValueError, KeyError) as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail=f"Invalid cron expression: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
async def _notify_scheduler(action: str, task_data: dict) -> None:
|
||||
"""Notify Task_Scheduler via Redis pub/sub about task changes."""
|
||||
try:
|
||||
redis_client = aioredis.from_url(
|
||||
shared_settings.REDIS_URL,
|
||||
encoding="utf-8",
|
||||
decode_responses=True
|
||||
)
|
||||
message = {
|
||||
"action": action, # "create", "update", "delete"
|
||||
"task": task_data
|
||||
}
|
||||
await redis_client.publish("task_updates", json.dumps(message))
|
||||
await redis_client.close()
|
||||
except Exception as e:
|
||||
# Log but don't fail the request if notification fails
|
||||
print(f"Warning: Failed to notify scheduler: {e}")
|
||||
|
||||
|
||||
# ---- CREATE TASK ----
|
||||
|
||||
@router.post("/accounts/{account_id}/tasks", status_code=status.HTTP_201_CREATED)
|
||||
async def create_task(
|
||||
account_id: str,
|
||||
body: TaskCreate,
|
||||
user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""Create a new signin task for the specified account."""
|
||||
# Verify account ownership
|
||||
account = await _get_owned_account(account_id, user, db)
|
||||
|
||||
# Validate cron expression
|
||||
_validate_cron_expression(body.cron_expression)
|
||||
|
||||
# Create task
|
||||
task = Task(
|
||||
account_id=account.id,
|
||||
cron_expression=body.cron_expression,
|
||||
is_enabled=True,
|
||||
)
|
||||
db.add(task)
|
||||
await db.commit()
|
||||
await db.refresh(task)
|
||||
|
||||
# Notify scheduler
|
||||
await _notify_scheduler("create", _task_to_dict(task))
|
||||
|
||||
return success_response(_task_to_dict(task), "Task created")
|
||||
|
||||
|
||||
# ---- LIST TASKS FOR ACCOUNT ----
|
||||
|
||||
@router.get("/accounts/{account_id}/tasks")
|
||||
async def list_tasks(
|
||||
account_id: str,
|
||||
user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""Get all tasks for the specified account."""
|
||||
# Verify account ownership
|
||||
account = await _get_owned_account(account_id, user, db)
|
||||
|
||||
# Fetch tasks
|
||||
result = await db.execute(
|
||||
select(Task).where(Task.account_id == account.id)
|
||||
)
|
||||
tasks = result.scalars().all()
|
||||
|
||||
return success_response(
|
||||
[_task_to_dict(t) for t in tasks],
|
||||
"Tasks retrieved",
|
||||
)
|
||||
|
||||
|
||||
# ---- UPDATE TASK ----
|
||||
|
||||
@router.put("/tasks/{task_id}")
|
||||
async def update_task(
|
||||
task_id: str,
|
||||
body: TaskUpdate,
|
||||
user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""Update task (enable/disable)."""
|
||||
task = await _get_owned_task(task_id, user, db)
|
||||
|
||||
if body.is_enabled is not None:
|
||||
task.is_enabled = body.is_enabled
|
||||
|
||||
await db.commit()
|
||||
await db.refresh(task)
|
||||
|
||||
# Notify scheduler
|
||||
await _notify_scheduler("update", _task_to_dict(task))
|
||||
|
||||
return success_response(_task_to_dict(task), "Task updated")
|
||||
|
||||
|
||||
# ---- DELETE TASK ----
|
||||
|
||||
@router.delete("/tasks/{task_id}")
|
||||
async def delete_task(
|
||||
task_id: str,
|
||||
user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""Delete a task."""
|
||||
task = await _get_owned_task(task_id, user, db)
|
||||
task_data = _task_to_dict(task)
|
||||
|
||||
await db.delete(task)
|
||||
await db.commit()
|
||||
|
||||
# Notify scheduler
|
||||
await _notify_scheduler("delete", task_data)
|
||||
|
||||
return success_response(None, "Task deleted")
|
||||
0
backend/api_service/app/schemas/__init__.py
Normal file
0
backend/api_service/app/schemas/__init__.py
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
backend/api_service/app/schemas/__pycache__/task.cpython-311.pyc
Normal file
BIN
backend/api_service/app/schemas/__pycache__/task.cpython-311.pyc
Normal file
Binary file not shown.
34
backend/api_service/app/schemas/account.py
Normal file
34
backend/api_service/app/schemas/account.py
Normal file
@@ -0,0 +1,34 @@
|
||||
"""
|
||||
Pydantic schemas for Weibo Account CRUD operations.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class AccountCreate(BaseModel):
|
||||
"""Request body for creating a new Weibo account."""
|
||||
weibo_user_id: str = Field(..., min_length=1, max_length=20, description="Weibo user ID")
|
||||
cookie: str = Field(..., min_length=1, description="Raw Weibo cookie string")
|
||||
remark: Optional[str] = Field(None, max_length=100, description="Optional note")
|
||||
|
||||
|
||||
class AccountUpdate(BaseModel):
|
||||
"""Request body for updating an existing Weibo account."""
|
||||
cookie: Optional[str] = Field(None, min_length=1, description="New cookie (will be re-encrypted)")
|
||||
remark: Optional[str] = Field(None, max_length=100, description="Updated note")
|
||||
|
||||
|
||||
class AccountResponse(BaseModel):
|
||||
"""Public representation of a Weibo account (no cookie plaintext)."""
|
||||
id: str
|
||||
user_id: str
|
||||
weibo_user_id: str
|
||||
remark: Optional[str]
|
||||
status: str
|
||||
last_checked_at: Optional[datetime]
|
||||
created_at: Optional[datetime]
|
||||
|
||||
class Config:
|
||||
from_attributes = True
|
||||
30
backend/api_service/app/schemas/signin_log.py
Normal file
30
backend/api_service/app/schemas/signin_log.py
Normal file
@@ -0,0 +1,30 @@
|
||||
"""
|
||||
Pydantic schemas for Signin Log query operations.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional, List, Any, Dict
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class SigninLogResponse(BaseModel):
|
||||
"""Public representation of a signin log entry."""
|
||||
id: int
|
||||
account_id: str
|
||||
topic_title: Optional[str]
|
||||
status: str
|
||||
reward_info: Optional[Any]
|
||||
error_message: Optional[str]
|
||||
signed_at: datetime
|
||||
|
||||
class Config:
|
||||
from_attributes = True
|
||||
|
||||
|
||||
class PaginatedResponse(BaseModel):
|
||||
"""Paginated response wrapper for signin logs."""
|
||||
items: List[SigninLogResponse]
|
||||
total: int
|
||||
page: int
|
||||
size: int
|
||||
total_pages: int
|
||||
29
backend/api_service/app/schemas/task.py
Normal file
29
backend/api_service/app/schemas/task.py
Normal file
@@ -0,0 +1,29 @@
|
||||
"""
|
||||
Pydantic schemas for Task CRUD operations.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class TaskCreate(BaseModel):
|
||||
"""Request body for creating a new signin task."""
|
||||
cron_expression: str = Field(..., min_length=1, max_length=50, description="Cron expression for scheduling")
|
||||
|
||||
|
||||
class TaskUpdate(BaseModel):
|
||||
"""Request body for updating an existing task."""
|
||||
is_enabled: Optional[bool] = Field(None, description="Enable or disable the task")
|
||||
|
||||
|
||||
class TaskResponse(BaseModel):
|
||||
"""Public representation of a signin task."""
|
||||
id: str
|
||||
account_id: str
|
||||
cron_expression: str
|
||||
is_enabled: bool
|
||||
created_at: Optional[datetime]
|
||||
|
||||
class Config:
|
||||
from_attributes = True
|
||||
34
backend/auth_service/Dockerfile
Normal file
34
backend/auth_service/Dockerfile
Normal file
@@ -0,0 +1,34 @@
|
||||
# Weibo-HotSign Authentication Service Dockerfile
|
||||
FROM python:3.11-slim
|
||||
|
||||
# Set working directory
|
||||
WORKDIR /app
|
||||
|
||||
# Install system dependencies
|
||||
RUN apt-get update && apt-get install -y \
|
||||
gcc \
|
||||
default-libmysqlclient-dev \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Copy requirements first for better caching
|
||||
COPY ../requirements.txt .
|
||||
|
||||
# Install Python dependencies
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
# Copy application code
|
||||
COPY app/ ./app/
|
||||
|
||||
# Create non-root user for security
|
||||
RUN groupadd -r appuser && useradd -r -g appuser appuser
|
||||
USER appuser
|
||||
|
||||
# Expose port
|
||||
EXPOSE 8000
|
||||
|
||||
# Health check
|
||||
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
|
||||
CMD curl -f http://localhost:8000/health || exit 1
|
||||
|
||||
# Start application
|
||||
CMD ["python", "-m", "app.main"]
|
||||
0
backend/auth_service/__init__.py
Normal file
0
backend/auth_service/__init__.py
Normal file
BIN
backend/auth_service/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
backend/auth_service/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
0
backend/auth_service/app/__init__.py
Normal file
0
backend/auth_service/app/__init__.py
Normal file
BIN
backend/auth_service/app/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
backend/auth_service/app/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/auth_service/app/__pycache__/main.cpython-311.pyc
Normal file
BIN
backend/auth_service/app/__pycache__/main.cpython-311.pyc
Normal file
Binary file not shown.
50
backend/auth_service/app/config.py
Normal file
50
backend/auth_service/app/config.py
Normal file
@@ -0,0 +1,50 @@
|
||||
"""
|
||||
Configuration settings for Authentication Service
|
||||
Loads environment variables and provides configuration object
|
||||
"""
|
||||
|
||||
import os
|
||||
from pydantic_settings import BaseSettings
|
||||
from typing import Optional
|
||||
|
||||
class Settings(BaseSettings):
|
||||
"""Application settings using Pydantic BaseSettings"""
|
||||
|
||||
# Database settings
|
||||
DATABASE_URL: str = os.getenv(
|
||||
"DATABASE_URL",
|
||||
# If DATABASE_URL is not set, raise an error to force proper configuration
|
||||
# For development, you can create a .env file with DATABASE_URL=mysql+aiomysql://user:password@host/dbname
|
||||
)
|
||||
|
||||
# JWT settings
|
||||
JWT_SECRET_KEY: str = os.getenv(
|
||||
"JWT_SECRET_KEY",
|
||||
# If JWT_SECRET_KEY is not set, raise an error to force proper configuration
|
||||
# For development, you can create a .env file with JWT_SECRET_KEY=your-secret-key
|
||||
)
|
||||
JWT_ALGORITHM: str = "HS256"
|
||||
JWT_EXPIRATION_HOURS: int = 24
|
||||
|
||||
# Security settings
|
||||
BCRYPT_ROUNDS: int = 12
|
||||
|
||||
# Application settings
|
||||
APP_NAME: str = "Weibo-HotSign Authentication Service"
|
||||
DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true"
|
||||
HOST: str = os.getenv("HOST", "0.0.0.0")
|
||||
PORT: int = int(os.getenv("PORT", 8000))
|
||||
|
||||
# CORS settings
|
||||
ALLOWED_ORIGINS: list = [
|
||||
"http://localhost:3000",
|
||||
"http://localhost:80",
|
||||
"http://127.0.0.1:3000"
|
||||
]
|
||||
|
||||
class Config:
|
||||
case_sensitive = True
|
||||
env_file = ".env"
|
||||
|
||||
# Create global settings instance
|
||||
settings = Settings()
|
||||
223
backend/auth_service/app/main.py
Normal file
223
backend/auth_service/app/main.py
Normal file
@@ -0,0 +1,223 @@
|
||||
"""
|
||||
Weibo-HotSign Authentication Service
|
||||
Main FastAPI application entry point
|
||||
"""
|
||||
|
||||
from fastapi import FastAPI, Depends, HTTPException, status, Security
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select
|
||||
import uvicorn
|
||||
import os
|
||||
import logging
|
||||
|
||||
from shared.models import get_db, User
|
||||
from auth_service.app.models.database import create_tables
|
||||
from auth_service.app.schemas.user import (
|
||||
UserCreate, UserLogin, UserResponse, Token, TokenData, RefreshTokenRequest,
|
||||
)
|
||||
from auth_service.app.services.auth_service import AuthService
|
||||
from auth_service.app.utils.security import (
|
||||
verify_password, create_access_token, decode_access_token,
|
||||
create_refresh_token, verify_refresh_token, revoke_refresh_token,
|
||||
)
|
||||
|
||||
# Configure logger
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Initialize FastAPI app
|
||||
app = FastAPI(
|
||||
title="Weibo-HotSign Authentication Service",
|
||||
description="Handles user authentication and authorization for Weibo-HotSign system",
|
||||
version="1.0.0",
|
||||
docs_url="/docs",
|
||||
redoc_url="/redoc"
|
||||
)
|
||||
|
||||
# CORS middleware configuration
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=["http://localhost:3000", "http://localhost:80"],
|
||||
allow_credentials=True,
|
||||
allow_methods=["*"],
|
||||
allow_headers=["*"],
|
||||
)
|
||||
|
||||
# Security scheme for JWT
|
||||
security = HTTPBearer()
|
||||
|
||||
async def get_current_user(
    credentials: HTTPAuthorizationCredentials = Security(security),
    db: AsyncSession = Depends(get_db)
) -> UserResponse:
    """
    FastAPI dependency that resolves the current user from a Bearer JWT.

    Decodes the access token, loads the matching user record, and rejects
    the request when the token is invalid, the user is missing, or the
    account has been deactivated.

    Raises:
        HTTPException: 401 for an invalid/expired token or a payload with
            no "sub" claim; 404 when the user no longer exists; 403 when
            the account is deactivated.
    """
    payload = decode_access_token(credentials.credentials)
    if payload is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid or expired token",
            headers={"WWW-Authenticate": "Bearer"},
        )

    user_id = payload.get("sub")
    if not user_id:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid token payload",
            headers={"WWW-Authenticate": "Bearer"},
        )

    auth_service = AuthService(db)
    user = await auth_service.get_user_by_id(user_id)

    if user is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="User not found",
        )

    if not user.is_active:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="User account is deactivated",
        )

    # Pydantic v2 (pinned in requirements): model_validate replaces the
    # deprecated from_orm(); UserResponse sets from_attributes=True.
    return UserResponse.model_validate(user)
|
||||
|
||||
@app.on_event("startup")
async def startup_event():
    """Create any missing database tables when the service boots."""
    await create_tables()
|
||||
|
||||
@app.get("/")
async def root():
    """Service banner: name, running status, and version."""
    banner = {
        "service": "Weibo-HotSign Authentication Service",
        "status": "running",
        "version": "1.0.0",
    }
    return banner
|
||||
|
||||
@app.get("/health")
async def health_check():
    """Liveness probe used by the container HEALTHCHECK."""
    return {"status": "healthy"}
|
||||
|
||||
@app.post("/auth/register", response_model=UserResponse, status_code=status.HTTP_201_CREATED)
async def register_user(user_data: UserCreate, db: AsyncSession = Depends(get_db)):
    """
    Register a new user account.

    Rejects with 409 when the email or username is already taken, then
    delegates creation (password validation + hashing) to AuthService.

    Raises:
        HTTPException: 409 on duplicates, 400 for a weak password
            (propagated from AuthService), 500 on unexpected errors.
    """
    auth_service = AuthService(db)

    # Single-query existence check for both email and username.
    email_user, username_user = await auth_service.check_user_exists(
        user_data.email, user_data.username
    )

    if email_user:
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail="User with this email already exists"
        )

    if username_user:
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail="Username already taken"
        )

    try:
        user = await auth_service.create_user(user_data)
        return UserResponse.from_orm(user)
    except HTTPException:
        # BUG FIX: AuthService.create_user raises HTTPException itself
        # (400 weak password, 409 duplicate). The previous broad handler
        # swallowed these and re-raised them as generic 500s.
        raise
    except Exception:
        # Do not echo internal exception details to the client.
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to create user"
        )
|
||||
|
||||
@app.post("/auth/login", response_model=Token)
async def login_user(login_data: UserLogin, db: AsyncSession = Depends(get_db)):
    """
    Authenticate a user by email/password and issue a token pair.

    Unknown email and wrong password produce the identical 401 message
    so the response does not reveal which half of the credentials failed.
    """
    auth_service = AuthService(db)

    user = await auth_service.get_user_by_email(login_data.email)

    # Short-circuit: password is only checked when a user was found.
    bad_credentials = user is None or not verify_password(
        login_data.password, user.hashed_password
    )
    if bad_credentials:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid email or password"
        )

    if not user.is_active:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="User account is deactivated"
        )

    # Short-lived JWT plus an opaque refresh token stored in Redis.
    access_token = create_access_token(
        data={"sub": str(user.id), "username": user.username}
    )
    refresh_token = await create_refresh_token(str(user.id))

    return Token(
        access_token=access_token,
        refresh_token=refresh_token,
        token_type="bearer",
        expires_in=3600  # 1 hour, as advertised to clients
    )
|
||||
|
||||
@app.post("/auth/refresh", response_model=Token)
async def refresh_token(body: RefreshTokenRequest, db: AsyncSession = Depends(get_db)):
    """
    Rotate a refresh token.

    Exchanges a valid refresh token for a brand-new access + refresh
    token pair; the presented refresh token is revoked immediately so it
    cannot be replayed.
    """
    user_id = await verify_refresh_token(body.refresh_token)
    if user_id is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid or expired refresh token",
        )

    # The token can outlive the account; re-check the user record.
    user = await AuthService(db).get_user_by_id(user_id)
    if user is None or not user.is_active:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="User not found or deactivated",
        )

    # Rotation: revoke the old token before minting the new pair.
    await revoke_refresh_token(body.refresh_token)
    claims = {"sub": str(user.id), "username": user.username}
    new_access = create_access_token(data=claims)
    new_refresh = await create_refresh_token(str(user.id))

    return Token(
        access_token=new_access,
        refresh_token=new_refresh,
        token_type="bearer",
        expires_in=3600,
    )
|
||||
|
||||
@app.get("/auth/me", response_model=UserResponse)
async def get_current_user_info(current_user: UserResponse = Depends(get_current_user)):
    """Return the authenticated caller's own profile."""
    return current_user
|
||||
0
backend/auth_service/app/models/__init__.py
Normal file
0
backend/auth_service/app/models/__init__.py
Normal file
Binary file not shown.
Binary file not shown.
15
backend/auth_service/app/models/database.py
Normal file
15
backend/auth_service/app/models/database.py
Normal file
@@ -0,0 +1,15 @@
|
||||
"""
|
||||
Database models and connection management for Authentication Service.
|
||||
Re-exports shared module components for backward compatibility.
|
||||
"""
|
||||
|
||||
# Re-export everything from the shared module
|
||||
from shared.models import Base, get_db, engine, AsyncSessionLocal, User
|
||||
|
||||
__all__ = ["Base", "get_db", "engine", "AsyncSessionLocal", "User"]
|
||||
|
||||
|
||||
async def create_tables():
    """Create every table registered on Base.metadata if it is missing."""
    async with engine.begin() as connection:
        await connection.run_sync(Base.metadata.create_all)
|
||||
0
backend/auth_service/app/schemas/__init__.py
Normal file
0
backend/auth_service/app/schemas/__init__.py
Normal file
Binary file not shown.
Binary file not shown.
57
backend/auth_service/app/schemas/user.py
Normal file
57
backend/auth_service/app/schemas/user.py
Normal file
@@ -0,0 +1,57 @@
|
||||
"""
|
||||
Pydantic schemas for User-related data structures
|
||||
Defines request/response models for authentication endpoints
|
||||
"""
|
||||
|
||||
from pydantic import BaseModel, EmailStr, Field
|
||||
from typing import Optional
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
|
||||
class UserBase(BaseModel):
    """Fields shared by every user-facing schema."""
    username: str = Field(..., min_length=3, max_length=50, description="Unique username")
    email: EmailStr = Field(..., description="Valid email address")
|
||||
|
||||
class UserCreate(UserBase):
    """Registration payload: base user fields plus a plaintext password."""
    password: str = Field(..., min_length=8, description="Password (min 8 characters)")
|
||||
|
||||
class UserLogin(BaseModel):
    """Credentials submitted to /auth/login."""
    email: EmailStr = Field(..., description="User's email address")
    password: str = Field(..., description="User's password")
|
||||
|
||||
class UserUpdate(BaseModel):
    """Partial profile update; every field is optional."""
    username: Optional[str] = Field(None, min_length=3, max_length=50)
    email: Optional[EmailStr] = None
    is_active: Optional[bool] = None
|
||||
|
||||
class UserResponse(UserBase):
    """User data returned to clients (never includes password material)."""
    id: UUID
    created_at: datetime
    is_active: bool

    class Config:
        # Allow validating straight from ORM objects (Pydantic "ORM mode").
        from_attributes = True
|
||||
|
||||
class Token(BaseModel):
    """Token pair returned by /auth/login and /auth/refresh."""
    access_token: str = Field(..., description="JWT access token")
    refresh_token: str = Field(..., description="Opaque refresh token")
    token_type: str = Field(default="bearer", description="Token type")
    expires_in: int = Field(..., description="Access token expiration time in seconds")
|
||||
|
||||
|
||||
class RefreshTokenRequest(BaseModel):
    """Request body of /auth/refresh."""
    refresh_token: str = Field(..., description="The refresh token to exchange")
|
||||
|
||||
|
||||
class TokenData(BaseModel):
    """Claims of a decoded access token."""
    sub: str = Field(..., description="Subject (user ID)")
    username: str = Field(..., description="Username")
    exp: Optional[int] = None
|
||||
0
backend/auth_service/app/services/__init__.py
Normal file
0
backend/auth_service/app/services/__init__.py
Normal file
Binary file not shown.
Binary file not shown.
191
backend/auth_service/app/services/auth_service.py
Normal file
191
backend/auth_service/app/services/auth_service.py
Normal file
@@ -0,0 +1,191 @@
|
||||
"""
|
||||
Authentication service business logic
|
||||
Handles user registration, login, and user management operations
|
||||
"""
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, or_
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from fastapi import HTTPException, status
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from shared.models import User
|
||||
from ..schemas.user import UserCreate, UserLogin
|
||||
from ..utils.security import hash_password, validate_password_strength, verify_password
|
||||
|
||||
# Configure logger
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class AuthService:
    """Business-logic layer for authentication and user management.

    Wraps an AsyncSession. Read helpers deliberately swallow database
    errors (logging them) and return None/empty so callers can treat a
    failure like "not found"; write helpers roll back on error.
    """

    def __init__(self, db: AsyncSession):
        self.db = db

    async def get_user_by_email(self, email: str) -> Optional[User]:
        """Return the user with this email, or None if absent or on error."""
        try:
            result = await self.db.execute(select(User).where(User.email == email))
            return result.scalar_one_or_none()
        except Exception as e:
            logger.error(f"Error fetching user by email {email}: {e}")
            return None

    async def get_user_by_username(self, username: str) -> Optional[User]:
        """Return the user with this username, or None if absent or on error."""
        try:
            result = await self.db.execute(select(User).where(User.username == username))
            return result.scalar_one_or_none()
        except Exception as e:
            logger.error(f"Error fetching user by username {username}: {e}")
            return None

    async def get_user_by_id(self, user_id: str) -> Optional[User]:
        """Return the user with this ID, or None if absent or on error."""
        try:
            # IDs are stored as CHAR(36) strings (MySQL); no UUID conversion.
            result = await self.db.execute(select(User).where(User.id == user_id))
            return result.scalar_one_or_none()
        except Exception as e:
            logger.error(f"Error fetching user by ID {user_id}: {e}")
            return None

    async def create_user(self, user_data: UserCreate) -> User:
        """Validate, hash, and persist a new user.

        Raises:
            HTTPException: 400 if the password fails the strength policy,
                409 on a duplicate username/email, 500 on other errors.
        """
        is_strong, message = validate_password_strength(user_data.password)
        if not is_strong:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Password too weak: {message}"
            )

        user = User(
            username=user_data.username,
            email=user_data.email,
            hashed_password=hash_password(user_data.password),
            is_active=True
        )

        try:
            self.db.add(user)
            await self.db.commit()
            await self.db.refresh(user)
            logger.info(f"Successfully created user: {user.username} ({user.email})")
            return user

        except IntegrityError as e:
            await self.db.rollback()
            logger.error(f"Integrity error creating user {user_data.username}: {e}")
            # BUG FIX: the original matched PostgreSQL constraint names
            # ("users_username_key" / "users_email_key"), which never occur
            # in MySQL's "Duplicate entry ... for key 'users.username'"
            # message, so every duplicate surfaced as a 500. Matching the
            # column name works for both backends.
            origin = str(e.orig)
            if "username" in origin:
                raise HTTPException(
                    status_code=status.HTTP_409_CONFLICT,
                    detail="Username already exists"
                )
            if "email" in origin:
                raise HTTPException(
                    status_code=status.HTTP_409_CONFLICT,
                    detail="Email already registered"
                )
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail="Failed to create user due to database constraint"
            )

        except Exception as e:
            await self.db.rollback()
            logger.error(f"Unexpected error creating user {user_data.username}: {e}")
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail="Internal server error during user creation"
            )

    async def check_user_exists(self, email: str, username: str) -> tuple[Optional[User], Optional[User]]:
        """Look up email and username collisions with a single query.

        Returns (user_with_email, user_with_username); either slot may be
        None. On a database error both slots are None.
        """
        try:
            stmt = select(User).where(or_(User.email == email, User.username == username))
            result = await self.db.execute(stmt)
            users = result.scalars().all()

            email_user = next((u for u in users if u.email == email), None)
            username_user = next((u for u in users if u.username == username), None)
            return email_user, username_user
        except Exception as e:
            logger.error(f"Error checking user existence: {e}")
            return None, None

    async def authenticate_user(self, login_data: UserLogin) -> Optional[User]:
        """Verify email + password; return the user, or None on any failure."""
        user = await self.get_user_by_email(login_data.email)
        if not user:
            return None

        if not verify_password(login_data.password, user.hashed_password):
            return None

        if not user.is_active:
            logger.warning(f"Login attempt for deactivated user: {user.email}")
            return None

        logger.info(f"Successful authentication for user: {user.username}")
        return user

    async def update_user_status(self, user_id: str, is_active: bool) -> Optional[User]:
        """Set a user's active flag; return the updated user or None on failure."""
        user = await self.get_user_by_id(user_id)
        if not user:
            return None

        user.is_active = is_active
        try:
            await self.db.commit()
            await self.db.refresh(user)
            logger.info(f"Updated user {user.username} status to: {is_active}")
            return user
        except Exception as e:
            await self.db.rollback()
            logger.error(f"Error updating user status: {e}")
            return None

    async def get_all_users(self, skip: int = 0, limit: int = 100) -> list[User]:
        """Return a page of users (admin use); empty list on error."""
        try:
            result = await self.db.execute(select(User).offset(skip).limit(limit))
            return result.scalars().all()
        except Exception as e:
            logger.error(f"Error fetching users list: {e}")
            return []

    async def check_database_health(self) -> bool:
        """Probe the database with a trivial query; True when reachable."""
        try:
            await self.db.execute(select(User).limit(1))
            return True
        except Exception as e:
            logger.error(f"Database health check failed: {e}")
            return False
|
||||
0
backend/auth_service/app/utils/__init__.py
Normal file
0
backend/auth_service/app/utils/__init__.py
Normal file
Binary file not shown.
Binary file not shown.
148
backend/auth_service/app/utils/security.py
Normal file
148
backend/auth_service/app/utils/security.py
Normal file
@@ -0,0 +1,148 @@
|
||||
"""
|
||||
Security utilities for password hashing and JWT token management
|
||||
"""
|
||||
|
||||
import hashlib
import secrets
from datetime import datetime, timedelta, timezone
from typing import Optional

import bcrypt
import jwt
import redis.asyncio as aioredis

from shared.config import shared_settings
|
||||
|
||||
# Auth-specific defaults
|
||||
BCRYPT_ROUNDS = 12
|
||||
REFRESH_TOKEN_TTL = 7 * 24 * 3600 # 7 days in seconds
|
||||
|
||||
# Lazy-initialised async Redis client
|
||||
_redis_client: Optional[aioredis.Redis] = None
|
||||
|
||||
|
||||
async def get_redis() -> aioredis.Redis:
    """Return the module-wide async Redis client, creating it on first use."""
    global _redis_client
    if _redis_client is None:
        _redis_client = aioredis.from_url(
            shared_settings.REDIS_URL, decode_responses=True
        )
    return _redis_client
|
||||
|
||||
def hash_password(password: str) -> str:
    """Hash *password* with bcrypt (cost = BCRYPT_ROUNDS); return UTF-8 text."""
    digest = bcrypt.hashpw(
        password.encode("utf-8"),
        bcrypt.gensalt(rounds=BCRYPT_ROUNDS),
    )
    return digest.decode("utf-8")
|
||||
|
||||
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """
    Check *plain_password* against a bcrypt *hashed_password*.

    Any error (malformed hash, encoding problem) is treated as a mismatch.
    """
    try:
        candidate = plain_password.encode("utf-8")
        expected = hashed_password.encode("utf-8")
        return bcrypt.checkpw(candidate, expected)
    except Exception:
        return False
|
||||
|
||||
def create_access_token(data: dict, expires_delta: Optional[timedelta] = None) -> str:
    """
    Create a signed JWT access token from *data*.

    Adds an "exp" claim at now + *expires_delta* (defaulting to the
    configured JWT_EXPIRATION_HOURS) and signs with the shared secret.
    """
    to_encode = data.copy()

    # Timezone-aware now() replaces datetime.utcnow(), which is deprecated
    # since Python 3.12; PyJWT serializes aware datetimes to the same UTC
    # Unix timestamp, so issued tokens are unchanged.
    lifetime = expires_delta or timedelta(hours=shared_settings.JWT_EXPIRATION_HOURS)
    to_encode["exp"] = datetime.now(timezone.utc) + lifetime

    return jwt.encode(to_encode, shared_settings.JWT_SECRET_KEY, algorithm=shared_settings.JWT_ALGORITHM)
|
||||
|
||||
def decode_access_token(token: str) -> Optional[dict]:
    """Decode *token*; return the claims dict, or None when invalid/expired."""
    try:
        return jwt.decode(
            token,
            shared_settings.JWT_SECRET_KEY,
            algorithms=[shared_settings.JWT_ALGORITHM],
        )
    except jwt.InvalidTokenError:
        # ExpiredSignatureError is a subclass of InvalidTokenError, so one
        # handler covers both "expired" and "malformed/invalid".
        return None
|
||||
|
||||
def generate_password_reset_token(email: str) -> str:
    """Issue a short-lived (1 hour) JWT for a password-reset flow."""
    claims = {"email": email, "type": "password_reset"}
    return create_access_token(claims, timedelta(hours=1))
|
||||
|
||||
# Password strength validation
|
||||
def validate_password_strength(password: str) -> tuple[bool, str]:
    """
    Check *password* against the service's strength policy.

    Rules are evaluated in order (length, uppercase, lowercase, digit,
    special character); the first failing rule's message is returned.

    Returns:
        (True, "Password is strong") when every rule passes, otherwise
        (False, <message for the first failed rule>).
    """
    specials = "!@#$%^&*()_+-=[]{}|;:,.<>?"
    rules = [
        (len(password) >= 8,
         "Password must be at least 8 characters long"),
        (any(c.isupper() for c in password),
         "Password must contain at least one uppercase letter"),
        (any(c.islower() for c in password),
         "Password must contain at least one lowercase letter"),
        (any(c.isdigit() for c in password),
         "Password must contain at least one digit"),
        (any(c in specials for c in password),
         "Password must contain at least one special character"),
    ]
    for passed, message in rules:
        if not passed:
            return False, message
    return True, "Password is strong"
|
||||
|
||||
|
||||
# --------------- Refresh Token helpers ---------------
|
||||
|
||||
def _hash_token(token: str) -> str:
|
||||
"""SHA-256 hash of a refresh token for safe Redis key storage."""
|
||||
return hashlib.sha256(token.encode("utf-8")).hexdigest()
|
||||
|
||||
|
||||
async def create_refresh_token(user_id: str) -> str:
    """
    Mint a random refresh token for *user_id*.

    Only the SHA-256 hash of the token is stored in Redis (key
    "refresh_token:<hash>", TTL = REFRESH_TOKEN_TTL); the raw token is
    returned to the caller and never persisted server-side.
    """
    token = secrets.token_urlsafe(48)
    redis_conn = await get_redis()
    await redis_conn.setex(f"refresh_token:{_hash_token(token)}", REFRESH_TOKEN_TTL, user_id)
    return token
|
||||
|
||||
|
||||
async def verify_refresh_token(token: str) -> Optional[str]:
    """Return the user_id bound to *token* in Redis, or None if unknown/expired."""
    redis_conn = await get_redis()
    return await redis_conn.get(f"refresh_token:{_hash_token(token)}")
|
||||
|
||||
|
||||
async def revoke_refresh_token(token: str) -> None:
    """Delete *token*'s hash from Redis so it can no longer be redeemed."""
    redis_conn = await get_redis()
    await redis_conn.delete(f"refresh_token:{_hash_token(token)}")
|
||||
31
backend/auth_service/requirements.txt
Normal file
31
backend/auth_service/requirements.txt
Normal file
@@ -0,0 +1,31 @@
|
||||
# Weibo-HotSign Authentication Service Requirements
|
||||
# Web Framework
|
||||
fastapi==0.104.1
|
||||
uvicorn[standard]==0.24.0
|
||||
pydantic-settings==2.0.3
|
||||
|
||||
# Database
|
||||
sqlalchemy==2.0.23
|
||||
aiomysql==0.2.0
|
||||
PyMySQL==1.1.0
|
||||
|
||||
# Security
|
||||
bcrypt==4.1.2
|
||||
PyJWT[crypto]==2.8.0
|
||||
|
||||
# Validation and Serialization
|
||||
pydantic==2.5.0
|
||||
python-multipart==0.0.6
|
||||
|
||||
# Utilities
|
||||
python-dotenv==1.0.0
|
||||
requests==2.31.0
|
||||
|
||||
# Logging and Monitoring
|
||||
structlog==23.2.0
|
||||
|
||||
# Development tools (optional)
|
||||
# pytest==7.4.3
|
||||
# pytest-asyncio==0.21.1
|
||||
# black==23.11.0
|
||||
# flake8==6.1.0
|
||||
33
backend/requirements.txt
Normal file
33
backend/requirements.txt
Normal file
@@ -0,0 +1,33 @@
|
||||
# Weibo-HotSign Unified Backend Requirements
|
||||
|
||||
# Web Framework & Server
|
||||
fastapi==0.104.1
|
||||
uvicorn[standard]==0.24.0
|
||||
|
||||
# Task Queue
|
||||
celery==5.3.6
|
||||
redis==5.0.1
|
||||
|
||||
# Database
|
||||
sqlalchemy==2.0.23
|
||||
aiomysql==0.2.0
|
||||
PyMySQL==1.1.0
|
||||
|
||||
# Configuration, Validation, and Serialization
|
||||
pydantic-settings==2.0.3
|
||||
pydantic==2.5.0
|
||||
python-multipart==0.0.6
|
||||
|
||||
# Security
|
||||
bcrypt==4.1.2
|
||||
PyJWT[crypto]==2.8.0
|
||||
pycryptodome==3.19.0
|
||||
|
||||
# HTTP & Utilities
|
||||
httpx==0.25.2
|
||||
requests==2.31.0
|
||||
python-dotenv==1.0.0
|
||||
croniter==2.0.1
|
||||
|
||||
# Logging and Monitoring
|
||||
structlog==23.2.0
|
||||
1
backend/shared/__init__.py
Normal file
1
backend/shared/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Shared module for Weibo-HotSign backend services."""
|
||||
BIN
backend/shared/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
backend/shared/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/shared/__pycache__/config.cpython-311.pyc
Normal file
BIN
backend/shared/__pycache__/config.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/shared/__pycache__/crypto.cpython-311.pyc
Normal file
BIN
backend/shared/__pycache__/crypto.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/shared/__pycache__/response.cpython-311.pyc
Normal file
BIN
backend/shared/__pycache__/response.cpython-311.pyc
Normal file
Binary file not shown.
31
backend/shared/config.py
Normal file
31
backend/shared/config.py
Normal file
@@ -0,0 +1,31 @@
|
||||
"""
|
||||
Shared configuration for all Weibo-HotSign backend services.
|
||||
Loads settings from environment variables using pydantic-settings.
|
||||
"""
|
||||
|
||||
from pydantic_settings import BaseSettings
|
||||
|
||||
|
||||
class SharedSettings(BaseSettings):
    """Environment-driven settings shared by every backend service.

    Values are read from the environment (or a .env file); the literals
    below are development defaults and must be overridden in production.
    """

    # Database connection string (async MySQL driver)
    DATABASE_URL: str = "mysql+aiomysql://root:password@localhost/weibo_hotsign"

    # Redis connection string
    REDIS_URL: str = "redis://localhost:6379/0"

    # JWT signing configuration
    JWT_SECRET_KEY: str = "change-me-in-production"
    JWT_ALGORITHM: str = "HS256"
    JWT_EXPIRATION_HOURS: int = 24

    # Key material for cookie encryption
    COOKIE_ENCRYPTION_KEY: str = "change-me-in-production"

    class Config:
        case_sensitive = True
        env_file = ".env"


# Singleton instance imported by the services.
shared_settings = SharedSettings()
|
||||
44
backend/shared/crypto.py
Normal file
44
backend/shared/crypto.py
Normal file
@@ -0,0 +1,44 @@
|
||||
"""
|
||||
AES-256-GCM Cookie encryption / decryption utilities.
|
||||
"""
|
||||
|
||||
import base64
|
||||
import hashlib
|
||||
|
||||
from Crypto.Cipher import AES
|
||||
|
||||
|
||||
def derive_key(raw_key: str) -> bytes:
    """Stretch an arbitrary string into a 32-byte AES-256 key via SHA-256."""
    digest = hashlib.sha256(raw_key.encode("utf-8"))
    return digest.digest()
|
||||
|
||||
|
||||
def encrypt_cookie(plaintext: str, key: bytes) -> tuple[str, str]:
    """
    Encrypt *plaintext* with AES-256-GCM under *key*.

    The 16-byte auth tag is appended to the ciphertext before base64
    encoding so decrypt_cookie can verify integrity later.

    Returns:
        (ciphertext_b64, iv_b64) — base64 ciphertext+tag and base64 nonce.
    """
    cipher = AES.new(key, AES.MODE_GCM)
    ciphertext, tag = cipher.encrypt_and_digest(plaintext.encode("utf-8"))
    payload_b64 = base64.b64encode(ciphertext + tag).decode("utf-8")
    nonce_b64 = base64.b64encode(cipher.nonce).decode("utf-8")
    return payload_b64, nonce_b64
|
||||
|
||||
|
||||
def decrypt_cookie(ciphertext_b64: str, iv_b64: str, key: bytes) -> str:
    """
    Reverse encrypt_cookie: authenticate and decrypt a cookie string.

    Raises ValueError when the key is wrong or the data was tampered with.
    """
    blob = base64.b64decode(ciphertext_b64)
    nonce = base64.b64decode(iv_b64)
    body, tag = blob[:-16], blob[-16:]  # trailing 16 bytes are the GCM tag
    cipher = AES.new(key, AES.MODE_GCM, nonce=nonce)
    return cipher.decrypt_and_verify(body, tag).decode("utf-8")
|
||||
18
backend/shared/models/__init__.py
Normal file
18
backend/shared/models/__init__.py
Normal file
@@ -0,0 +1,18 @@
|
||||
"""Shared ORM models for Weibo-HotSign."""
|
||||
|
||||
from .base import Base, get_db, engine, AsyncSessionLocal
|
||||
from .user import User
|
||||
from .account import Account
|
||||
from .task import Task
|
||||
from .signin_log import SigninLog
|
||||
|
||||
__all__ = [
|
||||
"Base",
|
||||
"get_db",
|
||||
"engine",
|
||||
"AsyncSessionLocal",
|
||||
"User",
|
||||
"Account",
|
||||
"Task",
|
||||
"SigninLog",
|
||||
]
|
||||
BIN
backend/shared/models/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
backend/shared/models/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/shared/models/__pycache__/account.cpython-311.pyc
Normal file
BIN
backend/shared/models/__pycache__/account.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/shared/models/__pycache__/base.cpython-311.pyc
Normal file
BIN
backend/shared/models/__pycache__/base.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/shared/models/__pycache__/signin_log.cpython-311.pyc
Normal file
BIN
backend/shared/models/__pycache__/signin_log.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/shared/models/__pycache__/task.cpython-311.pyc
Normal file
BIN
backend/shared/models/__pycache__/task.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/shared/models/__pycache__/user.cpython-311.pyc
Normal file
BIN
backend/shared/models/__pycache__/user.cpython-311.pyc
Normal file
Binary file not shown.
30
backend/shared/models/account.py
Normal file
30
backend/shared/models/account.py
Normal file
@@ -0,0 +1,30 @@
|
||||
"""Account ORM model."""
|
||||
|
||||
import uuid
|
||||
|
||||
from sqlalchemy import Column, DateTime, ForeignKey, String, Text
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from .base import Base
|
||||
|
||||
|
||||
class Account(Base):
    """A linked Weibo account owned by a platform user."""

    __tablename__ = "accounts"

    id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    user_id = Column(String(36), ForeignKey("users.id", ondelete="CASCADE"), nullable=False)
    weibo_user_id = Column(String(20), nullable=False)
    remark = Column(String(100))
    # Cookie blob is stored encrypted; `iv` presumably holds the base64
    # nonce from shared.crypto.encrypt_cookie — confirm against callers.
    encrypted_cookies = Column(Text, nullable=False)
    iv = Column(String(32), nullable=False)
    status = Column(String(20), default="pending")
    last_checked_at = Column(DateTime, nullable=True)
    created_at = Column(DateTime, server_default=func.now())

    user = relationship("User", back_populates="accounts")
    tasks = relationship("Task", back_populates="account", cascade="all, delete-orphan")
    signin_logs = relationship("SigninLog", back_populates="account")

    def __repr__(self):
        return f"<Account(id={self.id}, weibo_user_id='{self.weibo_user_id}')>"
|
||||
33
backend/shared/models/base.py
Normal file
33
backend/shared/models/base.py
Normal file
@@ -0,0 +1,33 @@
|
||||
"""
|
||||
Database engine, session factory, and declarative base.
|
||||
"""
|
||||
|
||||
from typing import AsyncGenerator
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
|
||||
from sqlalchemy.orm import sessionmaker, declarative_base
|
||||
|
||||
from ..config import shared_settings
|
||||
|
||||
# Engine options; pool tuning is only applied to non-SQLite URLs.
_engine_kwargs: dict = {"echo": False}
if "sqlite" not in shared_settings.DATABASE_URL:
    _engine_kwargs.update(pool_size=20, max_overflow=30, pool_pre_ping=True)

engine = create_async_engine(shared_settings.DATABASE_URL, **_engine_kwargs)

# Session factory; expire_on_commit=False keeps attributes readable
# after commit without a re-fetch.
AsyncSessionLocal = sessionmaker(
    engine,
    class_=AsyncSession,
    expire_on_commit=False,
)

Base = declarative_base()
|
||||
|
||||
|
||||
async def get_db() -> AsyncGenerator[AsyncSession, None]:
    """Dependency that yields one async database session per request.

    The `async with` block already closes the session on exit, so the
    original's extra `finally: await session.close()` was redundant and
    has been removed.
    """
    async with AsyncSessionLocal() as session:
        yield session
|
||||
23
backend/shared/models/signin_log.py
Normal file
23
backend/shared/models/signin_log.py
Normal file
@@ -0,0 +1,23 @@
|
||||
"""SigninLog ORM model."""
|
||||
|
||||
from sqlalchemy import Integer, Column, DateTime, ForeignKey, JSON, String, Text
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from .base import Base
|
||||
|
||||
|
||||
class SigninLog(Base):
    """Record of a single sign-in attempt for an account."""

    __tablename__ = "signin_logs"

    id = Column(Integer, primary_key=True, autoincrement=True)
    account_id = Column(String(36), ForeignKey("accounts.id"), nullable=False)
    topic_title = Column(String(100))
    status = Column(String(20), nullable=False)
    reward_info = Column(JSON, nullable=True)
    error_message = Column(Text, nullable=True)
    signed_at = Column(DateTime, server_default=func.now())

    account = relationship("Account", back_populates="signin_logs")

    def __repr__(self):
        return f"<SigninLog(id={self.id}, status='{self.status}')>"
|
||||
24
backend/shared/models/task.py
Normal file
24
backend/shared/models/task.py
Normal file
@@ -0,0 +1,24 @@
|
||||
"""Task ORM model."""
|
||||
|
||||
import uuid
|
||||
|
||||
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, String
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from .base import Base
|
||||
|
||||
|
||||
class Task(Base):
    """Scheduled sign-in task: a cron expression bound to one account."""

    __tablename__ = "tasks"

    # UUID4 primary key, generated client-side.
    id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    # Owning account; rows are removed when the account is deleted.
    account_id = Column(String(36), ForeignKey("accounts.id", ondelete="CASCADE"), nullable=False)
    # Schedule in cron syntax, interpreted by the task scheduler service.
    cron_expression = Column(String(50), nullable=False)
    # Soft on/off switch; disabled tasks stay in the table.
    is_enabled = Column(Boolean, default=True)
    created_at = Column(DateTime, server_default=func.now())

    account = relationship("Account", back_populates="tasks")

    def __repr__(self):
        return f"<Task(id={self.id}, cron='{self.cron_expression}')>"
|
||||
25
backend/shared/models/user.py
Normal file
25
backend/shared/models/user.py
Normal file
@@ -0,0 +1,25 @@
|
||||
"""User ORM model."""
|
||||
|
||||
import uuid
|
||||
|
||||
from sqlalchemy import Boolean, Column, DateTime, String
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from .base import Base
|
||||
|
||||
|
||||
class User(Base):
    """Application user who owns one or more Weibo accounts."""

    __tablename__ = "users"

    # UUID4 primary key, generated client-side.
    id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    # Login name; unique and indexed for lookup at sign-in.
    username = Column(String(50), unique=True, nullable=False, index=True)
    email = Column(String(255), unique=True, nullable=False, index=True)
    # Password hash only — plaintext is never stored.
    hashed_password = Column(String(255), nullable=False)
    created_at = Column(DateTime, server_default=func.now())
    # Soft-delete / suspension flag.
    is_active = Column(Boolean, default=True)

    # Deleting a user deletes their Weibo accounts (delete-orphan cascade).
    accounts = relationship("Account", back_populates="user", cascade="all, delete-orphan")

    def __repr__(self):
        return f"<User(id={self.id}, username='{self.username}')>"
|
||||
35
backend/shared/response.py
Normal file
35
backend/shared/response.py
Normal file
@@ -0,0 +1,35 @@
|
||||
"""
|
||||
Unified API response format utilities.
|
||||
"""
|
||||
|
||||
from typing import Any, List, Optional
|
||||
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
|
||||
def success_response(data: Any = None, message: str = "Operation successful") -> dict:
    """Build the unified success envelope: {success, data, message}."""
    return {"success": True, "data": data, "message": message}
|
||||
|
||||
|
||||
def error_response(
    message: str,
    code: str,
    details: Optional[List[dict]] = None,
    status_code: int = 400,
) -> JSONResponse:
    """Build the unified error envelope and wrap it in a JSONResponse.

    Args:
        message: Human-readable error description.
        code: Machine-readable error code.
        details: Optional structured sub-errors (defaults to an empty list).
        status_code: HTTP status for the response (default 400).
    """
    payload: dict = {
        "success": False,
        "data": None,
        "message": message,
        "error": {
            "code": code,
            "details": details or [],
        },
    }
    return JSONResponse(status_code=status_code, content=payload)
|
||||
34
backend/signin_executor/Dockerfile
Normal file
34
backend/signin_executor/Dockerfile
Normal file
@@ -0,0 +1,34 @@
|
||||
# Weibo-HotSign Sign-in Executor Service Dockerfile
FROM python:3.11-slim

# Set working directory
WORKDIR /app

# Install system dependencies.
# FIX: curl is required by the HEALTHCHECK below, but python:3.11-slim does not
# ship it — without installing curl the health check always fails and the
# container is permanently reported unhealthy.
RUN apt-get update && apt-get install -y --no-install-recommends \
    gcc \
    default-libmysqlclient-dev \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements first for better caching
COPY requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY app/ ./app/

# Create non-root user for security
RUN groupadd -r appuser && useradd -r -g appuser appuser
USER appuser

# Expose port
EXPOSE 8000

# Health check
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8000/health || exit 1

# Start application
CMD ["python", "-m", "app.main"]
|
||||
56
backend/signin_executor/app/config.py
Normal file
56
backend/signin_executor/app/config.py
Normal file
@@ -0,0 +1,56 @@
|
||||
"""
|
||||
Configuration for Sign-in Executor Service
|
||||
"""
|
||||
|
||||
import os
|
||||
from pydantic_settings import BaseSettings
|
||||
|
||||
class Settings(BaseSettings):
    """Sign-in Executor settings.

    Every value can be overridden via environment variables. Defaults are
    resolved eagerly with os.getenv at import time (pydantic-settings would
    also read the environment itself, so the explicit getenv is belt-and-braces).
    """

    # Server settings
    HOST: str = os.getenv("HOST", "0.0.0.0")
    PORT: int = int(os.getenv("PORT", 8000))
    DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true"

    # Database settings
    # NOTE(security): this default embeds credentials and a public IP address
    # directly in source control. These should be rotated and the default
    # replaced with a local placeholder; always set DATABASE_URL in deployment.
    DATABASE_URL: str = os.getenv(
        "DATABASE_URL",
        "mysql+aiomysql://weibo:123456789@118.195.133.163/weibo"
    )
    REDIS_URL: str = os.getenv("REDIS_URL", "redis://redis:6379")

    # External service URLs (docker-compose service hostnames by default)
    PROXY_POOL_URL: str = os.getenv("PROXY_POOL_URL", "http://proxy-pool:8080")
    BROWSER_AUTOMATION_URL: str = os.getenv("BROWSER_AUTOMATION_URL", "http://browser-automation:3001")
    TASK_SCHEDULER_URL: str = os.getenv("TASK_SCHEDULER_URL", "http://task-scheduler:8000")

    # Weibo API settings (fixed endpoints, not environment-configurable)
    WEIBO_LOGIN_URL: str = "https://weibo.com/login.php"
    WEIBO_SUPER_TOPIC_URL: str = "https://weibo.com/p/aj/general/button"

    # Anti-bot protection settings: bounds for randomized inter-request delays
    RANDOM_DELAY_MIN: float = float(os.getenv("RANDOM_DELAY_MIN", "1.0"))
    RANDOM_DELAY_MAX: float = float(os.getenv("RANDOM_DELAY_MAX", "3.0"))
    USER_AGENT_ROTATION: bool = os.getenv("USER_AGENT_ROTATION", "True").lower() == "true"

    # Cookie and session settings
    # NOTE(security): the placeholder key must be overridden in every
    # environment — cookies encrypted with it are effectively unprotected.
    COOKIE_ENCRYPTION_KEY: str = os.getenv("COOKIE_ENCRYPTION_KEY", "your-cookie-encryption-key")
    SESSION_TIMEOUT_MINUTES: int = int(os.getenv("SESSION_TIMEOUT_MINUTES", "30"))

    # Browser automation settings
    BROWSER_HEADLESS: bool = os.getenv("BROWSER_HEADLESS", "True").lower() == "true"
    BROWSER_TIMEOUT_SECONDS: int = int(os.getenv("BROWSER_TIMEOUT_SECONDS", "30"))

    # Task execution settings
    MAX_CONCURRENT_SIGNIN: int = int(os.getenv("MAX_CONCURRENT_SIGNIN", "5"))
    TASK_TIMEOUT_SECONDS: int = int(os.getenv("TASK_TIMEOUT_SECONDS", "300"))

    # Logging
    LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")

    class Config:
        case_sensitive = True
        env_file = ".env"

# Module-level singleton imported by the rest of the service.
settings = Settings()
|
||||
226
backend/signin_executor/app/main.py
Normal file
226
backend/signin_executor/app/main.py
Normal file
@@ -0,0 +1,226 @@
|
||||
"""
|
||||
Weibo-HotSign Sign-in Executor Service
|
||||
Core service that executes sign-in tasks and handles Weibo interactions
|
||||
"""
|
||||
|
||||
from fastapi import FastAPI, BackgroundTasks, HTTPException, status, Depends, Request
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from fastapi.responses import JSONResponse
|
||||
import uvicorn
|
||||
import asyncio
|
||||
import httpx
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Dict, Any, Optional
|
||||
import os
|
||||
|
||||
from app.config import settings
|
||||
from app.services.signin_service import SignInService
|
||||
from app.services.weibo_client import WeiboClient
|
||||
from app.models.signin_models import SignInRequest, SignInResult, TaskStatus
|
||||
|
||||
# Initialize FastAPI app
|
||||
# FastAPI application for the sign-in executor microservice.
app = FastAPI(
    title="Weibo-HotSign Sign-in Executor",
    description="Core service for executing Weibo super topic sign-in tasks",
    version="1.0.0",
    docs_url="/docs",
    redoc_url="/redoc"
)

# CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # In production, specify actual origins
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Module logger.
# FIX: the route handlers below call logger.info(...) / logger.error(...) but
# no `logger` was ever defined in this module, so any request that reached a
# logging line raised NameError instead of being handled.
logger = logging.getLogger(__name__)

# Initialize services shared by all request handlers.
signin_service = SignInService()
weibo_client = WeiboClient()


@app.on_event("startup")
async def startup_event():
    """Print a startup banner with the docs URL when the service boots."""
    print("🚀 Weibo-HotSign Sign-in Executor starting up...")
    print(f"📡 Service Documentation: http://{settings.HOST}:{settings.PORT}/docs")
    print("🔧 Ready to process sign-in tasks...")


@app.on_event("shutdown")
async def shutdown_event():
    """Print a shutdown banner; no resources need explicit cleanup here."""
    print("👋 Weibo-HotSign Sign-in Executor shutting down...")
|
||||
|
||||
@app.get("/")
async def root():
    """Service identity endpoint: static metadata about this executor."""
    capabilities = [
        "Weibo login and verification",
        "Super topic sign-in automation",
        "Anti-bot protection handling",
        "Proxy integration",
        "Browser fingerprint simulation",
    ]
    return {
        "service": "Weibo-HotSign Sign-in Executor",
        "status": "running",
        "version": "1.0.0",
        "description": "Core sign-in execution service for Weibo super topics",
        "capabilities": capabilities,
    }
|
||||
|
||||
@app.get("/health")
async def health_check():
    """Liveness endpoint consumed by the container HEALTHCHECK.

    NOTE(review): the "connected" values for database/redis are hard-coded,
    not probed — this reports liveness of the web process only.
    """
    dependencies = {
        "database": "connected",
        "redis": "connected",
        "proxy_pool": f"{settings.PROXY_POOL_URL}",
        "browser_automation": f"{settings.BROWSER_AUTOMATION_URL}",
    }
    return {
        "status": "healthy",
        "service": "signin-executor",
        "timestamp": datetime.now().isoformat(),
        "dependencies": dependencies,
    }
|
||||
|
||||
@app.post("/api/v1/signin/execute", response_model=SignInResult)
async def execute_signin_task(
    signin_request: SignInRequest,
    background_tasks: BackgroundTasks
):
    """
    Accept a sign-in task for the given account and queue it.

    Called by the task scheduler. The actual workflow runs as a FastAPI
    background task so this endpoint returns immediately (status "accepted")
    instead of blocking until the sign-in finishes.
    """
    try:
        logger.info(f"🎯 Received sign-in request for account: {signin_request.account_id}")

        # Hand the long-running workflow off to the background runner.
        background_tasks.add_task(
            signin_service.execute_signin_task,
            signin_request.account_id,
            signin_request.task_id,
        )

        # Immediate acknowledgement; progress is tracked via the status endpoint.
        accepted = SignInResult(
            task_id=signin_request.task_id,
            account_id=signin_request.account_id,
            status="accepted",
            message="Sign-in task accepted and queued for execution",
            started_at=datetime.now(),
            estimated_completion=None,
        )
        return accepted
    except Exception as e:
        logger.error(f"❌ Failed to accept sign-in task: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to accept sign-in task: {str(e)}",
        )
|
||||
|
||||
@app.get("/api/v1/signin/status/{task_id}", response_model=TaskStatus)
async def get_task_status(task_id: str):
    """Return the live status of a sign-in task; 404 when the id is unknown."""
    try:
        status_info = await signin_service.get_task_status(task_id)
    except Exception as e:
        # Unexpected failure while looking up the task.
        logger.error(f"❌ Error getting task status: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Internal server error",
        )
    if not status_info:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Task {task_id} not found",
        )
    return status_info
|
||||
|
||||
@app.post("/api/v1/signin/test")
async def test_signin_capability():
    """Test sign-in service capabilities (for debugging).

    Probes each upstream dependency and reports a per-check boolean plus an
    aggregate status ("operational" only when every probe passes).
    """
    try:
        tests = {
            "weibo_connectivity": await _test_weibo_connectivity(),
            "proxy_pool_access": await _test_proxy_pool(),
            "browser_automation": await _test_browser_automation(),
            "database_connection": await _test_database_connection(),
        }
    except Exception as e:
        logger.error(f"❌ Capability test failed: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Capability test failed: {str(e)}",
        )

    overall = "operational" if all(tests.values()) else "degraded"
    return {
        "test_timestamp": datetime.now().isoformat(),
        "tests": tests,
        "overall_status": overall,
    }
|
||||
|
||||
async def _test_weibo_connectivity() -> bool:
    """Return True when https://weibo.com answers with HTTP 200.

    Best-effort probe: any request failure is reported as False rather than
    propagated.
    """
    try:
        async with httpx.AsyncClient(timeout=10.0) as client:
            response = await client.get("https://weibo.com", follow_redirects=True)
            return response.status_code == 200
    # FIX: was a bare `except:`, which also swallowed KeyboardInterrupt and
    # SystemExit; narrow to Exception.
    except Exception:
        return False
|
||||
|
||||
async def _test_proxy_pool() -> bool:
    """Return True when the proxy-pool service's /health endpoint answers 200.

    Best-effort probe: any request failure is reported as False.
    """
    try:
        async with httpx.AsyncClient(timeout=5.0) as client:
            response = await client.get(f"{settings.PROXY_POOL_URL}/health", timeout=5.0)
            return response.status_code == 200
    # FIX: was a bare `except:`, which also swallowed KeyboardInterrupt and
    # SystemExit; narrow to Exception.
    except Exception:
        return False
|
||||
|
||||
async def _test_browser_automation() -> bool:
    """Return True when the browser-automation service's /health answers 200.

    Best-effort probe: any request failure is reported as False.
    """
    try:
        async with httpx.AsyncClient(timeout=5.0) as client:
            response = await client.get(f"{settings.BROWSER_AUTOMATION_URL}/health", timeout=5.0)
            return response.status_code == 200
    # FIX: was a bare `except:`, which also swallowed KeyboardInterrupt and
    # SystemExit; narrow to Exception.
    except Exception:
        return False
|
||||
|
||||
async def _test_database_connection() -> bool:
|
||||
"""Test database connectivity"""
|
||||
try:
|
||||
# Simple database ping test
|
||||
return True # Simplified for demo
|
||||
except:
|
||||
return False
|
||||
|
||||
@app.exception_handler(HTTPException)
async def http_exception_handler(request: Request, exc: HTTPException):
    """Render every HTTPException in the service's unified error envelope."""
    body = {
        "success": False,
        "data": None,
        "message": exc.detail,
        "error": {
            "code": f"HTTP_{exc.status_code}",
            "details": [],
        },
    }
    return JSONResponse(status_code=exc.status_code, content=body)
|
||||
|
||||
if __name__ == "__main__":
    # HOST/PORT env vars take precedence over settings. NOTE(review): Settings
    # already reads the same env vars, so this re-read is a redundant (but
    # harmless) second override.
    host = os.getenv("HOST", settings.HOST)
    port = int(os.getenv("PORT", settings.PORT))
    uvicorn.run(
        app,
        host=host,
        port=port,
        # DEBUG flips uvicorn into verbose logging.
        log_level="info" if not settings.DEBUG else "debug"
    )
|
||||
89
backend/signin_executor/app/models/signin_models.py
Normal file
89
backend/signin_executor/app/models/signin_models.py
Normal file
@@ -0,0 +1,89 @@
|
||||
"""
|
||||
Data models for Sign-in Executor Service
|
||||
"""
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
from typing import Optional, Dict, Any, List
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
|
||||
class SignInRequest(BaseModel):
    """Request body for POST /api/v1/signin/execute (sent by the scheduler)."""
    task_id: str = Field(..., description="Unique task identifier")
    account_id: str = Field(..., description="Weibo account identifier")
    # Defaults to the time the request object is constructed.
    timestamp: Optional[datetime] = Field(default_factory=datetime.now, description="Request timestamp")
    requested_by: Optional[str] = Field(default="task_scheduler", description="Request source")
|
||||
|
||||
class SignInResult(BaseModel):
    """Result/acknowledgement payload for a sign-in task execution.

    Returned immediately with status "accepted" by the execute endpoint and
    again with a final status by the background workflow.
    """
    task_id: str = Field(..., description="Task identifier")
    account_id: str = Field(..., description="Account identifier")
    status: str = Field(..., description="Task status: accepted, running, success, failed")
    message: str = Field(..., description="Human readable result message")
    started_at: datetime = Field(..., description="Task start timestamp")
    completed_at: Optional[datetime] = Field(None, description="Task completion timestamp")
    estimated_completion: Optional[datetime] = Field(None, description="Estimated completion time")
    reward_info: Optional[Dict[str, Any]] = Field(None, description="Reward details like exp, credits")
    error_message: Optional[str] = Field(None, description="Error details if failed")
    signed_topics: Optional[List[str]] = Field(None, description="List of successfully signed topics")
    total_topics: Optional[int] = Field(None, description="Total number of topics attempted")
|
||||
|
||||
class TaskStatus(BaseModel):
    """Status model for tracking sign-in task progress.

    Instances live in SignInService.active_tasks and are mutated in place as
    the workflow advances through its steps.
    """
    task_id: str = Field(..., description="Task identifier")
    account_id: str = Field(..., description="Account identifier")
    status: str = Field(..., description="Current status: pending, running, success, failed")
    progress_percentage: int = Field(default=0, ge=0, le=100, description="Progress percentage")
    current_step: Optional[str] = Field(None, description="Current execution step")
    steps_completed: List[str] = Field(default_factory=list, description="Completed steps")
    steps_remaining: List[str] = Field(default_factory=list, description="Remaining steps")
    started_at: datetime = Field(..., description="Start timestamp")
    updated_at: datetime = Field(default_factory=datetime.now, description="Last update timestamp")
    estimated_completion: Optional[datetime] = Field(None, description="Estimated completion")
    # FIX: SignInService.execute_signin_task assigns `task_status.error_message`
    # in its failure path; without this field that assignment raises on a
    # pydantic model, masking the original error. Optional with a None
    # default, so existing constructor calls are unaffected.
    error_message: Optional[str] = Field(None, description="Error details when status is 'failed'")
|
||||
|
||||
class WeiboAccount(BaseModel):
    """Weibo account information used during sign-in.

    Cookies are stored encrypted (AES-style: ciphertext plus IV); they are
    decrypted by the Weibo client just before use.
    """
    id: UUID = Field(..., description="Account UUID")
    user_id: UUID = Field(..., description="Owner user UUID")
    weibo_user_id: str = Field(..., description="Weibo user ID")
    remark: Optional[str] = Field(None, description="User remark")
    encrypted_cookies: str = Field(..., description="Encrypted Weibo cookies")
    iv: str = Field(..., description="Encryption initialization vector")
    status: str = Field(default="active", description="Account status: active, invalid_cookie, banned")
    last_checked_at: Optional[datetime] = Field(None, description="Last validation timestamp")
|
||||
|
||||
class SignInLog(BaseModel):
    """Sign-in operation log entry (API-layer counterpart of the ORM row)."""
    # None until the row is persisted and the DB assigns an id.
    id: Optional[int] = Field(None, description="Log entry ID")
    account_id: UUID = Field(..., description="Account UUID")
    topic_title: Optional[str] = Field(None, description="Signed topic title")
    status: str = Field(..., description="Sign-in status")
    reward_info: Optional[Dict[str, Any]] = Field(None, description="Reward information")
    error_message: Optional[str] = Field(None, description="Error details")
    signed_at: datetime = Field(default_factory=datetime.now, description="Sign-in timestamp")
    execution_time_ms: Optional[int] = Field(None, description="Execution time in milliseconds")
|
||||
|
||||
class WeiboSuperTopic(BaseModel):
    """One Weibo super topic and its sign-in state for the current account."""
    id: str = Field(..., description="Topic ID")
    title: str = Field(..., description="Topic title")
    url: str = Field(..., description="Topic URL")
    # True means today's sign-in was already done; the executor skips these.
    is_signed: bool = Field(default=False, description="Whether already signed")
    sign_url: Optional[str] = Field(None, description="Sign-in API URL")
    reward_exp: Optional[int] = Field(None, description="Experience points reward")
    reward_credit: Optional[int] = Field(None, description="Credit points reward")
|
||||
|
||||
class AntiBotConfig(BaseModel):
    """Anti-bot protection configuration (delays, rotation, proxies)."""
    random_delay_min: float = Field(default=1.0, description="Minimum random delay seconds")
    random_delay_max: float = Field(default=3.0, description="Maximum random delay seconds")
    user_agent_rotation: bool = Field(default=True, description="Enable user agent rotation")
    proxy_enabled: bool = Field(default=True, description="Enable proxy usage")
    fingerprint_simulation: bool = Field(default=True, description="Enable browser fingerprint simulation")
|
||||
|
||||
class BrowserAutomationRequest(BaseModel):
    """Request payload sent to the external browser-automation service."""
    target_url: str = Field(..., description="Target URL to automate")
    action_type: str = Field(..., description="Action type: signin, extract, click")
    context_data: Optional[Dict[str, Any]] = Field(None, description="Additional context data")
    timeout_seconds: int = Field(default=30, description="Operation timeout")
|
||||
271
backend/signin_executor/app/services/signin_service.py
Normal file
271
backend/signin_executor/app/services/signin_service.py
Normal file
@@ -0,0 +1,271 @@
|
||||
"""
|
||||
Core sign-in business logic service
|
||||
Handles Weibo super topic sign-in operations
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import httpx
|
||||
import logging
|
||||
import random
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, Any, List, Optional
|
||||
from uuid import UUID
|
||||
|
||||
from app.config import settings
|
||||
from app.models.signin_models import SignInRequest, SignInResult, TaskStatus, WeiboAccount, WeiboSuperTopic, AntiBotConfig
|
||||
from app.services.weibo_client import WeiboClient
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class SignInService:
    """Main service for handling sign-in operations.

    Orchestrates the full workflow (validate account -> set up session ->
    fetch topics -> sign each topic -> record results) and tracks per-task
    progress in the in-memory ``active_tasks`` map. State is process-local:
    task status is lost on restart.
    """

    def __init__(self):
        # Client for Weibo HTTP interactions.
        self.weibo_client = WeiboClient()
        # task_id -> live TaskStatus, mutated in place as the workflow runs.
        self.active_tasks: Dict[str, TaskStatus] = {}
        # Anti-bot behaviour knobs, seeded from service settings.
        self.antibot_config = AntiBotConfig(
            random_delay_min=settings.RANDOM_DELAY_MIN,
            random_delay_max=settings.RANDOM_DELAY_MAX,
            user_agent_rotation=settings.USER_AGENT_ROTATION,
            proxy_enabled=True,
            fingerprint_simulation=True
        )

    async def execute_signin_task(self, account_id: str, task_id: str) -> SignInResult:
        """
        Execute complete sign-in workflow for an account.

        This is the main business logic method, run as a FastAPI background
        task. It never raises: failures are captured and returned as a
        SignInResult with status "failed". Progress is published through
        ``self.active_tasks[task_id]``.
        """
        logger.info(f"🎯 Starting sign-in execution for account {account_id}, task {task_id}")

        # Initialize task status with the full step plan.
        task_status = TaskStatus(
            task_id=task_id,
            account_id=account_id,
            status="running",
            progress_percentage=0,
            current_step="initializing",
            steps_completed=[],
            steps_remaining=[
                "validate_account",
                "setup_session",
                "get_super_topics",
                "execute_signin",
                "record_results"
            ],
            started_at=datetime.now()
        )
        self.active_tasks[task_id] = task_status

        try:
            # Step 1: Validate account
            task_status.current_step = "validate_account"
            await self._update_task_progress(task_id, 10)

            account = await self._get_account_info(account_id)
            if not account or account.status != "active":
                raise Exception(f"Account {account_id} not found or inactive")

            task_status.steps_completed.append("validate_account")
            task_status.steps_remaining.remove("validate_account")
            # NOTE(review): direct percentage writes skip the updated_at
            # refresh performed by _update_task_progress.
            task_status.progress_percentage = 20

            # Step 2: Setup session with proxy and fingerprint
            task_status.current_step = "setup_session"
            await self._apply_anti_bot_protection()

            task_status.steps_completed.append("setup_session")
            task_status.steps_remaining.remove("setup_session")
            task_status.progress_percentage = 30

            # Step 3: Get super topics list
            task_status.current_step = "get_super_topics"
            await self._update_task_progress(task_id, 40)

            super_topics = await self._get_super_topics_list(account)
            if not super_topics:
                # Empty list is not fatal: the run completes with 0 topics.
                logger.warning(f"No super topics found for account {account_id}")

            task_status.steps_completed.append("get_super_topics")
            task_status.steps_remaining.remove("get_super_topics")
            task_status.progress_percentage = 50

            # Step 4: Execute signin for each topic
            task_status.current_step = "execute_signin"
            signin_results = await self._execute_topic_signin(account, super_topics, task_id)

            task_status.steps_completed.append("execute_signin")
            task_status.steps_remaining.remove("execute_signin")
            task_status.progress_percentage = 80

            # Step 5: Record results
            task_status.current_step = "record_results"
            await self._update_task_progress(task_id, 90)

            # Per-topic errors are reported inside reward_info; the run is
            # still counted an overall "success".
            result = SignInResult(
                task_id=task_id,
                account_id=account_id,
                status="success",
                message=f"Successfully processed {len(signin_results['signed'])} topics",
                started_at=task_status.started_at,
                completed_at=datetime.now(),
                signed_topics=signin_results['signed'],
                total_topics=len(super_topics) if super_topics else 0,
                reward_info={
                    "topics_signed": len(signin_results['signed']),
                    "topics_already_signed": len(signin_results['already_signed']),
                    "errors": len(signin_results['errors'])
                }
            )

            task_status.status = "success"
            task_status.progress_percentage = 100
            task_status.current_step = "completed"

            logger.info(f"✅ Sign-in task {task_id} completed successfully")
            return result

        except Exception as e:
            logger.error(f"❌ Sign-in task {task_id} failed: {e}")

            # Update task status to failed
            if task_id in self.active_tasks:
                task_status = self.active_tasks[task_id]
                task_status.status = "failed"
                # NOTE(review): TaskStatus as defined declares no
                # error_message field — on a pydantic model this assignment
                # itself raises, replacing the original error. The model must
                # define the field for this failure path to work.
                task_status.error_message = str(e)

            # Return failed result
            return SignInResult(
                task_id=task_id,
                account_id=account_id,
                status="failed",
                message=f"Sign-in failed: {str(e)}",
                # Fall back to "now" if the task was never registered.
                started_at=task_status.started_at if task_id in self.active_tasks else datetime.now(),
                completed_at=datetime.now(),
                error_message=str(e)
            )

    async def get_task_status(self, task_id: str) -> Optional[TaskStatus]:
        """Get current status of a sign-in task, or None if unknown."""
        return self.active_tasks.get(task_id)

    async def _update_task_progress(self, task_id: str, percentage: int) -> None:
        """Update task progress percentage and refresh its updated_at stamp."""
        if task_id in self.active_tasks:
            self.active_tasks[task_id].progress_percentage = percentage
            self.active_tasks[task_id].updated_at = datetime.now()

    async def _get_account_info(self, account_id: str) -> Optional[WeiboAccount]:
        """Get Weibo account information.

        Mock implementation — returns a canned active account instead of
        querying the database; None on any construction error (e.g. a
        non-UUID account_id).
        """
        try:
            # Mock implementation - in real system, query database
            # For demo, return mock account
            return WeiboAccount(
                id=UUID(account_id),
                user_id=UUID("12345678-1234-5678-9012-123456789012"),
                weibo_user_id="1234567890",
                remark="Demo Account",
                encrypted_cookies="mock_encrypted_cookies",
                iv="mock_iv_16_bytes",
                status="active",
                last_checked_at=datetime.now() - timedelta(hours=1)
            )
        except Exception as e:
            logger.error(f"Error fetching account {account_id}: {e}")
            return None

    async def _apply_anti_bot_protection(self) -> None:
        """Apply anti-bot protection measures.

        Currently only sleeps for a random interval within the configured
        bounds to mimic human pacing.
        """
        # Random delay to mimic human behavior
        delay = random.uniform(
            self.antibot_config.random_delay_min,
            self.antibot_config.random_delay_max
        )
        logger.debug(f"Applying random delay: {delay:.2f}s")
        await asyncio.sleep(delay)

        # Additional anti-bot measures would go here:
        # - User agent rotation
        # - Proxy selection
        # - Browser fingerprint simulation
        # - Request header randomization

    async def _get_super_topics_list(self, account: WeiboAccount) -> List[WeiboSuperTopic]:
        """Get list of super topics for account.

        Mock implementation — sleeps to simulate latency and returns three
        fixed topics (one already signed). Degrades to [] on error.
        """
        try:
            # Mock implementation - in real system, fetch from Weibo API
            # Simulate API call delay
            await asyncio.sleep(1)

            # Return mock super topics
            return [
                WeiboSuperTopic(
                    id="topic_001",
                    title="Python编程",
                    url="https://weibo.com/p/100808xxx",
                    is_signed=False,
                    sign_url="https://weibo.com/p/aj/general/button",
                    reward_exp=2,
                    reward_credit=1
                ),
                WeiboSuperTopic(
                    id="topic_002",
                    title="人工智能",
                    url="https://weibo.com/p/100808yyy",
                    is_signed=False,
                    sign_url="https://weibo.com/p/aj/general/button",
                    reward_exp=2,
                    reward_credit=1
                ),
                WeiboSuperTopic(
                    id="topic_003",
                    title="机器学习",
                    url="https://weibo.com/p/100808zzz",
                    is_signed=True,  # Already signed
                    sign_url="https://weibo.com/p/aj/general/button",
                    reward_exp=2,
                    reward_credit=1
                )
            ]
        except Exception as e:
            logger.error(f"Error fetching super topics: {e}")
            return []

    async def _execute_topic_signin(self, account: WeiboAccount, topics: List[WeiboSuperTopic], task_id: str) -> Dict[str, List[str]]:
        """Execute sign-in for each super topic.

        Returns three buckets of topic titles / messages: "signed",
        "already_signed" and "errors". A failure on one topic never aborts
        the remaining topics.
        """
        signed = []
        already_signed = []
        errors = []

        for topic in topics:
            try:
                # Add small delay between requests (anti-bot pacing)
                await asyncio.sleep(random.uniform(0.5, 1.5))

                if topic.is_signed:
                    already_signed.append(topic.title)
                    continue

                # Execute signin for this topic
                success = await self.weibo_client.sign_super_topic(
                    account=account,
                    topic=topic,
                    task_id=task_id
                )

                if success:
                    signed.append(topic.title)
                    logger.info(f"✅ Successfully signed topic: {topic.title}")
                else:
                    errors.append(f"Failed to sign topic: {topic.title}")

            except Exception as e:
                error_msg = f"Error signing topic {topic.title}: {str(e)}"
                logger.error(error_msg)
                errors.append(error_msg)

        return {
            "signed": signed,
            "already_signed": already_signed,
            "errors": errors
        }
|
||||
167
backend/signin_executor/app/services/weibo_client.py
Normal file
167
backend/signin_executor/app/services/weibo_client.py
Normal file
@@ -0,0 +1,167 @@
|
||||
"""
|
||||
Weibo API Client
|
||||
Handles all interactions with Weibo.com, including login, sign-in, and data fetching
|
||||
"""
|
||||
|
||||
import httpx
|
||||
import asyncio
|
||||
import logging
|
||||
import random
|
||||
from typing import Dict, Any, Optional, List
|
||||
|
||||
from app.config import settings
|
||||
from app.models.signin_models import WeiboAccount, WeiboSuperTopic
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class WeiboClient:
|
||||
"""Client for interacting with Weibo API"""
|
||||
|
||||
    def __init__(self):
        # Baseline headers sent with every request: a fixed desktop-Chrome UA.
        # NOTE(review): settings.USER_AGENT_ROTATION suggests rotation is
        # intended, but this client always uses this single UA string.
        self.base_headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
            "Accept": "application/json, text/plain, */*",
            "Accept-Language": "en-US,en;q=0.9,zh-CN;q=0.8,zh;q=0.7",
            "Connection": "keep-alive",
            "Referer": "https://weibo.com/"
        }
|
||||
|
||||
    async def verify_cookies(self, account: WeiboAccount) -> bool:
        """Verify if Weibo cookies are still valid.

        Decrypts the account's stored cookies and requests a logged-in-only
        page; validity is judged by a 200 response whose body contains the
        "我的首页" ("my homepage") marker. Any error reports False.
        """
        try:
            # Decrypt cookies (helper defined elsewhere in this class).
            cookies = self._decrypt_cookies(account.encrypted_cookies, account.iv)

            async with httpx.AsyncClient(cookies=cookies, headers=self.base_headers) as client:
                response = await client.get("https://weibo.com/mygroups", follow_redirects=True)

                if response.status_code == 200 and "我的首页" in response.text:
                    logger.info(f"Cookies for account {account.weibo_user_id} are valid")
                    return True
                else:
                    logger.warning(f"Cookies for account {account.weibo_user_id} are invalid")
                    return False
        except Exception as e:
            logger.error(f"Error verifying cookies: {e}")
            return False
|
||||
|
||||
async def get_super_topics(self, account: WeiboAccount) -> List[WeiboSuperTopic]:
|
||||
"""Get list of super topics for an account"""
|
||||
try:
|
||||
# Mock implementation - in real system, this would involve complex API calls
|
||||
# Simulate API call delay
|
||||
await asyncio.sleep(random.uniform(1.0, 2.0))
|
||||
|
||||
# Return mock data
|
||||
return [
|
||||
WeiboSuperTopic(id="topic_001", title="Python编程", url="...", is_signed=False),
|
||||
WeiboSuperTopic(id="topic_002", title="人工智能", url="...", is_signed=False),
|
||||
WeiboSuperTopic(id="topic_003", title="机器学习", url="...", is_signed=True)
|
||||
]
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching super topics: {e}")
|
||||
return []
|
||||
|
||||
async def sign_super_topic(self, account: WeiboAccount, topic: WeiboSuperTopic, task_id: str) -> bool:
|
||||
"""
|
||||
Execute sign-in for a single super topic
|
||||
"""
|
||||
try:
|
||||
# Decrypt cookies
|
||||
cookies = self._decrypt_cookies(account.encrypted_cookies, account.iv)
|
||||
|
||||
# Prepare request payload
|
||||
payload = {
|
||||
"ajwvr": "6",
|
||||
"api": "http://i.huati.weibo.com/aj/super/checkin",
|
||||
"id": topic.id,
|
||||
"location": "page_100808_super_index",
|
||||
"refer_flag": "100808_-_1",
|
||||
"refer_lflag": "100808_-_1",
|
||||
"ua": self.base_headers["User-Agent"],
|
||||
"is_new": "1",
|
||||
"is_from_ad": "0",
|
||||
"ext": "mi_898_1_0_0"
|
||||
}
|
||||
|
||||
# In a real scenario, we might need to call browser automation service
|
||||
# to get signed parameters or handle JS challenges
|
||||
|
||||
# Simulate API call
|
||||
await asyncio.sleep(random.uniform(0.5, 1.5))
|
||||
|
||||
# Mock response - assume success
|
||||
response_data = {
|
||||
"code": "100000",
|
||||
"msg": "签到成功",
|
||||
"data": {
|
||||
"tip": "签到成功",
|
||||
"alert_title": "签到成功",
|
||||
"alert_subtitle": "恭喜你成为今天第12345位签到的人",
|
||||
"reward": {"exp": 2, "credit": 1}
|
||||
}
|
||||
}
|
||||
|
||||
if response_data.get("code") == "100000":
|
||||
logger.info(f"Successfully signed topic: {topic.title}")
|
||||
return True
|
||||
elif response_data.get("code") == "382004":
|
||||
logger.info(f"Topic {topic.title} already signed today")
|
||||
return True # Treat as success
|
||||
else:
|
||||
logger.error(f"Failed to sign topic {topic.title}: {response_data.get('msg')}")
|
||||
return False
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Exception signing topic {topic.title}: {e}")
|
||||
return False
|
||||
|
||||
def _decrypt_cookies(self, encrypted_cookies: str, iv: str) -> Dict[str, str]:
|
||||
"""
|
||||
Decrypt cookies using AES-256-GCM
|
||||
In a real system, this would use a proper crypto library
|
||||
"""
|
||||
try:
|
||||
# Mock implementation - return dummy cookies
|
||||
return {
|
||||
"SUB": "_2A25z...",
|
||||
"SUBP": "0033Wr...",
|
||||
"ALF": "16...",
|
||||
"SSOLoginState": "16...",
|
||||
"SCF": "...",
|
||||
"UN": "testuser"
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to decrypt cookies: {e}")
|
||||
return {}
|
||||
|
||||
async def get_proxy(self) -> Optional[Dict[str, str]]:
|
||||
"""Get a proxy from the proxy pool service"""
|
||||
try:
|
||||
async with httpx.AsyncClient(timeout=5.0) as client:
|
||||
response = await client.get(f"{settings.PROXY_POOL_URL}/get")
|
||||
if response.status_code == 200:
|
||||
proxy_info = response.json()
|
||||
return {
|
||||
"http://": f"http://{proxy_info['proxy']}",
|
||||
"https://": f"https://{proxy_info['proxy']}"
|
||||
}
|
||||
else:
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get proxy: {e}")
|
||||
return None
|
||||
|
||||
async def get_browser_fingerprint(self) -> Dict[str, Any]:
|
||||
"""Get a browser fingerprint from the generator service"""
|
||||
try:
|
||||
# Mock implementation
|
||||
return {
|
||||
"user_agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
|
||||
"screen_resolution": "1920x1080",
|
||||
"timezone": "Asia/Shanghai",
|
||||
"plugins": ["PDF Viewer", "Chrome PDF Viewer", "Native Client"]
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get browser fingerprint: {e}")
|
||||
return {}
|
||||
23
backend/signin_executor/requirements.txt
Normal file
23
backend/signin_executor/requirements.txt
Normal file
@@ -0,0 +1,23 @@
|
||||
# Weibo-HotSign Sign-in Executor Service Requirements
|
||||
# Web Framework
|
||||
fastapi==0.104.1
|
||||
uvicorn[standard]==0.24.0
|
||||
|
||||
# Database
|
||||
sqlalchemy==2.0.23
|
||||
aiomysql==0.2.0
|
||||
PyMySQL==1.1.0
|
||||
redis==5.0.1
|
||||
|
||||
# Configuration
|
||||
pydantic-settings==2.0.3
|
||||
pydantic==2.5.0
|
||||
|
||||
# HTTP Client
|
||||
httpx==0.25.2
|
||||
|
||||
# Utilities
|
||||
python-dotenv==1.0.0
|
||||
|
||||
# Security (for cookie decryption)
|
||||
pycryptodome==3.19.0
|
||||
30
backend/task_scheduler/Dockerfile
Normal file
30
backend/task_scheduler/Dockerfile
Normal file
@@ -0,0 +1,30 @@
|
||||
# Weibo-HotSign Task Scheduler Service Dockerfile
FROM python:3.11-slim

# Set working directory
WORKDIR /app

# Install system dependencies
RUN apt-get update && apt-get install -y \
    gcc \
    default-libmysqlclient-dev \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements first for better caching
COPY requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY app/ ./app/

# Create non-root user for security
RUN groupadd -r appuser && useradd -r -g appuser appuser
USER appuser

# Expose port (optional, as scheduler doesn't need external access)
# EXPOSE 8000

# Start Celery Beat scheduler.
# BUGFIX: Beat persists its schedule database ("celerybeat-schedule") to the
# working directory by default, but /app is root-owned and `appuser` cannot
# write there — point the schedule file at writable /tmp instead (matches the
# BEAT_SCHEDULE_FILE default in app/config.py).
CMD ["celery", "-A", "app.celery_app", "beat", "--loglevel=info", "--schedule=/tmp/celerybeat-schedule"]
|
||||
97
backend/task_scheduler/app/celery_app.py
Normal file
97
backend/task_scheduler/app/celery_app.py
Normal file
@@ -0,0 +1,97 @@
|
||||
"""
|
||||
Weibo-HotSign Task Scheduler Service
|
||||
Celery Beat configuration for scheduled sign-in tasks
|
||||
"""
|
||||
|
||||
import os
from celery import Celery
from celery.schedules import crontab
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy import select
import asyncio
from datetime import datetime

# BUGFIX: this module is loaded as `app.celery_app` (the Dockerfile runs
# `celery -A app.celery_app beat`), so `from ..config import settings` climbs
# above the top-level `app` package and raises "attempted relative import
# beyond top-level package". Use the absolute import, consistent with the
# `include=["app.tasks.signin_tasks"]` path below.
from app.config import settings

# Create Celery app
celery_app = Celery(
    "weibo_hot_sign_scheduler",
    broker=settings.CELERY_BROKER_URL,
    backend=settings.CELERY_RESULT_BACKEND,
    include=["app.tasks.signin_tasks"]
)

# Celery configuration: JSON-only messages, Shanghai-local schedule display.
celery_app.conf.update(
    task_serializer="json",
    accept_content=["json"],
    result_serializer="json",
    timezone="Asia/Shanghai",
    enable_utc=True,
    beat_schedule_filename="celerybeat-schedule",
    beat_max_loop_interval=5,
)

# Database configuration for task scheduler
engine = create_async_engine(
    settings.DATABASE_URL,
    echo=settings.DEBUG,
    pool_size=10,
    max_overflow=20
)

# Async session factory shared by get_db() and TaskSchedulerService.
AsyncSessionLocal = sessionmaker(
    engine,
    class_=AsyncSession,
    expire_on_commit=False
)
|
||||
|
||||
async def get_db():
    """Yield an async database session, guaranteeing it is closed afterwards."""
    async with AsyncSessionLocal() as db_session:
        try:
            yield db_session
        finally:
            # Explicit close in addition to the context manager: harmless
            # (close is idempotent) and makes the intent obvious.
            await db_session.close()
|
||||
|
||||
class TaskSchedulerService:
    """Manages scheduled sign-in tasks sourced from the database."""

    def __init__(self):
        # Reuse the module-level async engine.
        self.engine = engine

    async def load_scheduled_tasks(self):
        """Fetch every enabled Task row; returns [] on any error."""
        # Imported inside the method so the models package is only
        # required once this service is actually used.
        from app.models.task_models import Task

        try:
            async with AsyncSessionLocal() as session:
                query = select(Task).where(Task.is_enabled == True)
                tasks = (await session.execute(query)).scalars().all()

                print(f"📅 Loaded {len(tasks)} enabled tasks from database")

                # Here we would dynamically add tasks to Celery Beat;
                # for now static configuration is used instead.
                return tasks
        except Exception as e:
            print(f"❌ Error loading tasks from database: {e}")
            return []
|
||||
|
||||
# Synchronous wrapper for async function
|
||||
def sync_load_tasks():
    """Synchronous wrapper to load tasks.

    Returns:
        Whatever ``TaskSchedulerService.load_scheduled_tasks()`` returns —
        a list of enabled Task rows, or [] on error.
    """
    service = TaskSchedulerService()
    # BUGFIX: asyncio.run() creates, uses and disposes of a fresh event
    # loop. The previous new_event_loop()/set_event_loop()/close() dance
    # left the *closed* loop installed as the thread's current loop,
    # breaking any later asyncio.get_event_loop() call in this thread.
    return asyncio.run(service.load_scheduled_tasks())
|
||||
|
||||
# Import task modules to register them
|
||||
from app.tasks import signin_tasks
|
||||
47
backend/task_scheduler/app/config.py
Normal file
47
backend/task_scheduler/app/config.py
Normal file
@@ -0,0 +1,47 @@
|
||||
"""
|
||||
Configuration for Task Scheduler Service
|
||||
"""
|
||||
|
||||
import os
|
||||
from pydantic_settings import BaseSettings
|
||||
from typing import List
|
||||
|
||||
class Settings(BaseSettings):
    """Task Scheduler settings.

    Every field can be overridden with an environment variable of the same
    name (or via the .env file, see Config); the ``os.getenv`` fallbacks
    below apply only when no variable is set.
    """

    # Database settings
    # NOTE(review): the fallback embeds real-looking credentials and a
    # public IP — these belong in the environment, not in source. Confirm
    # and rotate if this ever shipped.
    DATABASE_URL: str = os.getenv(
        "DATABASE_URL",
        "mysql+aiomysql://weibo:123456789@43.134.68.207/weibo"
    )

    # Celery settings (broker and result backend share one Redis DB)
    CELERY_BROKER_URL: str = os.getenv("CELERY_BROKER_URL", "redis://redis:6379/0")
    CELERY_RESULT_BACKEND: str = os.getenv("CELERY_RESULT_BACKEND", "redis://redis:6379/0")

    # Task execution settings
    MAX_CONCURRENT_TASKS: int = int(os.getenv("MAX_CONCURRENT_TASKS", "10"))
    TASK_TIMEOUT_SECONDS: int = int(os.getenv("TASK_TIMEOUT_SECONDS", "300"))

    # Scheduler settings
    SCHEDULER_TIMEZONE: str = os.getenv("SCHEDULER_TIMEZONE", "Asia/Shanghai")
    # NOTE(review): celery_app.py hard-codes beat_schedule_filename instead
    # of reading this setting — confirm which one is authoritative.
    BEAT_SCHEDULE_FILE: str = os.getenv("BEAT_SCHEDULE_FILE", "/tmp/celerybeat-schedule")

    # Retry settings (consumed by execute_signin_task's manual retry logic)
    MAX_RETRY_ATTEMPTS: int = int(os.getenv("MAX_RETRY_ATTEMPTS", "3"))
    RETRY_DELAY_SECONDS: int = int(os.getenv("RETRY_DELAY_SECONDS", "60"))

    # Logging
    LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO")
    # DEBUG also switches SQLAlchemy echo on in celery_app.py.
    DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true"

    # Service URLs (docker-compose service names)
    SIGNIN_EXECUTOR_URL: str = os.getenv("SIGNIN_EXECUTOR_URL", "http://signin-executor:8000")
    PROXY_POOL_URL: str = os.getenv("PROXY_POOL_URL", "http://proxy-pool:8080")
    BROWSER_AUTOMATION_URL: str = os.getenv("BROWSER_AUTOMATION_URL", "http://browser-automation:3001")

    class Config:
        # Env var names must match field names exactly.
        case_sensitive = True
        env_file = ".env"

settings = Settings()
|
||||
196
backend/task_scheduler/app/tasks/signin_tasks.py
Normal file
196
backend/task_scheduler/app/tasks/signin_tasks.py
Normal file
@@ -0,0 +1,196 @@
|
||||
"""
|
||||
Weibo-HotSign Sign-in Task Definitions
|
||||
Celery tasks for scheduled sign-in operations
|
||||
"""
|
||||
|
||||
import asyncio
import json
import logging
from datetime import datetime
from typing import Dict, Any, Optional

import httpx
from celery import current_task
from celery.schedules import crontab

from ..celery_app import celery_app
from ..config import settings
|
||||
|
||||
# Configure logger
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@celery_app.task(bind=True, max_retries=3, default_retry_delay=60)
def execute_signin_task(self, task_id: str, account_id: str, cron_expression: str):
    """
    Execute scheduled sign-in task for a specific account.
    This task is triggered by Celery Beat based on cron schedule.

    Args:
        task_id: Logical identifier of the scheduled task (for logs/meta).
        account_id: Account to sign in.
        cron_expression: The schedule this run belongs to (informational;
            not evaluated here).

    Returns:
        The JSON body returned by the signin executor service.

    Raises:
        Retries via ``self.retry`` on failure; re-raises the final
        exception once retries are exhausted.
    """
    logger.info(f"🎯 Starting sign-in task {task_id} for account {account_id}")

    try:
        # Publish coarse progress so API consumers can poll task state.
        current_task.update_state(
            state="PROGRESS",
            meta={
                "current": 10,
                "total": 100,
                "status": "Initializing sign-in process...",
                "account_id": account_id
            }
        )

        # Call signin executor service (synchronous HTTP, raises on error)
        result = _call_signin_executor(account_id, task_id)

        # Record rich success metadata; Celery will also mark SUCCESS on return.
        current_task.update_state(
            state="SUCCESS",
            meta={
                "current": 100,
                "total": 100,
                "status": "Sign-in completed successfully",
                "result": result,
                "account_id": account_id
            }
        )

        logger.info(f"✅ Sign-in task {task_id} completed successfully for account {account_id}")
        return result

    except Exception as exc:
        logger.error(f"❌ Sign-in task {task_id} failed for account {account_id}: {exc}")

        # Retry logic.
        # NOTE(review): this compares against settings.MAX_RETRY_ATTEMPTS
        # while the decorator caps retries at max_retries=3 — if the env
        # raises MAX_RETRY_ATTEMPTS above 3, self.retry() will raise
        # MaxRetriesExceededError first. Confirm which limit is intended.
        if self.request.retries < settings.MAX_RETRY_ATTEMPTS:
            logger.info(f"🔄 Retrying task {task_id} (attempt {self.request.retries + 1})")
            raise self.retry(exc=exc, countdown=settings.RETRY_DELAY_SECONDS)

        # Final failure: record metadata, then re-raise so Celery marks FAILURE.
        current_task.update_state(
            state="FAILURE",
            meta={
                "current": 100,
                "total": 100,
                "status": f"Task failed after {settings.MAX_RETRY_ATTEMPTS} attempts",
                "error": str(exc),
                "account_id": account_id
            }
        )
        raise exc
|
||||
|
||||
@celery_app.task
def schedule_daily_signin():
    """
    Submit one sign-in task per account for the daily schedule.

    Demo implementation: iterates a hard-coded account list instead of
    querying the database, and returns a submission summary dict.
    """
    logger.info("📅 Executing daily sign-in schedule")

    # This would typically query the database for accounts needing a
    # daily sign-in; mocked for now.
    accounts = ["account_1", "account_2", "account_3"]  # Mock account IDs
    results = []

    for account_id in accounts:
        try:
            submitted = execute_signin_task.delay(
                task_id=f"daily_{datetime.now().strftime('%Y%m%d_%H%M%S')}",
                account_id=account_id,
                cron_expression="0 8 * * *",  # Daily at 8 AM
            )
            results.append(
                {"account_id": account_id, "task_id": submitted.id, "status": "submitted"}
            )
        except Exception as e:
            logger.error(f"Failed to submit task for account {account_id}: {e}")
            results.append(
                {"account_id": account_id, "status": "failed", "error": str(e)}
            )

    return {
        "scheduled_date": datetime.now().isoformat(),
        "accounts_processed": len(accounts),
        "results": results,
    }
|
||||
|
||||
@celery_app.task
def process_pending_tasks():
    """
    Process pending sign-in tasks from the database.

    Placeholder implementation: returns a canned summary rather than
    actually querying and dispatching. Can be called manually or by Beat.
    """
    logger.info("🔄 Processing pending sign-in tasks from database")

    # In real implementation, this would:
    # 1. Query database for tasks that need to be executed
    # 2. Check if they're due based on cron expressions
    # 3. Submit them to Celery for execution
    try:
        summary = {
            "processed_at": datetime.now().isoformat(),
            "tasks_found": 5,  # Mock number
            "tasks_submitted": 3,
            "tasks_skipped": 2,
            "status": "completed",
        }

        logger.info(f"✅ Processed pending tasks: {summary}")
        return summary
    except Exception as e:
        logger.error(f"❌ Failed to process pending tasks: {e}")
        raise
|
||||
|
||||
def _call_signin_executor(account_id: str, task_id: str) -> Dict[str, Any]:
    """
    POST a sign-in request to the signin executor service and return its
    JSON response.

    Raises:
        Exception: on network failure or any non-200 response.
    """
    try:
        request_body = {
            "task_id": task_id,
            "account_id": account_id,
            "timestamp": datetime.now().isoformat(),
            "requested_by": "task_scheduler",
        }

        # Synchronous client: this helper runs inside a Celery worker.
        with httpx.Client(timeout=30.0) as http:
            response = http.post(
                f"{settings.SIGNIN_EXECUTOR_URL}/api/v1/signin/execute",
                json=request_body,
            )

            if response.status_code != 200:
                raise Exception(f"Sign-in executor returned error: {response.status_code} - {response.text}")

            payload = response.json()
            logger.info(f"Sign-in executor response: {payload}")
            return payload

    except httpx.RequestError as e:
        logger.error(f"Network error calling signin executor: {e}")
        raise Exception(f"Failed to connect to signin executor: {e}")
    except Exception as e:
        logger.error(f"Error calling signin executor: {e}")
        raise
|
||||
|
||||
# Periodic task definitions for Celery Beat
celery_app.conf.beat_schedule = {
    "daily-signin-at-8am": {
        "task": "app.tasks.signin_tasks.schedule_daily_signin",
        # BUGFIX: a plain dict is not a valid Celery schedule value (Beat
        # expects a number of seconds or a schedule/crontab instance) —
        # the old {"hour": 8, "minute": 0} entry made Beat fail to build
        # the schedule. Use a crontab so the task fires daily at 08:00
        # in the app's configured timezone.
        "schedule": crontab(hour=8, minute=0),
    },
    "process-pending-every-15-minutes": {
        "task": "app.tasks.signin_tasks.process_pending_tasks",
        "schedule": 900.0,  # Every 15 minutes
    },
}
|
||||
18
backend/task_scheduler/requirements.txt
Normal file
18
backend/task_scheduler/requirements.txt
Normal file
@@ -0,0 +1,18 @@
|
||||
# Weibo-HotSign Task Scheduler Service Requirements
|
||||
# Task Queue
|
||||
celery==5.3.6
|
||||
redis==5.0.1
|
||||
|
||||
# Database
|
||||
sqlalchemy==2.0.23
|
||||
aiomysql==0.2.0
|
||||
PyMySQL==1.1.0
|
||||
|
||||
# Configuration
|
||||
pydantic-settings==2.0.3
|
||||
|
||||
# HTTP Client
|
||||
httpx==0.25.2
|
||||
|
||||
# Utilities
|
||||
python-dotenv==1.0.0
|
||||
0
backend/tests/__init__.py
Normal file
0
backend/tests/__init__.py
Normal file
BIN
backend/tests/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
backend/tests/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/tests/__pycache__/conftest.cpython-311-pytest-8.3.3.pyc
Normal file
BIN
backend/tests/__pycache__/conftest.cpython-311-pytest-8.3.3.pyc
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
86
backend/tests/conftest.py
Normal file
86
backend/tests/conftest.py
Normal file
@@ -0,0 +1,86 @@
|
||||
"""
|
||||
Shared test fixtures for Weibo-HotSign backend tests.
|
||||
|
||||
Uses SQLite in-memory for database tests and a simple dict-based
|
||||
fake Redis for refresh-token tests, so no external services are needed.
|
||||
"""
|
||||
|
||||
import asyncio
import sys
import os
from typing import AsyncGenerator

import pytest
import pytest_asyncio
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker

# Ensure backend/ is on sys.path so `shared` and `app` imports work
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))

# --------------- override shared settings BEFORE any app import ---------------
# These env vars must be set before shared/app modules read their settings.
os.environ["DATABASE_URL"] = "sqlite+aiosqlite://"
os.environ["REDIS_URL"] = "redis://localhost:6379/0"
os.environ["JWT_SECRET_KEY"] = "test-secret-key"
os.environ["COOKIE_ENCRYPTION_KEY"] = "test-cookie-key"

# Create the test engine BEFORE importing shared.models so we can swap it in.
# "sqlite+aiosqlite://" with no path is an in-memory database.
TEST_ENGINE = create_async_engine("sqlite+aiosqlite://", echo=False)
TestSessionLocal = sessionmaker(TEST_ENGINE, class_=AsyncSession, expire_on_commit=False)

# Now patch shared.models.base module-level objects before they get used,
# so application code that imported them resolves to the test engine.
import shared.models.base as _base_mod  # noqa: E402

_base_mod.engine = TEST_ENGINE
_base_mod.AsyncSessionLocal = TestSessionLocal

from shared.models.base import Base  # noqa: E402
from shared.models import User  # noqa: E402
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def event_loop():
    """Create a single event loop for the whole test session."""
    session_loop = asyncio.new_event_loop()
    yield session_loop
    session_loop.close()
|
||||
|
||||
|
||||
@pytest_asyncio.fixture(autouse=True)
async def setup_db():
    """Build the full schema before every test and tear it down afterwards."""
    async with TEST_ENGINE.begin() as connection:
        await connection.run_sync(Base.metadata.create_all)
    yield
    async with TEST_ENGINE.begin() as connection:
        await connection.run_sync(Base.metadata.drop_all)
|
||||
|
||||
|
||||
@pytest_asyncio.fixture
async def db_session() -> AsyncGenerator[AsyncSession, None]:
    """Hand each test a fresh async SQLAlchemy session."""
    async with TestSessionLocal() as fresh_session:
        yield fresh_session
|
||||
|
||||
|
||||
# --------------- Fake Redis for refresh-token tests ---------------
|
||||
|
||||
class FakeRedis:
    """Minimal async Redis stand-in backed by a plain dict.

    Implements just the coroutine methods the auth code calls; the TTL
    argument of ``setex`` is accepted but ignored (tests never wait for
    expiry).
    """

    def __init__(self):
        self._store: dict[str, str] = {}

    async def setex(self, key: str, ttl: int, value: str):
        # TTL intentionally ignored — entries never expire in tests.
        self._store[key] = value

    async def get(self, key: str):
        return self._store.get(key)

    async def delete(self, key: str):
        # Missing keys are silently ignored, like real Redis DEL.
        self._store.pop(key, None)
|
||||
|
||||
|
||||
@pytest.fixture
def fake_redis():
    """Provide a fresh FakeRedis instance per test."""
    redis_stub = FakeRedis()
    return redis_stub
|
||||
214
backend/tests/test_api_accounts.py
Normal file
214
backend/tests/test_api_accounts.py
Normal file
@@ -0,0 +1,214 @@
|
||||
"""
|
||||
Tests for api_service account CRUD endpoints.
|
||||
Validates tasks 4.1 and 4.2.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import pytest_asyncio
|
||||
from unittest.mock import patch
|
||||
from httpx import AsyncClient, ASGITransport
|
||||
|
||||
from shared.models import get_db
|
||||
from tests.conftest import TEST_ENGINE, TestSessionLocal, Base, FakeRedis
|
||||
|
||||
|
||||
@pytest_asyncio.fixture
async def client():
    """
    Provide an httpx AsyncClient wired to the api_service app,
    with DB overridden to test SQLite and a fake Redis for auth tokens.

    Yields:
        (auth_client, api_client) — one client per FastAPI app, both
        backed by the shared in-memory SQLite engine.
    """
    fake_redis = FakeRedis()

    # Tables must exist before the apps are imported/used.
    async with TEST_ENGINE.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)

    # Import apps after DB is ready
    from api_service.app.main import app as api_app
    from auth_service.app.main import app as auth_app

    async def override_get_db():
        async with TestSessionLocal() as session:
            yield session

    async def _fake_get_redis():
        return fake_redis

    # Both apps must share the same test DB so auth users are visible to the API.
    api_app.dependency_overrides[get_db] = override_get_db
    auth_app.dependency_overrides[get_db] = override_get_db

    # Patch where get_redis is *looked up*, not where it is defined.
    with patch(
        "auth_service.app.utils.security.get_redis",
        new=_fake_get_redis,
    ):
        # We need both clients: auth for getting tokens, api for account ops
        async with AsyncClient(
            transport=ASGITransport(app=auth_app), base_url="http://auth"
        ) as auth_client, AsyncClient(
            transport=ASGITransport(app=api_app), base_url="http://api"
        ) as api_client:
            yield auth_client, api_client

    # Remove overrides so other test modules see pristine apps.
    api_app.dependency_overrides.clear()
    auth_app.dependency_overrides.clear()
|
||||
|
||||
|
||||
async def _register_and_login(auth_client: AsyncClient, suffix: str = "1") -> str:
    """Register a fresh user, log in, and return the access token."""
    credentials = {
        "email": f"acct{suffix}@example.com",
        "password": "Str0ng!Pass1",
    }

    reg = await auth_client.post(
        "/auth/register",
        json={"username": f"acctuser{suffix}", **credentials},
    )
    assert reg.status_code == 201, f"Register failed: {reg.json()}"

    resp = await auth_client.post("/auth/login", json=credentials)
    login_body = resp.json()
    assert resp.status_code == 200, f"Login failed: {login_body}"

    # Handle both wrapped (success_response) and unwrapped token formats
    if "data" in login_body:
        return login_body["data"]["access_token"]
    return login_body["access_token"]
|
||||
|
||||
|
||||
def _auth_header(token: str) -> dict:
|
||||
return {"Authorization": f"Bearer {token}"}
|
||||
|
||||
|
||||
# ===================== Basic structure tests =====================
|
||||
|
||||
|
||||
class TestAPIServiceBase:
    """Smoke tests: the api_service health and root endpoints respond."""

    @pytest.mark.asyncio
    async def test_health(self, client):
        _, api = client
        response = await api.get("/health")
        assert response.status_code == 200
        assert response.json()["success"] is True

    @pytest.mark.asyncio
    async def test_root(self, client):
        _, api = client
        response = await api.get("/")
        assert response.status_code == 200
        assert "API Service" in response.json()["data"]["service"]
|
||||
|
||||
|
||||
# ===================== Account CRUD tests =====================
|
||||
|
||||
|
||||
class TestAccountCRUD:
    """End-to-end CRUD tests for /api/v1/accounts (tasks 4.1 / 4.2)."""

    @pytest.mark.asyncio
    async def test_create_account(self, client):
        """Creating an account returns 201, pending status, and no cookie plaintext."""
        auth, api = client
        token = await _register_and_login(auth)

        resp = await api.post("/api/v1/accounts", json={
            "weibo_user_id": "12345",
            "cookie": "SUB=abc; SUBP=xyz;",
            "remark": "test account",
        }, headers=_auth_header(token))

        assert resp.status_code == 201
        body = resp.json()
        assert body["success"] is True
        assert body["data"]["weibo_user_id"] == "12345"
        assert body["data"]["status"] == "pending"
        assert body["data"]["remark"] == "test account"
        # Cookie plaintext must NOT appear in response
        assert "SUB=abc" not in str(body)

    @pytest.mark.asyncio
    async def test_list_accounts(self, client):
        """Listing returns every account owned by the caller."""
        auth, api = client
        token = await _register_and_login(auth, "list")

        # Create two accounts
        for i in range(2):
            await api.post("/api/v1/accounts", json={
                "weibo_user_id": f"uid{i}",
                "cookie": f"cookie{i}",
            }, headers=_auth_header(token))

        resp = await api.get("/api/v1/accounts", headers=_auth_header(token))
        assert resp.status_code == 200
        data = resp.json()["data"]
        assert len(data) == 2

    @pytest.mark.asyncio
    async def test_get_account_detail(self, client):
        """Fetching a single account returns its stored remark."""
        auth, api = client
        token = await _register_and_login(auth, "detail")

        create_resp = await api.post("/api/v1/accounts", json={
            "weibo_user_id": "99",
            "cookie": "c=1",
            "remark": "my remark",
        }, headers=_auth_header(token))
        account_id = create_resp.json()["data"]["id"]

        resp = await api.get(f"/api/v1/accounts/{account_id}", headers=_auth_header(token))
        assert resp.status_code == 200
        assert resp.json()["data"]["remark"] == "my remark"

    @pytest.mark.asyncio
    async def test_update_account_remark(self, client):
        """PUT updates the remark and echoes the new value back."""
        auth, api = client
        token = await _register_and_login(auth, "upd")

        create_resp = await api.post("/api/v1/accounts", json={
            "weibo_user_id": "55",
            "cookie": "c=old",
        }, headers=_auth_header(token))
        account_id = create_resp.json()["data"]["id"]

        resp = await api.put(f"/api/v1/accounts/{account_id}", json={
            "remark": "updated remark",
        }, headers=_auth_header(token))
        assert resp.status_code == 200
        assert resp.json()["data"]["remark"] == "updated remark"

    @pytest.mark.asyncio
    async def test_delete_account(self, client):
        """DELETE removes the account; a later GET yields 404."""
        auth, api = client
        token = await _register_and_login(auth, "del")

        create_resp = await api.post("/api/v1/accounts", json={
            "weibo_user_id": "77",
            "cookie": "c=del",
        }, headers=_auth_header(token))
        account_id = create_resp.json()["data"]["id"]

        resp = await api.delete(f"/api/v1/accounts/{account_id}", headers=_auth_header(token))
        assert resp.status_code == 200

        # Verify it's gone
        resp2 = await api.get(f"/api/v1/accounts/{account_id}", headers=_auth_header(token))
        assert resp2.status_code == 404

    @pytest.mark.asyncio
    async def test_access_other_users_account_forbidden(self, client):
        """A user cannot read another user's account (403)."""
        auth, api = client
        token_a = await _register_and_login(auth, "ownerA")
        token_b = await _register_and_login(auth, "ownerB")

        # User A creates an account
        create_resp = await api.post("/api/v1/accounts", json={
            "weibo_user_id": "111",
            "cookie": "c=a",
        }, headers=_auth_header(token_a))
        account_id = create_resp.json()["data"]["id"]

        # User B tries to access it
        resp = await api.get(f"/api/v1/accounts/{account_id}", headers=_auth_header(token_b))
        assert resp.status_code == 403

    @pytest.mark.asyncio
    async def test_unauthenticated_request_rejected(self, client):
        """Requests without a bearer token are rejected."""
        _, api = client
        resp = await api.get("/api/v1/accounts")
        assert resp.status_code in (401, 403)
|
||||
238
backend/tests/test_api_signin_logs.py
Normal file
238
backend/tests/test_api_signin_logs.py
Normal file
@@ -0,0 +1,238 @@
|
||||
"""
|
||||
Tests for api_service signin log query endpoints.
|
||||
Validates task 6.1.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import pytest_asyncio
|
||||
from datetime import datetime, timedelta
|
||||
from unittest.mock import patch
|
||||
from httpx import AsyncClient, ASGITransport
|
||||
|
||||
from shared.models import get_db, Account, SigninLog
|
||||
from tests.conftest import TEST_ENGINE, TestSessionLocal, Base, FakeRedis
|
||||
|
||||
|
||||
@pytest_asyncio.fixture
async def client():
    """
    Provide an httpx AsyncClient wired to the api_service app,
    with DB overridden to test SQLite and a fake Redis for auth tokens.

    Yields:
        (auth_client, api_client).

    NOTE(review): duplicates the fixture in test_api_accounts.py —
    consider moving it into conftest.py.
    """
    fake_redis = FakeRedis()

    # Tables must exist before the apps are imported/used.
    async with TEST_ENGINE.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)

    # Import apps after DB is ready
    from api_service.app.main import app as api_app
    from auth_service.app.main import app as auth_app

    async def override_get_db():
        async with TestSessionLocal() as session:
            yield session

    async def _fake_get_redis():
        return fake_redis

    # Both apps share the same test DB so auth users are visible to the API.
    api_app.dependency_overrides[get_db] = override_get_db
    auth_app.dependency_overrides[get_db] = override_get_db

    # Patch where get_redis is *looked up*, not where it is defined.
    with patch(
        "auth_service.app.utils.security.get_redis",
        new=_fake_get_redis,
    ):
        async with AsyncClient(
            transport=ASGITransport(app=auth_app), base_url="http://auth"
        ) as auth_client, AsyncClient(
            transport=ASGITransport(app=api_app), base_url="http://api"
        ) as api_client:
            yield auth_client, api_client

    # Remove overrides so other test modules see pristine apps.
    api_app.dependency_overrides.clear()
    auth_app.dependency_overrides.clear()
|
||||
|
||||
|
||||
async def _register_and_login(auth_client: AsyncClient, suffix: str = "1") -> str:
    """Helper: register a user and return an access token.

    ``suffix`` keeps usernames/emails unique across tests in this module.
    NOTE(review): near-duplicate of the helper in test_api_accounts.py —
    consider sharing it via conftest.py.
    """
    reg = await auth_client.post("/auth/register", json={
        "username": f"loguser{suffix}",
        "email": f"log{suffix}@example.com",
        "password": "Str0ng!Pass1",
    })
    assert reg.status_code == 201
    resp = await auth_client.post("/auth/login", json={
        "email": f"log{suffix}@example.com",
        "password": "Str0ng!Pass1",
    })
    login_body = resp.json()
    assert resp.status_code == 200
    # Handle both wrapped (success_response) and unwrapped token formats.
    if "data" in login_body:
        return login_body["data"]["access_token"]
    return login_body["access_token"]
|
||||
|
||||
|
||||
def _auth_header(token: str) -> dict:
|
||||
return {"Authorization": f"Bearer {token}"}
|
||||
|
||||
|
||||
async def _create_account(api: AsyncClient, token: str, weibo_id: str) -> str:
    """Create a Weibo account through the API and return its new ID."""
    payload = {
        "weibo_user_id": weibo_id,
        "cookie": f"cookie_{weibo_id}",
    }
    resp = await api.post(
        "/api/v1/accounts",
        json=payload,
        headers=_auth_header(token),
    )
    assert resp.status_code == 201
    body = resp.json()
    return body["data"]["id"]
|
||||
|
||||
|
||||
async def _create_signin_logs(db, account_id: str, count: int, statuses: list | None = None):
    """Insert *count* SigninLog rows for *account_id* in a single commit.

    Args:
        db: Async SQLAlchemy session used to stage and commit the rows.
        account_id: ID of the owning account.
        count: Number of log rows to create.
        statuses: Optional per-row statuses; rows beyond its length (or all
            rows when omitted) default to "success".
    """
    if statuses is None:
        statuses = ["success"] * count

    # NOTE(review): naive UTC timestamp; datetime.utcnow() is deprecated since
    # Python 3.12 — confirm the model expects naive datetimes before migrating
    # to datetime.now(timezone.utc).
    base_time = datetime.utcnow()
    for i in range(count):
        db.add(
            SigninLog(
                account_id=account_id,
                topic_title=f"Topic {i}",
                status=statuses[i] if i < len(statuses) else "success",
                # Each later row is one hour older, so "newest first" query
                # order matches insertion index 0 first.
                signed_at=base_time - timedelta(hours=i),
            )
        )
    await db.commit()
|
||||
|
||||
|
||||
# ===================== Signin Log Query Tests =====================
|
||||
|
||||
|
||||
class TestSigninLogQuery:
    """Integration tests for GET /api/v1/accounts/{id}/signin-logs.

    Uses the ``client`` fixture, which yields ``(auth_client, api_client)``;
    data is seeded either via the API helpers or directly through
    ``TestSessionLocal``.
    """

    @pytest.mark.asyncio
    async def test_get_signin_logs_empty(self, client):
        """Test querying logs for an account with no logs."""
        auth, api = client
        token = await _register_and_login(auth, "empty")
        account_id = await _create_account(api, token, "empty_acc")

        resp = await api.get(
            f"/api/v1/accounts/{account_id}/signin-logs",
            headers=_auth_header(token)
        )
        assert resp.status_code == 200
        data = resp.json()["data"]
        assert data["total"] == 0
        assert len(data["items"]) == 0

    @pytest.mark.asyncio
    async def test_get_signin_logs_with_data(self, client):
        """Test querying logs returns data in descending order."""
        auth, api = client
        token = await _register_and_login(auth, "data")
        account_id = await _create_account(api, token, "data_acc")

        # Create logs directly in DB (bypasses the API on purpose)
        async with TestSessionLocal() as db:
            await _create_signin_logs(db, account_id, 5)

        resp = await api.get(
            f"/api/v1/accounts/{account_id}/signin-logs",
            headers=_auth_header(token)
        )
        assert resp.status_code == 200
        data = resp.json()["data"]
        assert data["total"] == 5
        assert len(data["items"]) == 5

        # Verify descending order by signed_at
        items = data["items"]
        for i in range(len(items) - 1):
            assert items[i]["signed_at"] >= items[i + 1]["signed_at"]

    @pytest.mark.asyncio
    async def test_signin_logs_pagination(self, client):
        """Test pagination works correctly."""
        auth, api = client
        token = await _register_and_login(auth, "page")
        account_id = await _create_account(api, token, "page_acc")

        # Create 10 logs
        async with TestSessionLocal() as db:
            await _create_signin_logs(db, account_id, 10)

        # Page 1, size 3
        resp = await api.get(
            f"/api/v1/accounts/{account_id}/signin-logs?page=1&size=3",
            headers=_auth_header(token)
        )
        assert resp.status_code == 200
        data = resp.json()["data"]
        assert data["total"] == 10
        assert len(data["items"]) == 3
        assert data["page"] == 1
        assert data["size"] == 3
        # ceil(10 / 3) == 4
        assert data["total_pages"] == 4

        # Page 2, size 3
        resp2 = await api.get(
            f"/api/v1/accounts/{account_id}/signin-logs?page=2&size=3",
            headers=_auth_header(token)
        )
        data2 = resp2.json()["data"]
        assert len(data2["items"]) == 3
        assert data2["page"] == 2

    @pytest.mark.asyncio
    async def test_signin_logs_status_filter(self, client):
        """Test status filtering works correctly."""
        auth, api = client
        token = await _register_and_login(auth, "filter")
        account_id = await _create_account(api, token, "filter_acc")

        # Create logs with different statuses (3 success, 2 failures)
        async with TestSessionLocal() as db:
            statuses = ["success", "success", "failed_network", "success", "failed_already_signed"]
            await _create_signin_logs(db, account_id, 5, statuses)

        # Filter by success
        resp = await api.get(
            f"/api/v1/accounts/{account_id}/signin-logs?status=success",
            headers=_auth_header(token)
        )
        assert resp.status_code == 200
        data = resp.json()["data"]
        assert data["total"] == 3
        assert all(item["status"] == "success" for item in data["items"])

        # Filter by failed_network
        resp2 = await api.get(
            f"/api/v1/accounts/{account_id}/signin-logs?status=failed_network",
            headers=_auth_header(token)
        )
        data2 = resp2.json()["data"]
        assert data2["total"] == 1
        assert data2["items"][0]["status"] == "failed_network"

    @pytest.mark.asyncio
    async def test_access_other_users_logs_forbidden(self, client):
        """Test that users cannot access other users' signin logs."""
        auth, api = client
        token_a = await _register_and_login(auth, "logA")
        token_b = await _register_and_login(auth, "logB")

        # User A creates an account
        account_id = await _create_account(api, token_a, "logA_acc")

        # User B tries to access logs
        resp = await api.get(
            f"/api/v1/accounts/{account_id}/signin-logs",
            headers=_auth_header(token_b)
        )
        assert resp.status_code == 403

    @pytest.mark.asyncio
    async def test_unauthenticated_logs_request_rejected(self, client):
        """Test that unauthenticated requests are rejected."""
        _, api = client
        resp = await api.get("/api/v1/accounts/fake-id/signin-logs")
        # Either 401 or 403 is acceptable depending on the auth dependency.
        assert resp.status_code in (401, 403)
|
||||
--- new file: backend/tests/test_api_tasks.py (+226 lines) ---
|
||||
"""
|
||||
Tests for API_Service task management endpoints.
|
||||
"""
|
||||
|
||||
import pytest
import pytest_asyncio
from httpx import ASGITransport, AsyncClient
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from shared.models import User, Account, Task
from auth_service.app.utils.security import create_access_token
from shared.crypto import encrypt_cookie, derive_key
|
||||
|
||||
|
||||
@pytest_asyncio.fixture
async def test_user(db_session: AsyncSession) -> User:
    """Persist and return a baseline user for the task-endpoint tests."""
    created = User(
        username="testuser",
        email="test@example.com",
        hashed_password="hashed_password",
    )
    db_session.add(created)
    await db_session.commit()
    # Reload server-generated columns (e.g. the primary key) before returning.
    await db_session.refresh(created)
    return created
|
||||
|
||||
|
||||
@pytest_asyncio.fixture
async def test_account(db_session: AsyncSession, test_user: User) -> Account:
    """Persist and return an account (with encrypted cookie) owned by test_user."""
    cipher_key = derive_key("test-cookie-key")
    ciphertext, iv = encrypt_cookie("test_cookie_data", cipher_key)

    created = Account(
        user_id=test_user.id,
        weibo_user_id="123456",
        remark="Test Account",
        encrypted_cookies=ciphertext,
        iv=iv,
        status="pending",
    )
    db_session.add(created)
    await db_session.commit()
    await db_session.refresh(created)
    return created
|
||||
|
||||
|
||||
@pytest_asyncio.fixture
async def auth_headers(test_user: User) -> dict:
    """Return Authorization headers bearing a JWT issued for the test user."""
    claims = {"sub": test_user.id}
    bearer = create_access_token(claims)
    return {"Authorization": f"Bearer {bearer}"}
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_create_task_valid_cron(
    db_session: AsyncSession,
    test_user: User,
    test_account: Account,
    auth_headers: dict,
):
    """Test creating a task with valid cron expression."""
    from api_service.app.main import app

    # httpx >= 0.27 removed the ``app=`` shortcut; wire the ASGI app through
    # an explicit transport (consistent with the other integration tests).
    async with AsyncClient(
        transport=ASGITransport(app=app), base_url="http://test"
    ) as client:
        response = await client.post(
            f"/api/v1/accounts/{test_account.id}/tasks",
            json={"cron_expression": "0 9 * * *"},
            headers=auth_headers,
        )

    assert response.status_code == 201
    data = response.json()
    assert data["success"] is True
    assert data["data"]["cron_expression"] == "0 9 * * *"
    # New tasks are enabled by default and linked to the target account.
    assert data["data"]["is_enabled"] is True
    assert data["data"]["account_id"] == test_account.id
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_create_task_invalid_cron(
    db_session: AsyncSession,
    test_user: User,
    test_account: Account,
    auth_headers: dict,
):
    """Test creating a task with invalid cron expression."""
    from api_service.app.main import app

    # httpx >= 0.27 removed the ``app=`` shortcut; use ASGITransport instead.
    async with AsyncClient(
        transport=ASGITransport(app=app), base_url="http://test"
    ) as client:
        response = await client.post(
            f"/api/v1/accounts/{test_account.id}/tasks",
            json={"cron_expression": "invalid cron"},
            headers=auth_headers,
        )

    # Malformed cron expressions are rejected with a 400 error envelope.
    assert response.status_code == 400
    data = response.json()
    assert data["success"] is False
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_list_tasks(
    db_session: AsyncSession,
    test_user: User,
    test_account: Account,
    auth_headers: dict,
):
    """Test listing tasks for an account."""
    # Seed two tasks: one enabled, one disabled.
    task1 = Task(account_id=test_account.id, cron_expression="0 9 * * *", is_enabled=True)
    task2 = Task(account_id=test_account.id, cron_expression="0 18 * * *", is_enabled=False)
    db_session.add_all([task1, task2])
    await db_session.commit()

    from api_service.app.main import app

    # httpx >= 0.27 removed the ``app=`` shortcut; use ASGITransport instead.
    async with AsyncClient(
        transport=ASGITransport(app=app), base_url="http://test"
    ) as client:
        response = await client.get(
            f"/api/v1/accounts/{test_account.id}/tasks",
            headers=auth_headers,
        )

    assert response.status_code == 200
    data = response.json()
    assert data["success"] is True
    # Both seeded tasks are returned regardless of their enabled flag.
    assert len(data["data"]) == 2
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_update_task(
    db_session: AsyncSession,
    test_user: User,
    test_account: Account,
    auth_headers: dict,
):
    """Test updating a task (enable/disable)."""
    task = Task(account_id=test_account.id, cron_expression="0 9 * * *", is_enabled=True)
    db_session.add(task)
    await db_session.commit()
    await db_session.refresh(task)

    from api_service.app.main import app

    # httpx >= 0.27 removed the ``app=`` shortcut; use ASGITransport instead.
    async with AsyncClient(
        transport=ASGITransport(app=app), base_url="http://test"
    ) as client:
        response = await client.put(
            f"/api/v1/tasks/{task.id}",
            json={"is_enabled": False},
            headers=auth_headers,
        )

    assert response.status_code == 200
    data = response.json()
    assert data["success"] is True
    # The update is reflected in the response payload.
    assert data["data"]["is_enabled"] is False
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_delete_task(
    db_session: AsyncSession,
    test_user: User,
    test_account: Account,
    auth_headers: dict,
):
    """Test deleting a task."""
    task = Task(account_id=test_account.id, cron_expression="0 9 * * *", is_enabled=True)
    db_session.add(task)
    await db_session.commit()
    await db_session.refresh(task)

    from api_service.app.main import app

    # httpx >= 0.27 removed the ``app=`` shortcut; use ASGITransport instead.
    async with AsyncClient(
        transport=ASGITransport(app=app), base_url="http://test"
    ) as client:
        response = await client.delete(
            f"/api/v1/tasks/{task.id}",
            headers=auth_headers,
        )

    assert response.status_code == 200
    data = response.json()
    assert data["success"] is True

    # Verify the row is actually gone from the database.
    from sqlalchemy import select
    result = await db_session.execute(select(Task).where(Task.id == task.id))
    deleted_task = result.scalar_one_or_none()
    assert deleted_task is None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_access_other_user_task_forbidden(
    db_session: AsyncSession,
    test_account: Account,
):
    """Test that users cannot access tasks from other users' accounts."""
    # Create another user
    other_user = User(
        username="otheruser",
        email="other@example.com",
        hashed_password="hashed_password",
    )
    db_session.add(other_user)
    await db_session.commit()
    await db_session.refresh(other_user)

    # Create a task for test_account (owned by the original test user)
    task = Task(account_id=test_account.id, cron_expression="0 9 * * *", is_enabled=True)
    db_session.add(task)
    await db_session.commit()
    await db_session.refresh(task)

    # Try to modify it with other_user's token
    other_token = create_access_token({"sub": other_user.id})
    other_headers = {"Authorization": f"Bearer {other_token}"}

    from api_service.app.main import app

    # httpx >= 0.27 removed the ``app=`` shortcut; use ASGITransport instead.
    async with AsyncClient(
        transport=ASGITransport(app=app), base_url="http://test"
    ) as client:
        response = await client.put(
            f"/api/v1/tasks/{task.id}",
            json={"is_enabled": False},
            headers=other_headers,
        )

    assert response.status_code == 403
|
||||
--- new file: backend/tests/test_auth_service.py (+317 lines) ---
|
||||
"""
|
||||
Tests for auth_service: security utils, AuthService logic, and API endpoints.
|
||||
Validates tasks 2.1 – 2.3.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import pytest_asyncio
|
||||
from unittest.mock import patch, AsyncMock
|
||||
from fastapi import HTTPException
|
||||
|
||||
from shared.models import User
|
||||
from tests.conftest import TestSessionLocal, FakeRedis
|
||||
|
||||
# Import security utilities
|
||||
from auth_service.app.utils.security import (
|
||||
hash_password,
|
||||
verify_password,
|
||||
validate_password_strength,
|
||||
create_access_token,
|
||||
decode_access_token,
|
||||
)
|
||||
from auth_service.app.services.auth_service import AuthService
|
||||
from auth_service.app.schemas.user import UserCreate, UserLogin
|
||||
|
||||
|
||||
# ===================== Password utilities =====================
|
||||
|
||||
|
||||
class TestPasswordUtils:
    """Hashing, verification, and strength validation of passwords."""

    def test_hash_and_verify(self):
        plaintext = "MyP@ssw0rd"
        digest = hash_password(plaintext)
        assert verify_password(plaintext, digest)

    def test_wrong_password_rejected(self):
        digest = hash_password("Correct1!")
        assert not verify_password("Wrong1!", digest)

    @pytest.mark.parametrize(
        "pwd, expected_valid",
        [
            ("Ab1!abcd", True),  # meets all criteria
            ("short1A!", True),  # 8 chars with upper/lower/digit/special
            ("alllower1!", False),  # no uppercase
            ("ALLUPPER1!", False),  # no lowercase
            ("NoDigits!Aa", False),  # no digit
            ("NoSpecial1a", False),  # no special char
        ],
    )
    def test_password_strength(self, pwd, expected_valid):
        verdict, _ = validate_password_strength(pwd)
        assert verdict == expected_valid

    def test_password_too_short(self):
        verdict, message = validate_password_strength("Ab1!")
        assert not verdict
        assert "8 characters" in message
|
||||
|
||||
|
||||
# ===================== JWT utilities =====================
|
||||
|
||||
|
||||
class TestJWT:
    """Access-token encode/decode round-trips."""

    def test_create_and_decode(self):
        claims = {"sub": "user-123", "username": "alice"}
        encoded = create_access_token(claims)
        decoded = decode_access_token(encoded)
        assert decoded is not None
        assert decoded["sub"] == "user-123"

    def test_invalid_token_returns_none(self):
        garbage = "not.a.valid.token"
        assert decode_access_token(garbage) is None
|
||||
|
||||
|
||||
# ===================== Refresh token helpers (with fake Redis) =====================
|
||||
|
||||
|
||||
class TestRefreshToken:
    """Refresh-token lifecycle exercised against the fake Redis fixture."""

    @pytest.mark.asyncio
    async def test_create_verify_revoke(self, fake_redis):
        """Full lifecycle: create → verify → revoke → verify again returns None."""

        async def _fake_get_redis():
            return fake_redis

        # Patch get_redis so the security helpers talk to the in-memory fake.
        with patch(
            "auth_service.app.utils.security.get_redis",
            new=_fake_get_redis,
        ):
            # Imported inside the patch context, mirroring the file's style.
            from auth_service.app.utils.security import (
                create_refresh_token,
                verify_refresh_token,
                revoke_refresh_token,
            )

            token = await create_refresh_token("user-42")
            assert isinstance(token, str) and len(token) > 0

            uid = await verify_refresh_token(token)
            assert uid == "user-42"

            # After revocation the same token must no longer verify.
            await revoke_refresh_token(token)
            assert await verify_refresh_token(token) is None
|
||||
|
||||
|
||||
# ===================== AuthService business logic =====================
|
||||
|
||||
|
||||
class TestAuthServiceLogic:
    """Unit tests for AuthService user creation and lookup."""

    @pytest_asyncio.fixture
    async def auth_svc(self, db_session):
        """AuthService bound to the per-test database session."""
        return AuthService(db_session)

    @pytest.mark.asyncio
    async def test_create_user_success(self, auth_svc, db_session):
        payload = UserCreate(username="newuser", email="new@example.com", password="Str0ng!Pass")
        created = await auth_svc.create_user(payload)
        assert created.username == "newuser"
        assert created.email == "new@example.com"
        # The stored hash must never equal the raw password.
        assert created.hashed_password != "Str0ng!Pass"

    @pytest.mark.asyncio
    async def test_create_user_weak_password_rejected(self, auth_svc):
        # Passes Pydantic's min_length=8 but fails the strength check.
        payload = UserCreate(username="weakuser", email="weak@example.com", password="weakpassword")
        with pytest.raises(HTTPException) as exc_info:
            await auth_svc.create_user(payload)
        assert exc_info.value.status_code == 400

    @pytest.mark.asyncio
    async def test_get_user_by_email(self, auth_svc, db_session):
        await auth_svc.create_user(
            UserCreate(username="findme", email="find@example.com", password="Str0ng!Pass")
        )
        found = await auth_svc.get_user_by_email("find@example.com")
        assert found is not None
        assert found.username == "findme"

    @pytest.mark.asyncio
    async def test_check_user_exists(self, auth_svc, db_session):
        await auth_svc.create_user(
            UserCreate(username="exists", email="exists@example.com", password="Str0ng!Pass")
        )
        by_email, by_username = await auth_svc.check_user_exists("exists@example.com", "other")
        assert by_email is not None
        assert by_username is None
|
||||
|
||||
|
||||
# ===================== Auth API endpoint tests =====================
|
||||
|
||||
|
||||
class TestAuthAPI:
    """Integration tests hitting the FastAPI app via httpx."""

    @pytest_asyncio.fixture
    async def client(self, fake_redis):
        """
        Provide an httpx AsyncClient wired to the auth_service app,
        with DB session overridden to use the test SQLite engine.
        """
        from shared.models import get_db
        from auth_service.app.main import app
        from httpx import AsyncClient, ASGITransport
        from tests.conftest import TEST_ENGINE, TestSessionLocal, Base

        # Ensure all tables exist in the test database before the app runs.
        async with TEST_ENGINE.begin() as conn:
            await conn.run_sync(Base.metadata.create_all)

        async def override_get_db():
            async with TestSessionLocal() as session:
                yield session

        async def _fake_get_redis():
            return fake_redis

        app.dependency_overrides[get_db] = override_get_db

        # Redis is patched for the whole lifetime of the client.
        with patch(
            "auth_service.app.utils.security.get_redis",
            new=_fake_get_redis,
        ):
            async with AsyncClient(
                transport=ASGITransport(app=app), base_url="http://test"
            ) as ac:
                yield ac

        # Teardown: remove the dependency override so other tests are unaffected.
        app.dependency_overrides.clear()

    @pytest.mark.asyncio
    async def test_register_and_login(self, client):
        # Register
        resp = await client.post("/auth/register", json={
            "username": "apiuser",
            "email": "api@example.com",
            "password": "Str0ng!Pass1",
        })
        assert resp.status_code == 201
        body = resp.json()
        assert body["success"] is True
        assert body["data"]["username"] == "apiuser"

        # Login
        resp = await client.post("/auth/login", json={
            "email": "api@example.com",
            "password": "Str0ng!Pass1",
        })
        assert resp.status_code == 200
        body = resp.json()
        assert body["success"] is True
        assert "access_token" in body["data"]
        assert "refresh_token" in body["data"]

    @pytest.mark.asyncio
    async def test_login_wrong_password(self, client):
        await client.post("/auth/register", json={
            "username": "wrongpw",
            "email": "wrongpw@example.com",
            "password": "Str0ng!Pass1",
        })
        resp = await client.post("/auth/login", json={
            "email": "wrongpw@example.com",
            "password": "WrongPassword1!",
        })
        assert resp.status_code == 401

    @pytest.mark.asyncio
    async def test_register_duplicate_email(self, client):
        await client.post("/auth/register", json={
            "username": "dup1",
            "email": "dup@example.com",
            "password": "Str0ng!Pass1",
        })
        # Same email, different username -> conflict.
        resp = await client.post("/auth/register", json={
            "username": "dup2",
            "email": "dup@example.com",
            "password": "Str0ng!Pass1",
        })
        assert resp.status_code == 409

    @pytest.mark.asyncio
    async def test_register_weak_password(self, client):
        resp = await client.post("/auth/register", json={
            "username": "weakpwd",
            "email": "weakpwd@example.com",
            "password": "weakpassword",
        })
        assert resp.status_code == 400

    @pytest.mark.asyncio
    async def test_me_endpoint(self, client):
        await client.post("/auth/register", json={
            "username": "meuser",
            "email": "me@example.com",
            "password": "Str0ng!Pass1",
        })
        login_resp = await client.post("/auth/login", json={
            "email": "me@example.com",
            "password": "Str0ng!Pass1",
        })
        token = login_resp.json()["data"]["access_token"]

        resp = await client.get(
            "/auth/me",
            headers={"Authorization": f"Bearer {token}"},
        )
        assert resp.status_code == 200
        body = resp.json()
        assert body["data"]["username"] == "meuser"
        assert body["data"]["email"] == "me@example.com"

    @pytest.mark.asyncio
    async def test_refresh_endpoint(self, client):
        await client.post("/auth/register", json={
            "username": "refreshuser",
            "email": "refresh@example.com",
            "password": "Str0ng!Pass1",
        })
        login_resp = await client.post("/auth/login", json={
            "email": "refresh@example.com",
            "password": "Str0ng!Pass1",
        })
        refresh_token = login_resp.json()["data"]["refresh_token"]

        # Refresh
        resp = await client.post("/auth/refresh", json={
            "refresh_token": refresh_token,
        })
        assert resp.status_code == 200
        body = resp.json()
        assert body["success"] is True
        assert "access_token" in body["data"]
        new_refresh = body["data"]["refresh_token"]
        assert new_refresh != refresh_token  # rotation

        # Old token should be revoked
        resp2 = await client.post("/auth/refresh", json={
            "refresh_token": refresh_token,
        })
        assert resp2.status_code == 401

    @pytest.mark.asyncio
    async def test_me_without_token(self, client):
        resp = await client.get("/auth/me")
        # Either 401 or 403 is acceptable depending on the auth dependency.
        assert resp.status_code in (401, 403)

    @pytest.mark.asyncio
    async def test_unified_error_format(self, client):
        """Verify error responses follow the unified format."""
        resp = await client.post("/auth/login", json={
            "email": "nobody@example.com",
            "password": "Whatever1!",
        })
        body = resp.json()
        assert body["success"] is False
        assert body["data"] is None
        assert "error" in body
|
||||
--- new file: backend/tests/test_shared.py (+171 lines) ---
|
||||
"""
|
||||
Tests for the shared module: crypto, response format, and ORM models.
|
||||
Validates tasks 1.1 – 1.5 (excluding optional PBT task 1.4).
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import pytest_asyncio
|
||||
from sqlalchemy import select
|
||||
|
||||
from shared.crypto import derive_key, encrypt_cookie, decrypt_cookie
|
||||
from shared.response import success_response, error_response
|
||||
from shared.models import User, Account, Task, SigninLog
|
||||
|
||||
from tests.conftest import TestSessionLocal
|
||||
|
||||
|
||||
# ===================== Crypto tests =====================
|
||||
|
||||
|
||||
class TestCrypto:
    """Round-trip and failure-mode checks for the cookie encryption helpers."""

    def setup_method(self):
        self.key = derive_key("test-encryption-key")

    def _roundtrip(self, plaintext):
        """Encrypt then decrypt *plaintext* with the test key."""
        ciphertext, iv = encrypt_cookie(plaintext, self.key)
        return decrypt_cookie(ciphertext, iv, self.key)

    def test_encrypt_decrypt_roundtrip(self):
        original = "SUB=abc123; SUBP=xyz789;"
        assert self._roundtrip(original) == original

    def test_different_plaintexts_produce_different_ciphertexts(self):
        first, _ = encrypt_cookie("cookie_a", self.key)
        second, _ = encrypt_cookie("cookie_b", self.key)
        assert first != second

    def test_wrong_key_raises(self):
        ciphertext, iv = encrypt_cookie("secret", self.key)
        other_key = derive_key("wrong-key")
        with pytest.raises(Exception):
            decrypt_cookie(ciphertext, iv, other_key)

    def test_empty_string_roundtrip(self):
        assert self._roundtrip("") == ""

    def test_unicode_roundtrip(self):
        original = "微博Cookie=值; 中文=测试"
        assert self._roundtrip(original) == original
|
||||
|
||||
|
||||
# ===================== Response format tests =====================
|
||||
|
||||
|
||||
class TestResponseFormat:
    """Shape checks for the unified success/error response helpers."""

    def test_success_response_structure(self):
        payload = success_response({"id": 1}, "ok")
        assert payload["success"] is True
        assert payload["data"] == {"id": 1}
        assert payload["message"] == "ok"

    def test_success_response_defaults(self):
        payload = success_response()
        assert payload["success"] is True
        assert payload["data"] is None
        assert "Operation successful" in payload["message"]

    def test_error_response_structure(self):
        import json

        resp = error_response("bad", "VALIDATION_ERROR", [{"field": "email"}], 400)
        assert resp.status_code == 400
        body = json.loads(resp.body)
        assert body["success"] is False
        assert body["data"] is None
        assert body["error"]["code"] == "VALIDATION_ERROR"
        assert len(body["error"]["details"]) == 1
|
||||
|
||||
|
||||
# ===================== ORM model smoke tests =====================
|
||||
|
||||
|
||||
class TestORMModels:
    """Verify ORM models can be created and queried with SQLite."""

    @pytest.mark.asyncio
    async def test_create_user(self, db_session):
        user = User(
            username="testuser",
            email="test@example.com",
            hashed_password="hashed",
        )
        db_session.add(user)
        await db_session.commit()

        result = await db_session.execute(select(User).where(User.username == "testuser"))
        fetched = result.scalar_one()
        assert fetched.email == "test@example.com"
        # Model-level default — users start active.
        assert fetched.is_active is True

    @pytest.mark.asyncio
    async def test_create_account_linked_to_user(self, db_session):
        user = User(username="u1", email="u1@x.com", hashed_password="h")
        db_session.add(user)
        # Commit first so user.id is populated for the FK below.
        await db_session.commit()

        acct = Account(
            user_id=user.id,
            weibo_user_id="12345",
            remark="test",
            encrypted_cookies="enc",
            iv="iv123",
        )
        db_session.add(acct)
        await db_session.commit()

        result = await db_session.execute(select(Account).where(Account.user_id == user.id))
        fetched = result.scalar_one()
        assert fetched.weibo_user_id == "12345"
        # Model-level default status.
        assert fetched.status == "pending"

    @pytest.mark.asyncio
    async def test_create_task_linked_to_account(self, db_session):
        user = User(username="u2", email="u2@x.com", hashed_password="h")
        db_session.add(user)
        await db_session.commit()

        acct = Account(
            user_id=user.id, weibo_user_id="99", remark="r",
            encrypted_cookies="e", iv="i",
        )
        db_session.add(acct)
        await db_session.commit()

        task = Task(account_id=acct.id, cron_expression="0 8 * * *")
        db_session.add(task)
        await db_session.commit()

        result = await db_session.execute(select(Task).where(Task.account_id == acct.id))
        fetched = result.scalar_one()
        assert fetched.cron_expression == "0 8 * * *"
        # Tasks are enabled by default.
        assert fetched.is_enabled is True

    @pytest.mark.asyncio
    async def test_create_signin_log(self, db_session):
        user = User(username="u3", email="u3@x.com", hashed_password="h")
        db_session.add(user)
        await db_session.commit()

        acct = Account(
            user_id=user.id, weibo_user_id="77", remark="r",
            encrypted_cookies="e", iv="i",
        )
        db_session.add(acct)
        await db_session.commit()

        log = SigninLog(
            account_id=acct.id,
            topic_title="超话A",
            status="success",
        )
        db_session.add(log)
        await db_session.commit()

        result = await db_session.execute(
            select(SigninLog).where(SigninLog.account_id == acct.id)
        )
        fetched = result.scalar_one()
        assert fetched.status == "success"
        assert fetched.topic_title == "超话A"
|
||||
Reference in New Issue
Block a user