# weibo_signin/backend/signin_executor/app/services/weibo_client.py
"""
Weibo API Client
Handles all interactions with Weibo.com, including login, sign-in, and data fetching
"""
import os
import sys

import httpx
import asyncio
import logging
import random

from typing import Dict, Any, Optional, List, Tuple

# Add parent directory to path for imports
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../../.."))

from shared.crypto import decrypt_cookie, derive_key
from shared.config import shared_settings

from app.config import settings
from app.models.signin_models import WeiboAccount, WeiboSuperTopic
from app.services.antibot import antibot
logger = logging.getLogger(__name__)
class WeiboClient:
    """Client for interacting with the Weibo API.

    Wraps cookie verification, super-topic listing and sign-in, plus helper
    lookups (proxy pool, browser fingerprint). Account cookies are stored
    encrypted (AES-256-GCM via the shared crypto module) and decrypted on
    demand. All network paths go through the antibot module for dynamic
    headers, optional proxying, and randomized delays.

    NOTE(review): get_super_topics / sign_super_topic / get_browser_fingerprint
    are mock implementations awaiting the real API integration.
    """

    def __init__(self):
        # Use the antibot module for dynamic headers (random User-Agent etc.)
        self.base_headers = antibot.build_headers()

    async def verify_cookies(self, account: WeiboAccount) -> bool:
        """Return True if the account's Weibo cookies are still valid.

        Decrypts the stored cookies, then fetches a logged-in-only page
        through an (optional) proxy with randomized headers and a random
        pre-request delay. Any failure — decryption, network error,
        unexpected page content — is reported as "invalid" (False), never
        raised to the caller.
        """
        try:
            # Decrypt cookies using the shared crypto module
            cookies_dict = self._decrypt_cookies(account.encrypted_cookies, account.iv)
            if not cookies_dict:
                logger.error(f"Failed to decrypt cookies for account {account.weibo_user_id}")
                return False

            # Get proxy (with fallback to direct connection)
            proxy = await antibot.get_proxy()
            # Use dynamic headers with a random User-Agent
            headers = antibot.build_headers()

            # Add a random delay before the request (anti-bot protection)
            await asyncio.sleep(antibot.get_random_delay())

            async with httpx.AsyncClient(
                cookies=cookies_dict,
                headers=headers,
                proxies=proxy,
                timeout=10.0
            ) as client:
                response = await client.get("https://weibo.com/mygroups", follow_redirects=True)
                # A logged-in session renders the "我的首页" (my homepage) marker;
                # its absence on a 200 means the cookies no longer authenticate.
                if response.status_code == 200 and "我的首页" in response.text:
                    logger.info(f"Cookies for account {account.weibo_user_id} are valid")
                    return True
                logger.warning(f"Cookies for account {account.weibo_user_id} are invalid")
                return False
        except Exception as e:
            logger.error(f"Error verifying cookies: {e}")
            return False

    async def get_super_topics(self, account: WeiboAccount) -> List[WeiboSuperTopic]:
        """Return the list of super topics followed by *account*.

        Mock implementation — in the real system this requires a chain of
        authenticated API calls. Returns an empty list on any error.
        """
        try:
            # Simulate API call latency
            await asyncio.sleep(random.uniform(1.0, 2.0))

            # Return mock data
            return [
                WeiboSuperTopic(id="topic_001", title="Python编程", url="...", is_signed=False),
                WeiboSuperTopic(id="topic_002", title="人工智能", url="...", is_signed=False),
                WeiboSuperTopic(id="topic_003", title="机器学习", url="...", is_signed=True)
            ]
        except Exception as e:
            logger.error(f"Error fetching super topics: {e}")
            return []

    async def sign_super_topic(
        self,
        account: WeiboAccount,
        topic: WeiboSuperTopic,
        task_id: str
    ) -> Tuple[bool, Optional[Dict[str, Any]], Optional[str]]:
        """Execute sign-in for a single super topic.

        Returns:
            (success, reward_info, error_message) —
            * success: True when the topic is signed (or was already signed today);
            * reward_info: the "reward" sub-dict of a successful response, else None;
            * error_message: None on clean success, "Already signed" for the
              already-signed case, otherwise the failure reason.
        """
        try:
            # Decrypt cookies using the shared crypto module
            cookies_dict = self._decrypt_cookies(account.encrypted_cookies, account.iv)
            if not cookies_dict:
                error_msg = "Failed to decrypt cookies"
                logger.error(error_msg)
                return False, None, error_msg

            # Get proxy (with fallback to direct connection); currently staged
            # for the real request path — the mock below does not use it yet.
            proxy = await antibot.get_proxy()
            # Use dynamic headers with a random User-Agent
            headers = antibot.build_headers()
            # Add a random delay before the request (anti-bot protection)
            await asyncio.sleep(antibot.get_random_delay())

            # Prepare request payload for the check-in endpoint (staged for
            # the real call; unused by the mock below)
            payload = {
                "ajwvr": "6",
                "api": "http://i.huati.weibo.com/aj/super/checkin",
                "id": topic.id,
                "location": "page_100808_super_index",
                "refer_flag": "100808_-_1",
                "refer_lflag": "100808_-_1",
                "ua": headers["User-Agent"],
                "is_new": "1",
                "is_from_ad": "0",
                "ext": "mi_898_1_0_0"
            }

            # In a real scenario we may need the browser-automation service to
            # produce signed parameters or solve JS challenges.
            # Simulate the API call for now.
            await asyncio.sleep(random.uniform(0.5, 1.5))

            # Mock response - assume success
            response_data = {
                "code": "100000",
                "msg": "签到成功",
                "data": {
                    "tip": "签到成功",
                    "alert_title": "签到成功",
                    "alert_subtitle": "恭喜你成为今天第12345位签到的人",
                    "reward": {"exp": 2, "credit": 1}
                }
            }

            # Weibo API codes: "100000" = success, "382004" = already signed today
            if response_data.get("code") == "100000":
                logger.info(f"Successfully signed topic: {topic.title}")
                reward_info = response_data.get("data", {}).get("reward", {})
                return True, reward_info, None
            elif response_data.get("code") == "382004":
                logger.info(f"Topic {topic.title} already signed today")
                return True, None, "Already signed"
            else:
                error_msg = response_data.get("msg", "Unknown error")
                logger.error(f"Failed to sign topic {topic.title}: {error_msg}")
                return False, None, error_msg
        except Exception as e:
            error_msg = f"Exception signing topic {topic.title}: {str(e)}"
            logger.error(error_msg)
            return False, None, error_msg

    def _decrypt_cookies(self, encrypted_cookies: str, iv: str) -> Dict[str, str]:
        """Decrypt an account's cookie blob and parse it into a dict.

        Uses AES-256-GCM via the shared crypto module; the key is derived
        from the shared configuration secret. Expected plaintext format:
        "key1=value1; key2=value2; ...".

        Returns an empty dict on any failure (wrong key, corrupt ciphertext).
        """
        try:
            # Derive the AES key from the shared configuration secret.
            # (Renamed from `key` so the cookie-name loop variable below
            # no longer shadows the cryptographic key.)
            aes_key = derive_key(shared_settings.COOKIE_ENCRYPTION_KEY)
            # Decrypt using the shared crypto module
            plaintext = decrypt_cookie(encrypted_cookies, iv, aes_key)

            # Parse the cookie string into a dict
            cookies_dict: Dict[str, str] = {}
            for cookie_pair in plaintext.split(";"):
                cookie_pair = cookie_pair.strip()
                if "=" in cookie_pair:
                    # Split on the first '=' only: cookie values may contain '='
                    name, value = cookie_pair.split("=", 1)
                    cookies_dict[name.strip()] = value.strip()
            return cookies_dict
        except Exception as e:
            logger.error(f"Failed to decrypt cookies: {e}")
            return {}

    async def get_proxy(self) -> Optional[Dict[str, str]]:
        """Fetch a proxy from the proxy-pool service.

        Returns an httpx-style proxy mapping (target scheme -> proxy URL),
        or None when the pool is unavailable or returns a non-200.

        BUG FIX: the https:// target previously mapped to an
        https://-schemed proxy URL, which assumes a TLS-capable proxy.
        Pool proxies are plain-HTTP endpoints, so both target schemes use
        the http:// proxy URL (HTTPS traffic tunnels via CONNECT).
        """
        try:
            async with httpx.AsyncClient(timeout=5.0) as client:
                response = await client.get(f"{settings.PROXY_POOL_URL}/get")
                if response.status_code == 200:
                    proxy_url = f"http://{response.json()['proxy']}"
                    return {
                        "http://": proxy_url,
                        "https://": proxy_url
                    }
                else:
                    return None
        except Exception as e:
            logger.error(f"Failed to get proxy: {e}")
            return None

    async def get_browser_fingerprint(self) -> Dict[str, Any]:
        """Return a browser fingerprint from the generator service.

        Mock implementation — returns a fixed Chrome-on-Windows profile.
        Returns an empty dict on error.
        """
        try:
            # Mock implementation
            return {
                "user_agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
                "screen_resolution": "1920x1080",
                "timezone": "Asia/Shanghai",
                "plugins": ["PDF Viewer", "Chrome PDF Viewer", "Native Client"]
            }
        except Exception as e:
            logger.error(f"Failed to get browser fingerprint: {e}")
            return {}