"""
|
|
Weibo API Client
|
|
Handles all interactions with Weibo.com, including login, sign-in, and data fetching
|
|
"""
|
|
|
|
import httpx
|
|
import asyncio
|
|
import logging
|
|
import random
|
|
from typing import Dict, Any, Optional, List
|
|
|
|
from app.config import settings
|
|
from app.models.signin_models import WeiboAccount, WeiboSuperTopic
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
class WeiboClient:
    """Client for interacting with the Weibo web API.

    Most network-facing methods are currently mock implementations: they
    simulate request latency and return canned data, while keeping the
    request scaffolding (headers, cookies, payloads) that the real calls
    would use.
    """

    def __init__(self):
        # Baseline browser-like headers sent with every request to reduce
        # the chance of being flagged as automated traffic.
        self.base_headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
            "Accept": "application/json, text/plain, */*",
            "Accept-Language": "en-US,en;q=0.9,zh-CN;q=0.8,zh;q=0.7",
            "Connection": "keep-alive",
            "Referer": "https://weibo.com/"
        }

    async def verify_cookies(self, account: WeiboAccount) -> bool:
        """Verify whether the account's stored Weibo cookies are still valid.

        Args:
            account: Account whose encrypted cookies should be checked.

        Returns:
            True if the session is still authenticated; False otherwise,
            including on any network or decryption error.
        """
        try:
            # Decrypt cookies before attaching them to the session
            cookies = self._decrypt_cookies(account.encrypted_cookies, account.iv)

            async with httpx.AsyncClient(cookies=cookies, headers=self.base_headers) as client:
                response = await client.get("https://weibo.com/mygroups", follow_redirects=True)

                # "我的首页" ("My homepage") only appears for a logged-in
                # session; an expired session lands on the login page instead.
                if response.status_code == 200 and "我的首页" in response.text:
                    logger.info(f"Cookies for account {account.weibo_user_id} are valid")
                    return True
                else:
                    logger.warning(f"Cookies for account {account.weibo_user_id} are invalid")
                    return False
        except Exception as e:
            logger.error(f"Error verifying cookies: {e}")
            return False

    async def get_super_topics(self, account: WeiboAccount) -> List[WeiboSuperTopic]:
        """Get the list of super topics followed by an account.

        Args:
            account: Account whose followed super topics should be listed.

        Returns:
            A list of ``WeiboSuperTopic`` entries; an empty list on error.
        """
        try:
            # Mock implementation - in the real system this would involve
            # complex API calls. Simulate API call delay.
            await asyncio.sleep(random.uniform(1.0, 2.0))

            # Return mock data
            return [
                WeiboSuperTopic(id="topic_001", title="Python编程", url="...", is_signed=False),
                WeiboSuperTopic(id="topic_002", title="人工智能", url="...", is_signed=False),
                WeiboSuperTopic(id="topic_003", title="机器学习", url="...", is_signed=True)
            ]
        except Exception as e:
            logger.error(f"Error fetching super topics: {e}")
            return []

    async def sign_super_topic(self, account: WeiboAccount, topic: WeiboSuperTopic, task_id: str) -> bool:
        """Execute sign-in (check-in) for a single super topic.

        Args:
            account: Account performing the sign-in.
            topic: Super topic to sign in to.
            task_id: Identifier of the enclosing sign-in task (for tracing).

        Returns:
            True on success or if the topic was already signed today;
            False on failure or exception.
        """
        try:
            # Decrypt cookies and build the check-in payload. Both are
            # scaffolding for the real request and are not used by the
            # mock path below.
            cookies = self._decrypt_cookies(account.encrypted_cookies, account.iv)

            payload = {
                "ajwvr": "6",
                "api": "http://i.huati.weibo.com/aj/super/checkin",
                "id": topic.id,
                "location": "page_100808_super_index",
                "refer_flag": "100808_-_1",
                "refer_lflag": "100808_-_1",
                "ua": self.base_headers["User-Agent"],
                "is_new": "1",
                "is_from_ad": "0",
                "ext": "mi_898_1_0_0"
            }

            # In a real scenario, we might need to call a browser automation
            # service to get signed parameters or handle JS challenges.

            # Simulate API call
            await asyncio.sleep(random.uniform(0.5, 1.5))

            # Mock response - assume success
            response_data = {
                "code": "100000",
                "msg": "签到成功",
                "data": {
                    "tip": "签到成功",
                    "alert_title": "签到成功",
                    "alert_subtitle": "恭喜你成为今天第12345位签到的人",
                    "reward": {"exp": 2, "credit": 1}
                }
            }

            # "100000" = success; "382004" = already signed today (treated
            # as success so retries do not re-run the topic).
            if response_data.get("code") == "100000":
                logger.info(f"Successfully signed topic: {topic.title}")
                return True
            elif response_data.get("code") == "382004":
                logger.info(f"Topic {topic.title} already signed today")
                return True
            else:
                logger.error(f"Failed to sign topic {topic.title}: {response_data.get('msg')}")
                return False

        except Exception as e:
            logger.error(f"Exception signing topic {topic.title}: {e}")
            return False

    def _decrypt_cookies(self, encrypted_cookies: str, iv: str) -> Dict[str, str]:
        """Decrypt stored cookies (AES-256-GCM in the real system).

        Args:
            encrypted_cookies: Ciphertext of the cookie jar.
            iv: Initialization vector used for encryption.

        Returns:
            A cookie-name -> value mapping; an empty dict on failure.
        """
        try:
            # Mock implementation - return dummy cookies. A real
            # implementation would use a proper crypto library.
            return {
                "SUB": "_2A25z...",
                "SUBP": "0033Wr...",
                "ALF": "16...",
                "SSOLoginState": "16...",
                "SCF": "...",
                "UN": "testuser"
            }
        except Exception as e:
            logger.error(f"Failed to decrypt cookies: {e}")
            return {}

    async def get_proxy(self) -> Optional[Dict[str, str]]:
        """Get a proxy from the proxy pool service.

        Returns:
            An httpx-style scheme -> proxy-URL mapping, or None if no proxy
            is available or the pool request fails.
        """
        try:
            async with httpx.AsyncClient(timeout=5.0) as client:
                response = await client.get(f"{settings.PROXY_POOL_URL}/get")
                if response.status_code == 200:
                    proxy_info = response.json()
                    # BUGFIX: the pool hands out plain HTTP proxies
                    # (ip:port), so the proxy must be addressed with the
                    # http:// scheme for BOTH target schemes. The previous
                    # "https://{proxy}" value attempted a TLS handshake with
                    # a non-TLS proxy, breaking every https request.
                    proxy_url = f"http://{proxy_info['proxy']}"
                    return {
                        "http://": proxy_url,
                        "https://": proxy_url
                    }
                else:
                    return None
        except Exception as e:
            logger.error(f"Failed to get proxy: {e}")
            return None

    async def get_browser_fingerprint(self) -> Dict[str, Any]:
        """Get a browser fingerprint from the generator service.

        Returns:
            A fingerprint dict (user agent, screen, timezone, plugins);
            an empty dict on failure.
        """
        try:
            # Mock implementation
            return {
                "user_agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
                "screen_resolution": "1920x1080",
                "timezone": "Asia/Shanghai",
                "plugins": ["PDF Viewer", "Chrome PDF Viewer", "Native Client"]
            }
        except Exception as e:
            logger.error(f"Failed to get browser fingerprint: {e}")
            return {}