扫码登录,获取cookies
This commit is contained in:
162
backend/signin_executor/app/services/antibot.py
Normal file
162
backend/signin_executor/app/services/antibot.py
Normal file
@@ -0,0 +1,162 @@
|
||||
"""
|
||||
Anti-bot protection module
|
||||
Implements various techniques to avoid detection by anti-crawling systems
|
||||
"""
|
||||
|
||||
import random
|
||||
import logging
|
||||
from typing import Optional, Dict, Any, List
|
||||
import httpx
|
||||
|
||||
from app.config import settings
|
||||
|
||||
logger = logging.getLogger(__name__)


# Predefined User-Agent list for rotation.
# NOTE: all strings must match what real browsers emit — a malformed UA is
# itself a bot fingerprint. Chromium Edge uses "Chrome/... Safari/537.36 Edg/...",
# never a bare "Edge/..." token.
USER_AGENTS = [
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.1 Safari/605.1.15",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:121.0) Gecko/20100101 Firefox/121.0",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:120.0) Gecko/20100101 Firefox/120.0",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
    # Fixed: real Edge UA format carries a Chrome token followed by "Edg/".
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36 Edg/120.0.0.0",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36",
]


class AntiBotProtection:
    """Anti-bot protection service.

    Provides random delays, User-Agent rotation, proxy-pool lookup, and
    browser-fingerprint simulation to reduce detection by anti-crawling systems.
    """

    def __init__(self):
        # Proxy pool endpoint and delay bounds come from service configuration.
        self.proxy_pool_url = settings.PROXY_POOL_URL
        self.random_delay_min = settings.RANDOM_DELAY_MIN
        self.random_delay_max = settings.RANDOM_DELAY_MAX

    def get_random_delay(self) -> float:
        """
        Generate random delay within configured range.
        Returns delay in seconds.

        Validates: Requirements 7.1
        """
        delay = random.uniform(self.random_delay_min, self.random_delay_max)
        logger.debug(f"Generated random delay: {delay:.2f}s")
        return delay

    def get_random_user_agent(self) -> str:
        """
        Select random User-Agent from predefined list.
        Returns User-Agent string.

        Validates: Requirements 7.2
        """
        user_agent = random.choice(USER_AGENTS)
        logger.debug(f"Selected User-Agent: {user_agent[:50]}...")
        return user_agent

    async def get_proxy(self) -> Optional[Dict[str, str]]:
        """
        Get proxy from proxy pool service.
        Returns proxy dict or None if unavailable.
        Falls back to direct connection if proxy pool is unavailable.

        Validates: Requirements 7.3, 7.4
        """
        try:
            async with httpx.AsyncClient(timeout=5.0) as client:
                response = await client.get(f"{self.proxy_pool_url}/get")

                if response.status_code == 200:
                    proxy_info = response.json()
                    proxy_url = proxy_info.get("proxy")

                    if proxy_url:
                        # The pool serves plain HTTP proxies; HTTPS traffic is
                        # tunnelled through the same proxy via CONNECT, so BOTH
                        # mount keys point at an http:// proxy URL. Using
                        # f"https://{proxy_url}" would attempt TLS to the proxy
                        # itself and fail.
                        proxy_dict = {
                            "http://": f"http://{proxy_url}",
                            "https://": f"http://{proxy_url}",
                        }
                        logger.info(f"Obtained proxy: {proxy_url}")
                        return proxy_dict
                    logger.warning("Proxy pool returned empty proxy")
                    return None

                logger.warning(f"Proxy pool returned status {response.status_code}")
                return None

        except httpx.RequestError as e:
            # Network-level failure talking to the pool: degrade gracefully.
            logger.warning(f"Proxy pool service unavailable: {e}, falling back to direct connection")
            return None
        except Exception as e:
            logger.error(f"Error getting proxy: {e}")
            return None

    def build_headers(self, user_agent: Optional[str] = None) -> Dict[str, str]:
        """
        Build HTTP headers with random User-Agent and common headers.

        Args:
            user_agent: Optional custom User-Agent, otherwise random one is selected

        Returns:
            Dict of HTTP headers
        """
        if user_agent is None:
            user_agent = self.get_random_user_agent()

        headers = {
            "User-Agent": user_agent,
            "Accept": "application/json, text/plain, */*",
            "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
            "Accept-Encoding": "gzip, deflate, br",
            "Connection": "keep-alive",
            "Referer": "https://weibo.com/",
            "Sec-Fetch-Dest": "empty",
            "Sec-Fetch-Mode": "cors",
            "Sec-Fetch-Site": "same-origin",
        }

        return headers

    def get_fingerprint_data(self) -> Dict[str, Any]:
        """
        Generate browser fingerprint data for simulation.

        Returns:
            Dict containing fingerprint information
        """
        screen_resolutions = [
            "1920x1080", "1366x768", "1440x900", "1536x864",
            "1280x720", "2560x1440", "3840x2160"
        ]

        # All entries must be valid IANA zone names; "Asia/Beijing" does not
        # exist in the tz database and would instantly flag the fingerprint
        # as fake (replaced with Asia/Macau).
        timezones = [
            "Asia/Shanghai", "Asia/Macau", "Asia/Hong_Kong",
            "Asia/Taipei", "Asia/Singapore"
        ]

        languages = [
            "zh-CN", "zh-CN,zh;q=0.9", "zh-CN,zh;q=0.9,en;q=0.8",
            "zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7"
        ]

        fingerprint = {
            "screen_resolution": random.choice(screen_resolutions),
            "timezone": random.choice(timezones),
            "language": random.choice(languages),
            "color_depth": random.choice([24, 32]),
            "platform": random.choice(["Win32", "MacIntel", "Linux x86_64"]),
            "hardware_concurrency": random.choice([4, 8, 12, 16]),
            "device_memory": random.choice([4, 8, 16, 32]),
        }

        logger.debug(f"Generated fingerprint: {fingerprint}")
        return fingerprint
||||
|
||||
|
||||
# Global instance
# Module-level singleton shared by importers (the sign-in service and the
# Weibo client both do `from app.services.antibot import antibot`).
antibot = AntiBotProtection()
|
||||
@@ -3,6 +3,8 @@ Core sign-in business logic service
|
||||
Handles Weibo super topic sign-in operations
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import asyncio
|
||||
import httpx
|
||||
import logging
|
||||
@@ -10,10 +12,21 @@ import random
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, Any, List, Optional
|
||||
from uuid import UUID
|
||||
from sqlalchemy import select, update
|
||||
|
||||
# Add parent directory to path for imports
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../../.."))
|
||||
|
||||
from shared.models.base import AsyncSessionLocal
|
||||
from shared.models.account import Account
|
||||
from shared.models.signin_log import SigninLog
|
||||
from shared.crypto import decrypt_cookie, derive_key
|
||||
from shared.config import shared_settings
|
||||
|
||||
from app.config import settings
|
||||
from app.models.signin_models import SignInRequest, SignInResult, TaskStatus, WeiboAccount, WeiboSuperTopic, AntiBotConfig
|
||||
from app.services.weibo_client import WeiboClient
|
||||
from app.services.antibot import antibot
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -72,6 +85,15 @@ class SignInService:
|
||||
|
||||
# Step 2: Setup session with proxy and fingerprint
|
||||
task_status.current_step = "setup_session"
|
||||
|
||||
# Verify cookies before proceeding
|
||||
cookies_valid = await self.weibo_client.verify_cookies(account)
|
||||
if not cookies_valid:
|
||||
logger.error(f"Cookies invalid for account {account_id}")
|
||||
# Update account status to invalid_cookie
|
||||
await self._update_account_status(account_id, "invalid_cookie")
|
||||
raise Exception("Cookie validation failed - cookies are invalid or expired")
|
||||
|
||||
await self._apply_anti_bot_protection()
|
||||
|
||||
task_status.steps_completed.append("setup_session")
|
||||
@@ -156,20 +178,31 @@ class SignInService:
|
||||
self.active_tasks[task_id].updated_at = datetime.now()
|
||||
|
||||
async def _get_account_info(self, account_id: str) -> Optional[WeiboAccount]:
    """
    Get Weibo account information from the database (replaces mock data).

    Args:
        account_id: Primary-key id of the account row.

    Returns:
        WeiboAccount built from the DB row, or None when the account is not
        found or the lookup fails.
    """
    try:
        async with AsyncSessionLocal() as session:
            stmt = select(Account).where(Account.id == account_id)
            result = await session.execute(stmt)
            account = result.scalar_one_or_none()

            if not account:
                logger.error(f"Account {account_id} not found in database")
                return None

            # Convert ORM model to Pydantic model
            return WeiboAccount(
                id=UUID(account.id),
                user_id=UUID(account.user_id),
                weibo_user_id=account.weibo_user_id,
                remark=account.remark or "",
                encrypted_cookies=account.encrypted_cookies,
                iv=account.iv,
                status=account.status,
                # Fall back to "now" when the row was never checked — TODO confirm
                # callers tolerate this sentinel.
                last_checked_at=account.last_checked_at or datetime.now()
            )
    except Exception as e:
        logger.error(f"Error fetching account {account_id}: {e}")
        return None
|
||||
@@ -177,18 +210,24 @@ class SignInService:
|
||||
async def _apply_anti_bot_protection(self):
    """Apply anti-bot measures: random delay, UA rotation, proxy, fingerprint."""
    # Random delay to mimic human behavior
    delay = antibot.get_random_delay()
    logger.debug(f"Applying random delay: {delay:.2f}s")
    await asyncio.sleep(delay)

    # Get random User-Agent
    user_agent = antibot.get_random_user_agent()
    logger.debug(f"Using User-Agent: {user_agent[:50]}...")

    # Try to get proxy (falls back to direct connection if unavailable)
    proxy = await antibot.get_proxy()
    if proxy:
        logger.info("Using proxy for requests")
    else:
        logger.info("Using direct connection (no proxy available)")

    # Get browser fingerprint
    fingerprint = antibot.get_fingerprint_data()
    logger.debug(f"Browser fingerprint: {fingerprint}")
|
||||
|
||||
async def _get_super_topics_list(self, account: WeiboAccount) -> List[WeiboSuperTopic]:
|
||||
"""Get list of super topics for account"""
|
||||
@@ -244,10 +283,18 @@ class SignInService:
|
||||
|
||||
if topic.is_signed:
|
||||
already_signed.append(topic.title)
|
||||
# Write log for already signed
|
||||
await self._write_signin_log(
|
||||
account_id=str(account.id),
|
||||
topic_title=topic.title,
|
||||
status="failed_already_signed",
|
||||
reward_info=None,
|
||||
error_message="Already signed today"
|
||||
)
|
||||
continue
|
||||
|
||||
# Execute signin for this topic
|
||||
success = await self.weibo_client.sign_super_topic(
|
||||
success, reward_info, error_msg = await self.weibo_client.sign_super_topic(
|
||||
account=account,
|
||||
topic=topic,
|
||||
task_id=task_id
|
||||
@@ -256,16 +303,88 @@ class SignInService:
|
||||
if success:
|
||||
signed.append(topic.title)
|
||||
logger.info(f"✅ Successfully signed topic: {topic.title}")
|
||||
|
||||
# Write success log
|
||||
await self._write_signin_log(
|
||||
account_id=str(account.id),
|
||||
topic_title=topic.title,
|
||||
status="success",
|
||||
reward_info=reward_info,
|
||||
error_message=None
|
||||
)
|
||||
else:
|
||||
errors.append(f"Failed to sign topic: {topic.title}")
|
||||
|
||||
# Write failure log
|
||||
await self._write_signin_log(
|
||||
account_id=str(account.id),
|
||||
topic_title=topic.title,
|
||||
status="failed_network",
|
||||
reward_info=None,
|
||||
error_message=error_msg
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
error_msg = f"Error signing topic {topic.title}: {str(e)}"
|
||||
logger.error(error_msg)
|
||||
errors.append(error_msg)
|
||||
|
||||
# Write error log
|
||||
await self._write_signin_log(
|
||||
account_id=str(account.id),
|
||||
topic_title=topic.title,
|
||||
status="failed_network",
|
||||
reward_info=None,
|
||||
error_message=str(e)
|
||||
)
|
||||
|
||||
return {
|
||||
"signed": signed,
|
||||
"already_signed": already_signed,
|
||||
"errors": errors
|
||||
}
|
||||
|
||||
async def _write_signin_log(
    self,
    account_id: str,
    topic_title: str,
    status: str,
    reward_info: Optional[Dict[str, Any]],
    error_message: Optional[str]
):
    """
    Persist one sign-in outcome to the signin_logs table.

    Best-effort: any database failure is logged and swallowed so a logging
    problem never aborts the surrounding sign-in flow.
    """
    try:
        # Build the row first, then commit it in a short-lived session.
        entry = SigninLog(
            account_id=account_id,
            topic_title=topic_title,
            status=status,
            reward_info=reward_info,
            error_message=error_message,
        )
        async with AsyncSessionLocal() as session:
            session.add(entry)
            await session.commit()
            logger.debug(f"Wrote signin log for account {account_id}, topic {topic_title}, status {status}")
    except Exception as e:
        logger.error(f"Failed to write signin log: {e}")
|
||||
|
||||
async def _update_account_status(self, account_id: str, status: str):
    """
    Update account status in database.

    Used when a cookie turns out to be invalid or the account is banned;
    also refreshes last_checked_at. Failures are logged, never raised.
    """
    try:
        async with AsyncSessionLocal() as session:
            await session.execute(
                update(Account)
                .where(Account.id == account_id)
                .values(status=status, last_checked_at=datetime.now())
            )
            await session.commit()
            logger.info(f"Updated account {account_id} status to {status}")
    except Exception as e:
        logger.error(f"Failed to update account status: {e}")
|
||||
|
||||
@@ -3,14 +3,23 @@ Weibo API Client
|
||||
Handles all interactions with Weibo.com, including login, sign-in, and data fetching
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import httpx
|
||||
import asyncio
|
||||
import logging
|
||||
import random
|
||||
from typing import Dict, Any, Optional, List
|
||||
from typing import Dict, Any, Optional, List, Tuple
|
||||
|
||||
# Add parent directory to path for imports
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../../.."))
|
||||
|
||||
from shared.crypto import decrypt_cookie, derive_key
|
||||
from shared.config import shared_settings
|
||||
|
||||
from app.config import settings
|
||||
from app.models.signin_models import WeiboAccount, WeiboSuperTopic
|
||||
from app.services.antibot import antibot
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -18,21 +27,35 @@ class WeiboClient:
|
||||
"""Client for interacting with Weibo API"""
|
||||
|
||||
def __init__(self):
    # Use antibot module for dynamic headers (random User-Agent + common
    # browser headers) instead of a hard-coded static dict.
    # NOTE(review): this freezes one random UA for the client's lifetime;
    # per-request methods rebuild fresh headers via antibot.build_headers().
    self.base_headers = antibot.build_headers()
|
||||
|
||||
async def verify_cookies(self, account: WeiboAccount) -> bool:
|
||||
"""Verify if Weibo cookies are still valid"""
|
||||
try:
|
||||
# Decrypt cookies
|
||||
cookies = self._decrypt_cookies(account.encrypted_cookies, account.iv)
|
||||
# Decrypt cookies using shared crypto module
|
||||
cookies_dict = self._decrypt_cookies(account.encrypted_cookies, account.iv)
|
||||
|
||||
async with httpx.AsyncClient(cookies=cookies, headers=self.base_headers) as client:
|
||||
if not cookies_dict:
|
||||
logger.error(f"Failed to decrypt cookies for account {account.weibo_user_id}")
|
||||
return False
|
||||
|
||||
# Get proxy (with fallback to direct connection)
|
||||
proxy = await antibot.get_proxy()
|
||||
|
||||
# Use dynamic headers with random User-Agent
|
||||
headers = antibot.build_headers()
|
||||
|
||||
# Add random delay before request
|
||||
delay = antibot.get_random_delay()
|
||||
await asyncio.sleep(delay)
|
||||
|
||||
async with httpx.AsyncClient(
|
||||
cookies=cookies_dict,
|
||||
headers=headers,
|
||||
proxies=proxy,
|
||||
timeout=10.0
|
||||
) as client:
|
||||
response = await client.get("https://weibo.com/mygroups", follow_redirects=True)
|
||||
|
||||
if response.status_code == 200 and "我的首页" in response.text:
|
||||
@@ -62,13 +85,34 @@ class WeiboClient:
|
||||
logger.error(f"Error fetching super topics: {e}")
|
||||
return []
|
||||
|
||||
async def sign_super_topic(self, account: WeiboAccount, topic: WeiboSuperTopic, task_id: str) -> bool:
|
||||
async def sign_super_topic(
|
||||
self,
|
||||
account: WeiboAccount,
|
||||
topic: WeiboSuperTopic,
|
||||
task_id: str
|
||||
) -> Tuple[bool, Optional[Dict[str, Any]], Optional[str]]:
|
||||
"""
|
||||
Execute sign-in for a single super topic
|
||||
Returns: (success, reward_info, error_message)
|
||||
"""
|
||||
try:
|
||||
# Decrypt cookies
|
||||
cookies = self._decrypt_cookies(account.encrypted_cookies, account.iv)
|
||||
# Decrypt cookies using shared crypto module
|
||||
cookies_dict = self._decrypt_cookies(account.encrypted_cookies, account.iv)
|
||||
|
||||
if not cookies_dict:
|
||||
error_msg = "Failed to decrypt cookies"
|
||||
logger.error(error_msg)
|
||||
return False, None, error_msg
|
||||
|
||||
# Get proxy (with fallback to direct connection)
|
||||
proxy = await antibot.get_proxy()
|
||||
|
||||
# Use dynamic headers with random User-Agent
|
||||
headers = antibot.build_headers()
|
||||
|
||||
# Add random delay before request (anti-bot protection)
|
||||
delay = antibot.get_random_delay()
|
||||
await asyncio.sleep(delay)
|
||||
|
||||
# Prepare request payload
|
||||
payload = {
|
||||
@@ -78,7 +122,7 @@ class WeiboClient:
|
||||
"location": "page_100808_super_index",
|
||||
"refer_flag": "100808_-_1",
|
||||
"refer_lflag": "100808_-_1",
|
||||
"ua": self.base_headers["User-Agent"],
|
||||
"ua": headers["User-Agent"],
|
||||
"is_new": "1",
|
||||
"is_from_ad": "0",
|
||||
"ext": "mi_898_1_0_0"
|
||||
@@ -104,33 +148,44 @@ class WeiboClient:
|
||||
|
||||
if response_data.get("code") == "100000":
|
||||
logger.info(f"Successfully signed topic: {topic.title}")
|
||||
return True
|
||||
reward_info = response_data.get("data", {}).get("reward", {})
|
||||
return True, reward_info, None
|
||||
elif response_data.get("code") == "382004":
|
||||
logger.info(f"Topic {topic.title} already signed today")
|
||||
return True # Treat as success
|
||||
return True, None, "Already signed"
|
||||
else:
|
||||
logger.error(f"Failed to sign topic {topic.title}: {response_data.get('msg')}")
|
||||
return False
|
||||
error_msg = response_data.get("msg", "Unknown error")
|
||||
logger.error(f"Failed to sign topic {topic.title}: {error_msg}")
|
||||
return False, None, error_msg
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Exception signing topic {topic.title}: {e}")
|
||||
return False
|
||||
error_msg = f"Exception signing topic {topic.title}: {str(e)}"
|
||||
logger.error(error_msg)
|
||||
return False, None, error_msg
|
||||
|
||||
def _decrypt_cookies(self, encrypted_cookies: str, iv: str) -> Dict[str, str]:
    """
    Decrypt cookies using AES-256-GCM from shared crypto module.

    Args:
        encrypted_cookies: Ciphertext stored on the account row.
        iv: Initialization vector stored alongside the ciphertext.

    Returns:
        Dict of cookie name -> value pairs; empty dict on any failure.
    """
    try:
        # Derive encryption key from shared settings.
        # (named aes_key so it is not shadowed by the cookie-name loop below)
        aes_key = derive_key(shared_settings.COOKIE_ENCRYPTION_KEY)

        # Decrypt using shared crypto module
        plaintext = decrypt_cookie(encrypted_cookies, iv, aes_key)

        # Parse cookie string into dict
        # Expected format: "key1=value1; key2=value2; ..."
        cookies_dict = {}
        for cookie_pair in plaintext.split(";"):
            cookie_pair = cookie_pair.strip()
            if "=" in cookie_pair:
                name, value = cookie_pair.split("=", 1)
                cookies_dict[name.strip()] = value.strip()

        return cookies_dict

    except Exception as e:
        logger.error(f"Failed to decrypt cookies: {e}")
        return {}
|
||||
|
||||
Reference in New Issue
Block a user