# DailyMotion/DB.py

import json
import redis
import pymysql
import time
import functools
from redis.exceptions import ConnectionError, TimeoutError


def mysql_retry(max_retries: int = 3, base_delay: float = 2.0):
    """
    Decorator factory: on pymysql.InterfaceError, reconnect and retry,
    backing off exponentially by base_delay * 2**(attempt-1) seconds.
    """
    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(self, *args, **kwargs):
            for attempt in range(1, max_retries + 1):
                try:
                    # Keep the connection alive; reconnect=True re-opens it if ping fails
                    self.conn.ping(reconnect=True)
                    return fn(self, *args, **kwargs)
                except pymysql.InterfaceError as e:
                    wait = base_delay * (2 ** (attempt - 1))
                    print(f"[MySQL][{fn.__name__}] attempt {attempt} hit InterfaceError: {e}, reconnecting in {wait:.1f}s…")
                    time.sleep(wait)
                    self._reconnect_mysql()
                    if attempt == max_retries:
                        print("[MySQL] still failing after retries, re-raising")
                        raise
        return wrapper
    return decorator


def redis_retry(max_retries: int = 3):
    """
    Decorator factory: retry the wrapped call up to max_retries times on
    Redis connection/timeout errors, reconnecting between attempts.
    """
    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(self, *args, **kwargs):
            for attempt in range(1, max_retries + 1):
                try:
                    return fn(self, *args, **kwargs)
                except (ConnectionError, TimeoutError) as e:
                    print(f"[Redis][{fn.__name__}] attempt {attempt} failed: {e}")
                    self.reconnect_redis()
                    if attempt == max_retries:
                        print("[Redis] giving up, connection still failing")
                        raise
                    print(f"[Redis] reconnected, retrying (attempt {attempt + 1})…")
        return wrapper
    return decorator
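

# Note: both decorators assume the decorated method is bound to an object that
# exposes self.conn plus a _reconnect_mysql() helper (for mysql_retry) or
# self.redis plus a reconnect_redis() helper (for redis_retry), as DBVidcon below does.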
class DBVidcon:
    _MYSQL_CONF = {
        "host": "192.144.230.75",  # "127.0.0.1", #
        "port": 3306,  # 3307, #
        "user": "db_vidcon",
        "password": "rexdK4fhCCiRE4BZ",
        "database": "db_vidcon",
        "charset": "utf8mb4",
        "cursorclass": pymysql.cursors.DictCursor,
    }

    _REDIS_CONF = {
        "host": "192.144.230.75",  # "127.0.0.1", #
        "port": 6379,  # 6380, #
        "password": "qwert@$123!&",
        "decode_responses": True,
    }
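
    # Queue naming used below: l0 = urgent/priority queue, l1 = normal queue,
    # error = records that failed to persist to MySQL (see upsert_video).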
    def __init__(self):
        self.l1_list_key = "video_l1_queue"
        self.error_list_key = "error_save_queue"
        self.l0_list_key = "video_l0_queue"
        self.conn = pymysql.connect(**self._MYSQL_CONF)
        self.cursor = self.conn.cursor()
        self.redis = redis.Redis(**self._REDIS_CONF)

    def _connect_redis(self):
        """Create (or re-create) the Redis client."""
        self.redis = redis.Redis(**self._REDIS_CONF)

    def reconnect_redis(self):
        """Rebuild the Redis client after a ConnectionError is caught."""
        try:
            self._connect_redis()
        except Exception as e:
            print("[Redis reconnect error]", e)
            time.sleep(2)
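
    def _reconnect_mysql(self):
        # The mysql_retry decorator above calls self._reconnect_mysql(), but the
        # original file never defines it; this is a minimal sketch that closes the
        # stale connection (ignoring errors) and opens a fresh one.
        try:
            self.cursor.close()
            self.conn.close()
        except Exception:
            pass
        self.conn = pymysql.connect(**self._MYSQL_CONF)
        self.cursor = self.conn.cursor()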

    @redis_retry(max_retries=3)
    def push_record(self, data: dict):
        raw = json.dumps(data, ensure_ascii=False)
        self.redis.lpush(self.error_list_key, raw)

    # def fetch_records(self, count: int = 100):
    #     try:
    #         raws = self.redis.lpop(self.record_list_key, count)
    #     except TypeError:
    #         raws = []
    #         for _ in range(count):
    #             item = self.redis.rpop(self.record_list_key)
    #             if item is None:
    #                 break
    #             raws.append(item)
    #     except redis.exceptions.ConnectionError:
    #         self.reconnect_redis()
    #         return self.fetch_records(count)
    #
    #     if not raws:
    #         return []
    #     if isinstance(raws, str):
    #         raws = [raws]
    #
    #     out = []
    #     for raw in raws:
    #         try:
    #             data = json.loads(raw)
    #             out.append((raw, data))
    #         except json.JSONDecodeError:
    #             continue
    #     return out

    @redis_retry(max_retries=3)
    def fetch_from_redis(self, count: int = 100, list_key: str = None):
        key = list_key
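        # LPOP with a count argument needs a reasonably recent redis-py client; on
        # older versions it raises TypeError, presumably why there is a fallback that
        # pops one item at a time (note the fallback uses rpop, the other end of the list).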
        try:
            raws = self.redis.lpop(key, count)
        except TypeError:
            raws = []
            for _ in range(count):
                item = self.redis.rpop(key)
                if item is None:
                    break
                raws.append(item)
        except redis.exceptions.ConnectionError as e:
            print("[Redis pop error]", e)
            self.reconnect_redis()
            return []
        if not raws:
            return []
        if isinstance(raws, str):
            raws = [raws]
        out = []
        for raw in raws:
            try:
                out.append((raw, json.loads(raw)))
            except json.JSONDecodeError:
                continue
        return out

    @mysql_retry()
    def get_proxy_agent_dict(self) -> dict:
        sql = "SELECT rn, parameter FROM proxy_agent"
        self.cursor.execute(sql)
        rows = self.cursor.fetchall()
        result = {row['rn']: row['parameter'] for row in rows}
        return result

    @mysql_retry()
    def get_proxy_parameter(self, rn: str) -> str:
        sql = "SELECT parameter FROM proxy_agent WHERE rn = %s LIMIT 1"
        self.cursor.execute(sql, (rn,))
        result = self.cursor.fetchone()
        print(result)
        return result['parameter'] if result else None

    @redis_retry(max_retries=3)
    def rollback_records(self, raws):
        if isinstance(raws, str):
            raws = [raws]
        self.redis.lpush(self.l0_list_key, *raws)

    @redis_retry(max_retries=3)
    def rollback_urgent(self, raws):
        if isinstance(raws, str):
            raws = [raws]
        try:
            self.redis.lpush(self.l0_list_key, *raws)
        except redis.exceptions.ConnectionError as e:
            print("[Redis urgent rollback error]", e)
            self.reconnect_redis()
            self.redis.lpush(self.l0_list_key, *raws)
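
    # item_keyword() drains the urgent (l0) queue first and only then the normal (l1)
    # queue. The second element of the returned tuple flags the source:
    # 1 = urgent queue, 2 = normal queue, 0 = nothing fetched / Redis error.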
    @redis_retry(max_retries=3)
    def item_keyword(self, count: int = 20):
        try:
            urgent_items = self.fetch_from_redis(count, list_key=self.l0_list_key)
        except Exception as e:
            print("[Redis urgent pop error]", e)
            self.reconnect_redis()
            urgent_items = []
        if urgent_items:
            return urgent_items, 1
        try:
            items = self.fetch_from_redis(count, list_key=self.l1_list_key)
        except Exception as e:
            print("[Redis normal pop error]", e)
            self.reconnect_redis()
            return [], 0
        return items, 2

    @redis_retry(max_retries=3)
    def rollback(self, payloads):
        if not payloads:
            return
        if isinstance(payloads, str):
            payloads = [payloads]
        self.redis.rpush(self.l1_list_key, *payloads)
        print(f"[rollback] pushed {len(payloads)} item(s) back onto the normal queue")
    @mysql_retry()
    def upsert_video(self, data: dict):
        data.setdefault("a_id", 0)
        data.setdefault("history_status", "")
        data.setdefault("is_piracy", 3)
        data.setdefault("is_repeat", 3)
        data["sort"] = data.get("index", 0)
        max_retries = 1  # one extra attempt beyond the first try
        attempt = 0
        while True:
            try:
                sql_op = """
                INSERT INTO sh_dm_video_op_v2 (
                    v_id, v_xid, a_id, level, name_title,
                    keyword, rn, history_status, is_repeat,
                    sort, createtime, updatetime, batch, machine
                ) VALUES (
                    %(v_id)s, %(v_xid)s, %(a_id)s, %(level)s, %(v_name)s,
                    %(keyword)s, %(rn)s, %(history_status)s, %(is_repeat)s,
                    %(sort)s, UNIX_TIMESTAMP(), UNIX_TIMESTAMP(), %(batch)s, %(machine_id)s
                )
                """
                self.cursor.execute(sql_op, data)
                sql_update = """
                INSERT INTO sh_dm_video_v2 (
                    v_id, v_xid, rn, v_name, title, link,
                    edition, duration,
                    public_time, cover_pic, sort,
                    u_xid, u_id, u_pic, u_name,
                    status, createtime, updatetime
                ) VALUES (
                    %(v_id)s, %(v_xid)s, %(rn)s, %(v_name)s, %(title)s, %(link)s,
                    '', %(duration)s,
                    %(create_time)s, %(cover_pic)s, %(sort)s,
                    %(u_xid)s, %(u_id)s, %(u_pic)s, %(u_name)s,
                    1, UNIX_TIMESTAMP(), UNIX_TIMESTAMP()
                )
                ON DUPLICATE KEY UPDATE
                    title = VALUES(title),
                    duration = VALUES(duration),
                    cover_pic = VALUES(cover_pic),
                    sort = VALUES(sort),
                    updatetime = UNIX_TIMESTAMP();
                """
                self.cursor.execute(sql_update, data)
                break  # success, leave the retry loop
            except Exception as e:
                # roll back the uncommitted changes from this attempt
                self.conn.rollback()
                print("[DB write error]", str(e))
                print("[failing payload]:", data)
                if attempt < max_retries:
                    attempt += 1
                    print(f"retrying (attempt {attempt + 1})…")
                    continue
                else:
                    # still failing after the retry, hand the record to Redis
                    print("retry failed, pushing the record to Redis for later processing")
                    self.push_record(data)
                    print("[handed off to Redis]")
                    break

    @mysql_retry()
    def flush(self):
        """Commit manually after a batch of statements."""
        self.conn.commit()

    @mysql_retry()
    def close(self):
        self.cursor.close()
        self.conn.close()

    @redis_retry(max_retries=3)
    def get_proxy(self, region_code: str) -> str:
        """
        Pop one proxy from the Redis list proxy_queue:<region_code> and return it.
        Blocks (polling every 10s) while the queue is empty.
        """
        proxy = ""
        while True:
            key = f"proxy_queue:{region_code}"
            proxy = self.redis.lpop(key)
            if proxy is None:
                time.sleep(10)
            else:
                break
        return proxy

    @redis_retry(max_retries=3)
    def queues_empty(self) -> bool:
        """
        Return True if both the urgent (l0) and normal (l1) queues are empty,
        False as soon as either has items.
        """
        # note: redis.llen returns an int
        return (
            self.redis.llen(self.l0_list_key) == 0
            and self.redis.llen(self.l1_list_key) == 0
        )

    @redis_retry(max_retries=3)
    def pop_error_item(self):
        """
        Pop one error record from error_list_key (lpop).
        Returns None if the queue is empty.
        """
        item = self.redis.lpop(self.error_list_key)
        # entries are stored as JSON strings (see push_record), so deserialize here
        return json.loads(item) if item is not None else None
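

# Minimal usage sketch (illustrative only; the payload fields expected by
# upsert_video are assumptions based on the SQL above, not a documented schema):
#
#   db = DBVidcon()
#   items, level = db.item_keyword(count=20)   # level: 1 = urgent, 2 = normal, 0 = none
#   for raw, data in items:
#       try:
#           db.upsert_video(data)
#       except Exception:
#           db.rollback([raw])                 # push the raw payload back for a retry
#   db.flush()                                 # commit the batch
#   db.close()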