feat: Add a script to import keyword/title lines into a Redis list
parent 594ef9350b
commit d25177cd22
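The script named in the title is not shown on this page; only the supporting changes to DB.py and oneget.py are. As a rough illustration of what the title describes, here is a minimal redis-py sketch for loading keyword/title lines into a Redis list; the file name keywords.txt and list key keyword_queue are placeholders, not names taken from this commit:

```python
import redis

# Placeholder connection settings and key names; the real script's values are
# not visible in this commit.
r = redis.Redis(host="127.0.0.1", port=6379, decode_responses=True)

with open("keywords.txt", encoding="utf-8") as f:
    for line in f:
        line = line.strip()
        if line:
            # Append each non-empty keyword/title line to the end of the list.
            r.rpush("keyword_queue", line)
```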
DB.py (6 lines changed)
@@ -631,11 +631,13 @@ class DBVidcon:
             logger.info("[Redis close error]", e)
 
     @redis_retry(max_retries=3)
-    def get_proxy(self, region_code: str) -> str:
+    def get_proxy(self, region_code: str, c="-1") -> str:
         region_code = region_code.upper()
         list_json_str = self.redis.hget("proxy_config", "list") or "[]"
-        conf_str = self.redis.hget("proxy_config", "conf") or "0"
+        if c == "-1":
+            conf_str = self.redis.hget("proxy_config", "conf") or "0"
+        else:
+            conf_str = c
         try:
             proxies = json.loads(list_json_str)
             idx = int(conf_str)
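For reference, a minimal sketch of how the widened get_proxy signature is called, based on the hunk above and the call site added in oneget.py below; the region code "US" and a ready-made DBVidcon instance are assumptions for illustration:

```python
db = DBVidcon()

# Default: c="-1" keeps the old behaviour and reads the proxy index from the
# "conf" field of the proxy_config hash in Redis.
proxy_url = db.get_proxy("US")

# Passing another value uses it directly as the index into the proxy list,
# skipping the Redis "conf" lookup.
proxy_url = db.get_proxy("US", c="3")
```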
oneget.py (32 lines changed)
@@ -6,6 +6,9 @@ import copy
 from threading import Lock
 import logging
+from DB import DBVidcon
 import json
 
 
+logger = logging.getLogger(__name__)
+db = DBVidcon()
@@ -39,11 +42,6 @@ class DMHeaderManager:
         'x-dm-visitor-id': '',
     }
 
-    _proxies = {
-        "http": "http://127.0.0.1:10808",
-        "https": "http://127.0.0.1:10808"
-    }
-
     _user_agents = [
         'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50',
         'Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
@@ -53,9 +51,10 @@ class DMHeaderManager:
         'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36',
     ]
 
-    def __init__(self):
+    def __init__(self, proxies: dict = None):
         self._headers_cache = None
         self._cache_lock = Lock()
+        self._proxies = proxies
 
     def get_headers(self, retry: int = 2) -> dict:
         for attempt in range(retry + 1):
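The class-level _proxies mapping removed in the previous hunk is replaced by this constructor parameter, so each caller now supplies its own proxy mapping. A small usage sketch, reusing the previously hardcoded local proxy address purely as an example value:

```python
# Reproduce the old behaviour by passing the formerly hardcoded local proxy
# explicitly; any requests-style proxy mapping works here.
manager = DMHeaderManager(proxies={
    "http": "http://127.0.0.1:10808",
    "https": "http://127.0.0.1:10808",
})
headers = manager.get_headers()
```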
@@ -127,12 +126,25 @@ class DMHeaderManager:
         return new_headers
 
 
-dmheader_manager = DMHeaderManager()
+kwdata = db.get_web_items()
+if not kwdata:
+    logger.error("没有获取到关键词数据")
+    exit(1)
+
+kwdata = kwdata[0][1]
+rn = kwdata['rn']
+proxy_name = proxiesdict.get(rn)
+proxies_str = db.get_proxy(proxy_name,'-1')
+proxies = {
+    'http': proxies_str,
+    'https': proxies_str
+}
+dmheader_manager = DMHeaderManager(proxies=proxies)
 
 headers = dmheader_manager.get_headers()
 
 import json
 
 data = {
     "operationName": "SEARCH_QUERY",
     "variables": {
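The indexing in the hunk above (kwdata[0][1], then kwdata['rn']) implies that db.get_web_items() returns a sequence whose items pair some key with a dict containing at least an 'rn' region field. The concrete payload is not shown in this commit; the values below are hypothetical and only reproduce that access pattern:

```python
# Hypothetical shape of db.get_web_items(); only the [0][1] / ['rn'] access
# pattern comes from the diff, the key and extra fields are made up.
kwdata_example = [
    ("web_queue_item", {"rn": "FR", "keyword": "exemple"}),
]
item = kwdata_example[0][1]   # dict the script continues to work with
rn = item["rn"]               # region code fed to proxiesdict / db.get_proxy
```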
@@ -503,7 +515,7 @@ query SEARCH_QUERY(
 payload = json.dumps(data).encode()
 
 response = requests.post('https://graphql.api.dailymotion.com/', headers=headers, data=payload,
-                         proxies=dmheader_manager._proxies)
+                         proxies=proxies)
 
 data = response.json()
 edges = data['data']['search']['stories']['edges']
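A note on the last hunk: requests takes proxies as a mapping from URL scheme to proxy URL, which is why the single string returned by db.get_proxy() is assigned to both the 'http' and 'https' keys in the hunk above. Illustrative values only:

```python
# requests routes traffic for each scheme through the proxy URL mapped to it;
# here both schemes point at the same upstream proxy.
proxies = {
    "http": "http://user:pass@proxy.example.com:8080",
    "https": "http://user:pass@proxy.example.com:8080",
}
# response = requests.post(url, headers=headers, data=payload, proxies=proxies)
```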