diff --git a/multi_proxy_refill.py b/multi_proxy_refill.py
index f033043..38e1a38 100644
--- a/multi_proxy_refill.py
+++ b/multi_proxy_refill.py
@@ -46,6 +46,25 @@
 def fetch_proxies2(region_code, n):
     url = "http://" + region_code.lower() + "-pr.thordata.net:25000"
     return [url for _ in range(n)]
+def fetch_proxies1(region_code, n):
+    """Fetch n proxies for region_code from the ipidea API, retrying until data is returned."""
+    url = f"http://api.proxy.ipidea.io/getProxyIp?num={n}&return_type=json&lb=1&sb=0&flow=1&regions={region_code}&protocol=http"
+    try:
+        res = requests.get(url)
+        data_json = res.json()
+    except (RequestException, ValueError):
+        time.sleep(1)
+        return fetch_proxies1(region_code, n)
+    arr = data_json.get("data") or []
+    if not arr:
+        time.sleep(1)
+        return fetch_proxies1(region_code, n)
+    result = []
+    for item in arr:
+        ip = item["ip"]
+        port = item["port"]
+        result.append(f"http://{ip}:{port}")
+    return result
 
 def fetch_proxies(region_code, n):
     """
@@ -90,7 +109,7 @@
     arr = data.get("data") or []
     if not arr:
         time.sleep(1)
-        return fetch_proxies(region_code, n)
+        return fetch_proxies1(region_code, n)
 
     result = []
     for item in arr:
@@ -113,7 +132,7 @@ def refill_queue(r: redis.Redis, region_name: str, region_code: str,
         to_fetch = batch - length
         print(f"[{time.strftime('%H:%M:%S')}] {key} 长度 {length} < {low}，一次性拉取 {to_fetch} 条…")
 
-        proxies = fetch_proxies(region_code, to_fetch)
+        proxies = fetch_proxies1(region_code, to_fetch)
         if proxies:
             r.rpush(key, *proxies)
             print(f"[{time.strftime('%H:%M:%S')}] 已入队 {len(proxies)} 条 → 新长度 {r.llen(key)}")
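
For reference (outside the patch itself), here is a minimal standalone sketch of the JSON shape the new fetch_proxies1 helper expects from the ipidea endpoint and how each entry becomes an "http://ip:port" proxy URL. The sample payload (including its "success" field) is an assumption for illustration, not captured output from api.proxy.ipidea.io; the function only relies on the "data" list and the "ip"/"port" fields.

```python
# Illustration only: a made-up example of the response shape fetch_proxies1 parses,
# i.e. {"data": [{"ip": ..., "port": ...}, ...]}.
sample = {
    "success": True,  # assumed field, ignored by fetch_proxies1
    "data": [
        {"ip": "1.2.3.4", "port": 8080},
        {"ip": "5.6.7.8", "port": 3128},
    ],
}

# The same transformation the patched function applies to each entry.
proxies = [f"http://{item['ip']}:{item['port']}" for item in (sample.get("data") or [])]
print(proxies)  # ['http://1.2.3.4:8080', 'http://5.6.7.8:3128']
```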