fix: Adjust the generation range of traffic_segment to produce more reasonable values, and update the return format of fetch_proxies3

晓丰 2025-05-18 22:59:51 +08:00
parent 39d4bc111a
commit 0d35030f6d
2 changed files with 33 additions and 17 deletions

View File

@@ -186,6 +186,7 @@ def gettoken():
         'sec-ch-ua-mobile': '?0',
         'sec-ch-ua-platform': '"Windows"',
     }
     u = uuid.uuid4()
     uuid_with_dash = str(u)
     uuid_no_dash = u.hex
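For reference, the two identifiers derived in this hunk are the same UUID in two renderings: str() on a uuid.UUID gives the dashed 36-character form, while .hex gives the same 32 hex digits with the dashes stripped. A standalone illustration (standard-library behaviour, shown only for clarity):

# Illustration: the two UUID renderings used in gettoken().
import uuid

u = uuid.uuid4()
uuid_with_dash = str(u)   # e.g. "3f2b8c1e-9a47-4d0b-8f6e-1c2d3e4f5a6b"
uuid_no_dash = u.hex      # same digits, no dashes
assert uuid_no_dash == uuid_with_dash.replace("-", "")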

View File

@@ -4,6 +4,7 @@
 import time
 import sys
 import argparse
+from urllib.parse import quote
 import redis
 import requests
 from requests import RequestException
@@ -23,11 +24,6 @@ LOW_WATERMARK = 200
 REFILL_BATCH = 1000
 SLEEP_INTERVAL = 10
-# Third-party API parameters
-PROXY_API_URL = "http://api.proxy.roxlabs.io/getProxyIp"
-ACCESS_ID = "2207189"
-SIGN = "10099426b05c7119e9c4dbd6a7a0aa4e"
 db = DB.DBVidcon()
 def fetch_proxies3(region_code, n):
@@ -55,17 +51,36 @@ def fetch_proxies(region_code, n):
     """
     Request n proxies from the third-party service in one call and return a formatted list
     """
+    headers = {
+        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
+        "accept-language": "zh-CN,zh;q=0.9",
+        "priority": "u=0, i",
+        "sec-ch-ua": "\"Chromium\";v=\"136\", \"Microsoft Edge\";v=\"136\", \"Not.A/Brand\";v=\"99\"",
+        "sec-ch-ua-mobile": "?0",
+        "sec-ch-ua-platform": "\"Windows\"",
+        "sec-fetch-dest": "document",
+        "sec-fetch-mode": "navigate",
+        "sec-fetch-site": "none",
+        "sec-fetch-user": "?1",
+        "upgrade-insecure-requests": "1",
+        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
+    }
+    url = "https://www.kookeey.com/pickdynamicips"
     params = {
-        "num": n,
-        "return_type": "json",
-        "lb": "4",
-        "sb": "",
-        "flow": "1",
-        "regions": region_code,
-        "protocol": "http"
+        "auth": "pwd",
+        "format": "1",
+        "n": n,
+        "p": "http",
+        "gate": "hk",
+        "g": region_code,
+        "r": "0",
+        "type": "json",
+        "sign": "10099426b05c7119e9c4dbd6a7a0aa4e",
+        "accessid": "2207189",
+        "dl": ","
     }
     try:
-        resp = requests.get(PROXY_API_URL, params=params, timeout=10)
+        resp = requests.get(url, headers=headers, params=params, timeout=10)
         resp.raise_for_status()
         data = resp.json()
     except (RequestException, ValueError):
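In effect, this hunk swaps the roxlabs endpoint and the removed ACCESS_ID/SIGN module constants for a direct call to the kookeey pickdynamicips endpoint, with the credentials passed as query parameters. A minimal sketch of the new request flow under those assumptions (parameter names and values are copied from the diff; the browser-style headers are omitted, and nothing is assumed about the response schema beyond it being JSON):

# Sketch of the reworked fetch, using the query parameters shown in the diff.
import requests
from requests import RequestException

def fetch_proxies_sketch(region_code, n, timeout=10):
    url = "https://www.kookeey.com/pickdynamicips"
    params = {
        "auth": "pwd", "format": "1", "n": n, "p": "http",
        "gate": "hk", "g": region_code, "r": "0", "type": "json",
        "sign": "10099426b05c7119e9c4dbd6a7a0aa4e",   # credential values as they appear in the diff
        "accessid": "2207189",
        "dl": ",",
    }
    try:
        resp = requests.get(url, params=params, timeout=timeout)
        resp.raise_for_status()
        return resp.json()   # parsed JSON; proxy-list extraction happens in the caller
    except (RequestException, ValueError):
        return None          # assumption: treat a failed fetch as "no proxies this round"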
@@ -79,12 +94,12 @@ def fetch_proxies(region_code, n):
     result = []
     for item in arr:
-        # user = quote(item["username"], safe="")
-        # pwd = quote(item["password"], safe="")
+        user = quote(item["username"], safe="")
+        pwd = quote(item["password"], safe="")
         ip = item["ip"]
         port = item["port"]
-        # result.append(f"http://{user}:{pwd}@{ip}:{port}")
-        result.append(f"http://{ip}:{port}")
+        result.append(f"http://{user}:{pwd}@{ip}:{port}")
+        # result.append(f"http://{ip}:{port}")
     return result
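The final hunk re-enables authenticated proxy URLs: each item's username and password are percent-encoded with urllib.parse.quote(..., safe="") so that characters such as '@' or ':' in the credentials cannot break the http://user:pwd@ip:port form, and the bare ip:port variant is commented out. A small self-contained illustration with a made-up item (only the field names match the diff), including one way such a URL is typically consumed via requests' proxies mapping:

# Illustration: percent-encode credentials before embedding them in a proxy URL.
from urllib.parse import quote

item = {"username": "user@01", "password": "p:ss", "ip": "203.0.113.10", "port": 31280}  # made-up sample

user = quote(item["username"], safe="")   # "user%4001" - a raw '@' would split the URL's authority part
pwd = quote(item["password"], safe="")    # "p%3Ass"
proxy_url = f"http://{user}:{pwd}@{item['ip']}:{item['port']}"

# Typical consumer: pass the URL through requests' proxies mapping.
proxies = {"http": proxy_url, "https": proxy_url}
# requests.get("https://example.com", proxies=proxies, timeout=10)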