diff --git a/oneget.py b/oneget.py
index 80d0f2d..b1ab706 100644
--- a/oneget.py
+++ b/oneget.py
@@ -8,7 +8,6 @@ import random
 import time
 import copy
 from threading import Lock
-from logger import logger
 from DB import DBVidcon, DBSA
 import json
 from requests.adapters import HTTPAdapter
@@ -67,7 +66,7 @@ def clean_dash_to_zero(val):
     try:
         return int(val)
     except (ValueError, TypeError) as e:
-        logger.exception(f"[字段异常] val = {val} → {str(e)}")
+        print(f"[字段异常] val = {val} → {str(e)}")
         return 0
 
 
@@ -76,7 +75,7 @@ def format_create_time(timestr):
         dt = date_parser.isoparse(timestr)
         return dt.strftime("%Y-%m-%d %H:%M:%S")
     except Exception as e:
-        logger.exception(f"[时间格式错误] {timestr} → {str(e)}")
+        print(f"[时间格式错误] {timestr} → {str(e)}")
         return "1970-01-01 00:00:00"
 
 
@@ -255,10 +254,10 @@ def main():
     while True:
         kwdata = db.get_web_items()
         if not kwdata:
-            logger.error("没有获取到关键词数据")
+            print("没有获取到关键词数据")
             time.sleep(30)
             return
-        logger.info(f"搜索关键词数据: {kwdata}")
+        print(f"搜索关键词数据: {kwdata}")
         kwdata = kwdata[0][1]
         rn = kwdata['rn']
         proxy_name = proxiesdict.get(rn)
@@ -650,9 +649,9 @@ def main():
                 edges = data['data']['search']['stories']['edges']
             except (TypeError,KeyError):
                 print("stories 为 None 或结构异常,跳过")
-                return
+                break
             edges_len = len(edges)
-            logger.info(f"第 {i} 页,关键词: {kw},获取到 {edges_len} 条数据")
+            print(f"第 {i} 页,关键词: {kw},获取到 {edges_len} 条数据")
             tancks = []
             for j, edge in enumerate(edges):
                 node = edge.get("node", {})
@@ -678,7 +677,7 @@
 if __name__ == '__main__':
     parse_args()
     start_time = datetime.now()
-    logger.info(f"开始时间:{start_time.strftime('%Y-%m-%d %H:%M:%S')}")
+    print(f"开始时间:{start_time.strftime('%Y-%m-%d %H:%M:%S')}")
     main()