import concurrent.futures
import datetime
import sys

import redis

# NOTE: the original script referenced these two names without defining them,
# which would raise a NameError at runtime. The values below are placeholders;
# adjust them to the TTL threshold and extension your deployment needs.
threshold_second = 3600   # only keys whose remaining TTL is below this (seconds) are extended
extend_second = 3600      # number of seconds to add to the remaining TTL


def process_key(r, key):
    """Handle the TTL-extension logic for a single key."""
    try:
        ttl = r.ttl(key)
        # r.ttl() returns -2 for a missing key and -1 for a key without a TTL,
        # so the 0 < ttl check skips both cases.
        if 0 < ttl < threshold_second:
            r.expire(key, ttl + extend_second)
            # logging.info(f"Key: {key.decode()}, original TTL: {ttl}s, extended by {extend_second}s")
            return 1  # number of keys successfully renewed
        return 0
    except redis.exceptions.RedisError as e:
        print(f"Redis error while processing key {key.decode()}: {e}")
        return 0
    except Exception as e:
        print(f"Unexpected error while processing key {key.decode()}: {e}")
        return 0


def clear_redis_key(host='localhost', port=6379, db=0, password=None, num_workers=10):
    """
    Scan Redis keys and, for every key whose TTL is below threshold_second,
    extend its expiration by extend_second. Keys are processed in parallel
    while keeping the original code structure as intact as possible.
    """
    try:
        print(f'time = {datetime.datetime.now()}')
        sys.stdout.flush()
        sys.stderr.flush()

        r = redis.Redis(host=host, port=port, db=db, password=password)

        count = 0
        clear_count = 0
        with concurrent.futures.ThreadPoolExecutor(max_workers=num_workers) as executor:
            futures = []
            for key in r.scan_iter(match='com.weiqu.longvideo.video.message.cache.*', count=1000):
                futures.append(executor.submit(process_key, r, key))
                count += 1
                if count % 1000000 == 0:
                    print(f"submit count: {count} time = {datetime.datetime.now()}")
                    sys.stdout.flush()
                    sys.stderr.flush()

            for future in concurrent.futures.as_completed(futures):
                try:
                    clear_count += future.result()
                except Exception as e:
                    print(f"future error: {e}")
                    sys.stdout.flush()
                    sys.stderr.flush()

        print(f"scan finish count: {count} clear_count: {clear_count} time = {datetime.datetime.now()}")
        sys.stdout.flush()
        sys.stderr.flush()
    except redis.exceptions.ConnectionError as e:
        print(f"connect Redis error: {e}")
        sys.stdout.flush()
        sys.stderr.flush()
    except Exception as e:
        print(f"error: {e}")
        sys.stdout.flush()
        sys.stderr.flush()


if __name__ == "__main__":
    num_workers = 20  # tune according to CPU core count and network conditions

    # test
    clear_redis_key(host='r-bp1ps6my7lzg8rdhwx682.redis.rds.aliyuncs.com', port=6379, db=0, password='Wqsd@2019', num_workers=num_workers)

    # prod
    # clear_redis_key(host='r-bp1oyhyx4mxgs6klyt561.redis.rds.aliyuncs.com', port=6379, db=0, password='Wqsd@2019', num_workers=num_workers)
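

# --- Optional alternative (illustrative sketch only, not used above) ----------
# process_key() issues two round trips per key (TTL, then EXPIRE). A redis-py
# pipeline can batch both calls for a chunk of keys, which usually reduces
# network overhead when millions of keys are scanned. This helper is an
# assumption-laden sketch, not part of the original script: it reuses the
# module-level threshold_second / extend_second and leaves chunking of the
# scan output to the caller.
def process_keys_pipelined(r, keys):
    """Extend the TTL of every key in `keys` whose TTL is below threshold_second.

    Returns the number of keys whose expiration was extended.
    """
    # First pass: fetch all TTLs in one round trip.
    pipe = r.pipeline(transaction=False)
    for key in keys:
        pipe.ttl(key)
    ttls = pipe.execute()

    # Second pass: queue EXPIRE only for keys that need renewal.
    extended = 0
    pipe = r.pipeline(transaction=False)
    for key, ttl in zip(keys, ttls):
        if 0 < ttl < threshold_second:
            pipe.expire(key, ttl + extend_second)
            extended += 1
    pipe.execute()
    return extended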