@@ -91,6 +91,7 @@ class InferenceFetchHandler(FetchHandler):
                 result_dict = self.result_queue.get(timeout=1)  # fetch with a 1s timeout
                 batch.append(result_dict)
                 if len(batch) >= self.batch_size:
+                    logger.info("write vector")
                     with open(self.output_file, 'a') as f:
                         for result in batch:
                             f.write(json.dumps(result) + '\n')
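
For context, here is a minimal sketch of how the batching loop around this hunk could look as a complete handler. Only the queue polling with a 1-second timeout, the batch-size threshold, the logger.info call, and the JSON-lines append come from the diff above; the run() method, the queue.Empty handling, the None sentinel, the batch reset, and the final flush are assumptions (the original class also subclasses FetchHandler, which is omitted here to keep the sketch self-contained).

import json
import logging
import queue

logger = logging.getLogger(__name__)

class InferenceFetchHandler:
    # Sketch only: the real class derives from FetchHandler and likely has more state.
    def __init__(self, result_queue, output_file, batch_size=100):
        self.result_queue = result_queue
        self.output_file = output_file
        self.batch_size = batch_size

    def run(self):
        batch = []
        while True:
            try:
                # Wait up to 1s for the next inference result.
                result_dict = self.result_queue.get(timeout=1)
            except queue.Empty:
                continue  # nothing produced yet; keep polling
            if result_dict is None:  # assumed shutdown sentinel
                break
            batch.append(result_dict)
            if len(batch) >= self.batch_size:
                logger.info("write vector")
                # Append the full batch as JSON lines, one result per line.
                with open(self.output_file, 'a') as f:
                    for result in batch:
                        f.write(json.dumps(result) + '\n')
                batch = []  # reset after flushing (not shown in the hunk)
        # Flush any leftover results on shutdown (assumed behavior).
        if batch:
            with open(self.output_file, 'a') as f:
                for result in batch:
                    f.write(json.dumps(result) + '\n')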