From 9ad5f65b45edc4d28224fb7344ab669b5af74ae0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=BB=84=E5=AE=87=E6=89=AC?= Date: Thu, 25 Jul 2024 17:03:58 +0800 Subject: [PATCH] fix: drain pending response logits in token_healing --- tools/fastllm_pytools/llm.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tools/fastllm_pytools/llm.py b/tools/fastllm_pytools/llm.py index 41dde11a..2e7bf124 100644 --- a/tools/fastllm_pytools/llm.py +++ b/tools/fastllm_pytools/llm.py @@ -687,6 +687,10 @@ def token_healing(self, softmax(out) if (idx < len(real_input)): cur_prob += math.log(out[real_input[idx]]) + while True: + if (ret <= -1): + break + ret = fastllm_lib.fetch_response_logits_llm_model(self.model, handle, array) max_id = -1 for i in cur_set: if max_id == -1 or out[i] > out[max_id]: