
Commit
Use fastllm tokenization when chat_template is invalid
黄宇扬 committed Jul 2, 2024
1 parent 3080aa1 commit 83b551f
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions tools/fastllm_pytools/llm.py
@@ -550,7 +550,7 @@ def stop_token_ctypes(self, stop_token_ids):

     def get_input_token_len(self, query: str, history: List[Tuple[str, str]] = None) -> int:
         prompt = query if self.direct_query else self.get_prompt(query, history);
-        if (self.hf_tokenizer != None):
+        if (self.hf_tokenizer != None and hasattr(self.hf_tokenizer, "chat_template") and self.hf_tokenizer.chat_template != ""):
             return len(self.hf_tokenizer.encode(prompt))
         else:
             return len(self.encode(prompt))
@@ -604,7 +604,7 @@ def stream_response(self,
                         history: List[Tuple[str, str]] = None,
                         max_length: int = 8192, do_sample = True, top_p = 0.8, top_k = 1, temperature = 1.0, repeat_penalty = 1.0,
                         one_by_one = True, stop_token_ids: List[int] = None):
-        if (self.hf_tokenizer != None):
+        if (self.hf_tokenizer != None and hasattr(self.hf_tokenizer, "chat_template") and self.hf_tokenizer.chat_template != ""):
             lastlen = 0
             for cur in self.stream_chat(tokenizer = self.hf_tokenizer,
                                         query = query,
@@ -657,7 +657,7 @@ async def stream_response_async(self,
                               history: List[Tuple[str, str]] = None,
                               max_length: int = 8192, do_sample = True, top_p = 0.8, top_k = 1, temperature = 1.0, repeat_penalty = 1.0,
                               one_by_one = True, stop_token_ids: List[int] = None):
-        if (self.hf_tokenizer != None):
+        if (self.hf_tokenizer != None and hasattr(self.hf_tokenizer, "chat_template") and self.hf_tokenizer.chat_template != ""):
             lastlen = 0
             async for cur in self.stream_chat_async(tokenizer = self.hf_tokenizer,
                                                     query = query,
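The same guard is applied at all three call sites: before this change only hf_tokenizer != None was checked, so a tokenizer loaded without a usable chat_template was still routed through the Hugging Face path; the added hasattr/non-empty check makes fastllm's own tokenizer the fallback in that case. Below is a minimal, self-contained sketch of that fallback logic. All names in it (has_usable_chat_template, count_input_tokens, DummyTokenizer, fallback_encode) are hypothetical stand-ins for illustration, not part of fastllm's API; only the condition itself mirrors the committed code (which writes != None where the sketch uses the idiomatic is not None).

    # Minimal sketch of the guard introduced by this commit. All names are
    # hypothetical stand-ins; only the condition mirrors the committed code.

    def has_usable_chat_template(tokenizer) -> bool:
        # Trust the Hugging Face tokenizer only if it exists and exposes a
        # non-empty chat_template attribute.
        return (tokenizer is not None
                and hasattr(tokenizer, "chat_template")
                and tokenizer.chat_template != "")

    def count_input_tokens(prompt: str, hf_tokenizer, fallback_encode) -> int:
        # Prefer the Hugging Face tokenizer when its chat_template is valid;
        # otherwise fall back to the engine's own encode function.
        if has_usable_chat_template(hf_tokenizer):
            return len(hf_tokenizer.encode(prompt))
        return len(fallback_encode(prompt))

    class DummyTokenizer:
        # Stand-in for a Hugging Face tokenizer loaded without a chat template.
        chat_template = ""
        def encode(self, text):
            return text.split()

    # With an empty chat_template, the fallback encoder is used instead.
    print(count_input_tokens("hello world", DummyTokenizer(),
                             lambda s: list(s.encode("utf-8"))))  # prints 11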
