Set early_stopping False and fix TextPredictor's predict
gokceuludogan committed Feb 4, 2024
1 parent e068908 commit ab7a410
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions turkish_lm_tuner/predictor.py
@@ -9,7 +9,7 @@ class TaskConfig:
     max_new_tokens: int = None
     length_penalty: float = None
     no_repeat_ngram_size: int = None
-    early_stopping: bool = True
+    early_stopping: bool = False
     decoder_start_token_id: int = None
     eos_token_id: int = None
     pad_token_id: int = None
@@ -142,6 +142,6 @@ def __init__(self, model_name, task, task_format='conditional_generation', max_i
         self.task_config = TaskConfig(**task_parameters[task])
 
     def predict(self, text, **kwargs):
-        generation_config = vars(self.task_config, **kwargs) if self.task_format == 'conditional_generation' else {}
+        generation_config = {**vars(self.task_config), **kwargs} if self.task_format == 'conditional_generation' else {}
         return super().predict(text, generation_config)
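
The second hunk is the actual fix: vars() accepts at most one positional argument and no keyword arguments, so the old vars(self.task_config, **kwargs) raised a TypeError as soon as a caller passed any generation overrides. The new expression dumps the TaskConfig fields into a dict first and then lets caller kwargs override them. A minimal sketch of the same merge pattern, using an abridged TaskConfig with only a few illustrative fields (not the full set from predictor.py):

from dataclasses import dataclass

@dataclass
class TaskConfig:
    # Abridged: only a few of the generation fields, for illustration.
    max_new_tokens: int = None
    length_penalty: float = None
    early_stopping: bool = False

task_config = TaskConfig(max_new_tokens=64)
kwargs = {"early_stopping": True}   # caller-supplied override, as in predict(text, early_stopping=True)

# Old code: vars(task_config, **kwargs) -> TypeError, vars() takes no keyword arguments.
# Fixed code: dataclass fields first, caller overrides win on conflicts.
generation_config = {**vars(task_config), **kwargs}
print(generation_config)
# {'max_new_tokens': 64, 'length_penalty': None, 'early_stopping': True}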
