From d0a7dcec1dd2b9f67c9be97d3b9ac05341b5fc9b Mon Sep 17 00:00:00 2001
From: Daniel Han
Date: Wed, 31 Jul 2024 12:09:33 -0700
Subject: [PATCH] Update gemma.py

---
 unsloth/models/gemma.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/unsloth/models/gemma.py b/unsloth/models/gemma.py
index e3f1e615..a0894ec7 100644
--- a/unsloth/models/gemma.py
+++ b/unsloth/models/gemma.py
@@ -14,6 +14,7 @@
 
 from .llama import *
 from ._utils import __version__
+import math
 
 try:
     from transformers.models.gemma.modeling_gemma import (
@@ -256,7 +257,7 @@ def forward(self, x, position_ids=None, seq_len=None):
     def extend_rope_embedding(self, x, seq_len):
         if seq_len <= self.current_rope_size: return
         # Iteratively grow by increments of 8192
-        self.current_rope_size = int(round(seq_len / 8192)) * 8192
+        self.current_rope_size = math.ceil(seq_len / 8192) * 8192
         self._set_cos_sin_cache(self.current_rope_size, device = "cuda:0", dtype = x.dtype)
     pass
 pass