From 37baa5925b86f14553e7b9ea781ea400b20b76d3 Mon Sep 17 00:00:00 2001
From: celll1
Date: Sun, 8 Sep 2024 19:02:17 +0900
Subject: [PATCH] fix: attention mask device.

---
 modules/model/util/clip_util.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/model/util/clip_util.py b/modules/model/util/clip_util.py
index e2f5c3ac..9bbab373 100644
--- a/modules/model/util/clip_util.py
+++ b/modules/model/util/clip_util.py
@@ -32,7 +32,7 @@ def encode_clip(
             continue

         # Create attention mask (1 for non-masked, 0 for masked)
-        chunk_attention_mask = torch.ones_like(chunk, dtype=torch.bool)
+        chunk_attention_mask = torch.ones_like(chunk, dtype=torch.bool, device=chunk.device)

         # First, add BOS and EOS tokens
         bos_tokens = torch.full((chunk.shape[0], 1), text_encoder.config.bos_token_id, dtype=chunk.dtype, device=chunk.device)
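
Note: a minimal, self-contained sketch of the pattern this patch enforces, i.e. keeping the attention mask on the same device as the token chunk it masks. This is not the repository's `encode_clip()` itself; the tensor shapes, the device selection, and the BOS token id below are assumptions for illustration only.

```python
import torch

device = "cuda" if torch.cuda.is_available() else "cpu"

# A batch of 2 token chunks, 5 tokens each, already on the target device.
chunk = torch.randint(0, 49408, (2, 5), dtype=torch.long, device=device)

# torch.ones_like() inherits device (and dtype, unless overridden) from
# `chunk` by default; passing device= explicitly, as the patch does,
# makes the placement unmistakable.
chunk_attention_mask = torch.ones_like(chunk, dtype=torch.bool, device=chunk.device)

# Tensors built with torch.full() do NOT inherit a device from any input,
# so omitting device= here would create a CPU tensor, and torch.cat()
# below would raise a cross-device RuntimeError once `chunk` is on GPU.
bos_token_id = 49406  # hypothetical BOS id, stands in for text_encoder.config.bos_token_id
bos_tokens = torch.full((chunk.shape[0], 1), bos_token_id, dtype=chunk.dtype, device=chunk.device)

padded = torch.cat([bos_tokens, chunk], dim=1)
assert padded.device == chunk_attention_mask.device
```

Since `torch.ones_like` already copies the input tensor's device, the added `device=chunk.device` is belt-and-braces; the value of the patch is that every tensor created in this hunk now states its device explicitly, matching the `torch.full(...)` call that follows.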