Skip to content

Commit

Permalink
Merge pull request #30 from husichao666/master
Browse files Browse the repository at this point in the history
Compatible with MindSpore2.3
  • Loading branch information
suhaibo666 authored Mar 12, 2024
2 parents 12fbf26 + fc2fb00 commit caf8485
Show file tree
Hide file tree
Showing 3 changed files with 9 additions and 3 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ python set_up.py bdist_wheel

执行以下命令安装whl包:
```shell
pip install dist/mindpet-1.0.0-py3-none-any.whl
pip install dist/mindpet-1.0.4-py3-none-any.whl
```


Expand Down
8 changes: 7 additions & 1 deletion mindpet/delta/lora.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,12 @@
from mindpet.utils.version_control import get_dropout, get_activation
from mindpet.layers.activation import LeakyReLU, LogSigmoid, LogSoftmax

def check_dense_input_shape(x, prim_name=None):
    """Validate that a dense-layer input shape has rank of at least 2.

    Replacement for ``nn.layer.basic.check_dense_input_shape``, which is no
    longer importable in MindSpore 2.3 (the reason this helper exists here).

    Args:
        x (tuple): Shape tuple of the input tensor.
        prim_name (str, optional): Operator name used to prefix the error
            message. Defaults to None.

    Raises:
        ValueError: If the shape has fewer than two dimensions.
    """
    rank = len(x)
    if rank >= 2:
        return
    msg_prefix = f"For '{prim_name}', the" if prim_name else "The"
    raise ValueError(f"{msg_prefix} dimension of 'x' should not be less than 2, but got {rank}.")


class LoRADense(nn.Dense):
"""Define a dense layer with LoRA structure.
Expand Down Expand Up @@ -96,7 +102,7 @@ def construct(self, input_tensor):

# Shape operations
x_shape = self.shape_op(input_tensor)
nn.layer.basic.check_dense_input_shape(x_shape, self.cls_name)
check_dense_input_shape(x_shape, self.cls_name)
input_tensor = self.reshape(input_tensor, (-1, x_shape[-1]))

# Dense result
Expand Down
2 changes: 1 addition & 1 deletion set_up.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@


def get_version():
    """Return the current mindpet package version string."""
    package_version = '1.0.4'
    return package_version


Expand Down

0 comments on commit caf8485

Please sign in to comment.