RuntimeError: Creating a Parameter from an instance of type Params4bit requires that detach() returns an instance of the same type, but return type Tensor was found instead. To use the type as a Parameter, please correct the detach() semantics defined by its __torch_dispatch__() implementation.
#418
Open
bai1535 opened this issue on Aug 9, 2024 · 0 comments
This error occurs when loading a quantized Baichuan2 model for serving. Does anyone know how to solve it?
You shouldn't move a model when it is dispatched on multiple devices.
Traceback (most recent call last):
File "/home/ls/anaconda3/envs/aichat/bin/lmdeploy", line 8, in
sys.exit(run())
File "/home/ls/anaconda3/envs/aichat/lib/python3.9/site-packages/lmdeploy/cli/entrypoint.py", line 37, in run
args.run(args)
File "/home/ls/anaconda3/envs/aichat/lib/python3.9/site-packages/lmdeploy/cli/serve.py", line 283, in api_server
run_api_server(args.model_path,
File "/home/ls/anaconda3/envs/aichat/lib/python3.9/site-packages/lmdeploy/serve/openai/api_server.py", line 1191, in serve
VariableInterface.async_engine = pipeline_class(
File "/home/ls/anaconda3/envs/aichat/lib/python3.9/site-packages/lmdeploy/serve/async_engine.py", line 212, in init
self._build_pytorch(model_path=model_path,
File "/home/ls/anaconda3/envs/aichat/lib/python3.9/site-packages/lmdeploy/serve/async_engine.py", line 276, in _build_pytorch
self.engine = Engine(model_path=model_path,
File "/home/ls/anaconda3/envs/aichat/lib/python3.9/site-packages/lmdeploy/pytorch/engine/engine.py", line 131, in init
self.model_agent = AutoModelAgent.from_pretrained(
File "/home/ls/anaconda3/envs/aichat/lib/python3.9/site-packages/lmdeploy/pytorch/engine/model_agent.py", line 462, in from_pretrained
return build_model_agent(pretrained_model_name_or_path,
File "/home/ls/anaconda3/envs/aichat/lib/python3.9/site-packages/lmdeploy/pytorch/engine/model_agent.py", line 1116, in build_model_agent
model_agent = TPModelAgent(model_path,
File "/home/ls/anaconda3/envs/aichat/lib/python3.9/site-packages/lmdeploy/pytorch/engine/model_agent.py", line 959, in init
model, cache_engine, cache_config = self._build_model(
File "/home/ls/anaconda3/envs/aichat/lib/python3.9/site-packages/torch/utils/_contextlib.py", line 115, in decorate_context
return func(*args, **kwargs)
File "/home/ls/anaconda3/envs/aichat/lib/python3.9/site-packages/lmdeploy/pytorch/engine/model_agent.py", line 1028, in _build_model
model, cache_engine, cache_config = _tp_build_model(
File "/home/ls/anaconda3/envs/aichat/lib/python3.9/site-packages/torch/utils/_contextlib.py", line 115, in decorate_context
return func(*args, **kwargs)
File "/home/ls/anaconda3/envs/aichat/lib/python3.9/site-packages/lmdeploy/pytorch/engine/model_agent.py", line 745, in _tp_build_model
raise e
File "/home/ls/anaconda3/envs/aichat/lib/python3.9/site-packages/lmdeploy/pytorch/engine/model_agent.py", line 719, in _tp_build_model
param_model = param_model.to('meta')
File "/home/ls/anaconda3/envs/aichat/lib/python3.9/site-packages/accelerate/big_modeling.py", line 416, in wrapper
return fn(*args, **kwargs)
File "/home/ls/anaconda3/envs/aichat/lib/python3.9/site-packages/transformers/modeling_utils.py", line 2556, in to
return super().to(*args, **kwargs)
File "/home/ls/anaconda3/envs/aichat/lib/python3.9/site-packages/torch/nn/modules/module.py", line 1160, in to
return self._apply(convert)
File "/home/ls/anaconda3/envs/aichat/lib/python3.9/site-packages/torch/nn/modules/module.py", line 810, in _apply
module._apply(fn)
File "/home/ls/anaconda3/envs/aichat/lib/python3.9/site-packages/torch/nn/modules/module.py", line 810, in _apply
module._apply(fn)
File "/home/ls/anaconda3/envs/aichat/lib/python3.9/site-packages/torch/nn/modules/module.py", line 810, in _apply
module._apply(fn)
[Previous line repeated 3 more times]
File "/home/ls/anaconda3/envs/aichat/lib/python3.9/site-packages/torch/nn/modules/module.py", line 841, in _apply
out_param = Parameter(param_applied, param.requires_grad)
File "/home/ls/anaconda3/envs/aichat/lib/python3.9/site-packages/torch/nn/parameter.py", line 44, in new
raise RuntimeError(f"Creating a Parameter from an instance of type {type(data).name} "
RuntimeError: Creating a Parameter from an instance of type Params4bit requires that detach() returns an instance of the same type, but return type Tensor was found instead. To use the type as a Parameter, please correct the detach() semantics defined by its torch_dispatch() implementation.
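For context, the error is raised by torch.nn.Parameter.__new__ (the parameter.py frame at the bottom of the traceback): when Module._apply rebuilds each parameter after the `.to('meta')` call, recent PyTorch versions require that detach() on a custom tensor subclass return an instance of that same subclass, and here the installed bitsandbytes Params4bit apparently returns a plain Tensor. Below is a minimal sketch of the same failure mode, using a made-up FakeQuantTensor class (not bitsandbytes' actual Params4bit implementation), assuming a recent PyTorch 2.x:

```python
import torch
from torch import nn


class FakeQuantTensor(torch.Tensor):
    """Toy stand-in for a quantized weight class such as bitsandbytes' Params4bit."""

    @staticmethod
    def __new__(cls, data):
        # Wrap an existing tensor as this subclass without copying its storage.
        return torch.Tensor._make_subclass(cls, data)

    def detach(self):
        # Deliberately buggy: returns a plain torch.Tensor (a fresh zero tensor here,
        # for simplicity) instead of a FakeQuantTensor. This is exactly the type
        # mismatch that nn.Parameter.__new__ rejects.
        return torch.zeros(self.shape, dtype=self.dtype)


x = FakeQuantTensor(torch.zeros(2, 2))
p = nn.Parameter(x)
# RuntimeError: Creating a Parameter from an instance of type FakeQuantTensor requires
# that detach() returns an instance of the same type, but return type Tensor was found instead.
```

The "You shouldn't move a model when it is dispatched on multiple devices." warning above suggests the quantized model was loaded with an accelerate device map, and lmdeploy's tensor-parallel agent then calls `param_model.to('meta')`, which is the path that trips this check. If that is the actual cause, checking that the installed bitsandbytes version matches the installed torch (so that Params4bit.detach() returns a Params4bit) would be the first thing to try, though I have not verified which combination fixes this exact setup.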