From b1180a6219b3e566cff3d165f5917a01acfcb9bb Mon Sep 17 00:00:00 2001
From: Yufeng Li
Date: Tue, 23 Apr 2024 10:39:55 -0700
Subject: [PATCH] don't check max_batch_size for cpu (#298)

---
 src/models/model.cpp | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/src/models/model.cpp b/src/models/model.cpp
index 87ee26a34..3a447a606 100644
--- a/src/models/model.cpp
+++ b/src/models/model.cpp
@@ -559,10 +559,8 @@ void Model::GetMaxBatchSizeFromGeneratorParams(const GeneratorParams& params) {
     }

     use_cuda_graph_ = true;
-  } else {
-    if (is_cuda_graph_enabled || max_batch_size_ > 0) {
-      throw std::runtime_error("CUDA graph is not supported on this device");
-    }
+  } else if (is_cuda_graph_enabled) {
+    throw std::runtime_error("CUDA graph is not supported on this device");
   }
 }
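
For readers skimming the hunk, the following is a minimal, self-contained sketch of the control flow this patch produces; the names GeneratorParamsSketch, CheckCudaGraph, and device_supports_cuda_graph are hypothetical stand-ins and are not part of the library.

    // Standalone sketch of the behaviour after this patch (not library code):
    // setting max_batch_size on a CPU device no longer throws; the error is
    // reserved for an explicit CUDA-graph request on an unsupported device.
    #include <stdexcept>

    struct GeneratorParamsSketch {
      bool is_cuda_graph_enabled = false;  // hypothetical stand-in for the session-option check
      int max_batch_size = 0;              // mirrors params.max_batch_size
    };

    inline bool CheckCudaGraph(const GeneratorParamsSketch& p, bool device_supports_cuda_graph) {
      if (device_supports_cuda_graph) {
        // ... CUDA/DML path: validate max_batch_size and enable the graph ...
        return p.max_batch_size > 0;
      } else if (p.is_cuda_graph_enabled) {
        // Only an explicit CUDA-graph request is rejected on CPU now.
        throw std::runtime_error("CUDA graph is not supported on this device");
      }
      // Previously, any max_batch_size > 0 also threw in this branch; after the
      // patch it is simply ignored on devices without CUDA-graph support.
      return false;
    }

In short, callers that set max_batch_size unconditionally can now run the same code on CPU without hitting the exception, per the commit title.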