From e80205d3ff76e4773be43fc1ffb34c07bf30f712 Mon Sep 17 00:00:00 2001
From: liubo-intel
Date: Wed, 16 Oct 2024 05:40:47 -0400
Subject: [PATCH] replace ref_f16 Deconv kernels with jit_f32 kernels before
 oneDNN support for model performance on GNR, e.g. hifigan

---
 src/plugins/intel_cpu/src/nodes/deconv.cpp | 17 +++++++++++++++--
 1 file changed, 15 insertions(+), 2 deletions(-)

diff --git a/src/plugins/intel_cpu/src/nodes/deconv.cpp b/src/plugins/intel_cpu/src/nodes/deconv.cpp
index 57046a0a06d55b..6b61b2ee102fb3 100644
--- a/src/plugins/intel_cpu/src/nodes/deconv.cpp
+++ b/src/plugins/intel_cpu/src/nodes/deconv.cpp
@@ -458,8 +458,21 @@ void Deconvolution::getSupportedDescriptors() {
         outputDataType = DnnlExtensionUtils::ElementTypeToDataType(outPrecision);
         if (inputDataType == memory::data_type::bf16 || outputDataType == memory::data_type::bf16)
             inputDataType = outputDataType = memory::data_type::bf16;
-        if (inputDataType == memory::data_type::f16 || outputDataType == memory::data_type::f16)
-            inputDataType = outputDataType = memory::data_type::f16;
+
+        if (inputDataType == memory::data_type::f16 || outputDataType == memory::data_type::f16) {
+            bool hasStrides = false;
+            for (size_t i = 0; i < deconvAttrs.stride.size(); i++) {
+                if (deconvAttrs.stride[i] != 1) {
+                    hasStrides = true;
+                    break;
+                }
+            }
+            if (hasStrides)
+                inputDataType = outputDataType = memory::data_type::f32;
+            else
+                inputDataType = outputDataType = memory::data_type::f16;
+        }
+
         if (!fusedWith.empty()) {
             outputDataType = DnnlExtensionUtils::ElementTypeToDataType(fusedWith[fusedWith.size() - 1]->getOriginalOutputPrecisionAtPort(0));
         }
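
Note (commentary, not part of the patch): the hunk above keeps the f16 data type only when every deconvolution stride equals 1; any non-unit stride falls back to f32 so the jit f32 kernel is selected instead of the slow reference f16 kernel, until oneDNN provides jit f16 deconvolution support. A minimal standalone C++ sketch of that rule follows; the names chooseDeconvDataType and DataType are illustrative only and do not exist in the OpenVINO sources, where the decision is made on memory::data_type inside Deconvolution::getSupportedDescriptors().

    #include <algorithm>
    #include <cstddef>
    #include <vector>

    enum class DataType { f16, f32 };

    // Any stride != 1 -> fall back to f32 (jit kernel available); otherwise keep f16.
    DataType chooseDeconvDataType(const std::vector<std::size_t>& strides) {
        const bool hasNonUnitStride =
            std::any_of(strides.begin(), strides.end(),
                        [](std::size_t s) { return s != 1; });
        return hasNonUnitStride ? DataType::f32 : DataType::f16;
    }

    // Example: a deconvolution with strides {2, 2} would run in f32,
    // while one with strides {1, 1} keeps the f16 path.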