From a5bb90dbc4665184061ed46d7aeaf12dd26868ea Mon Sep 17 00:00:00 2001
From: Abhishek Jindal
Date: Mon, 18 Sep 2023 22:31:40 -0700
Subject: [PATCH] Shape could be nullptr

---
 onnxruntime/core/optimizer/layer_norm_fusion.cc | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/onnxruntime/core/optimizer/layer_norm_fusion.cc b/onnxruntime/core/optimizer/layer_norm_fusion.cc
index bba7127c99d3a..0b9f5f6444b47 100644
--- a/onnxruntime/core/optimizer/layer_norm_fusion.cc
+++ b/onnxruntime/core/optimizer/layer_norm_fusion.cc
@@ -422,6 +422,9 @@ Status LayerNormFusion::ApplyImpl(Graph& graph, bool& modified, int graph_level,
       if (graph_utils::IsGraphInput(graph, mul_node.MutableInputDefs()[i])){
         std::cout << "LayerNormFusion Scale GraphIsInput" << std::endl;
       }
+      // Shape() may be nullptr when shape inference has not run for this
+      // NodeArg; skip the candidate instead of dereferencing a null shape.
+      if (mul_node.MutableInputDefs()[i]->Shape() == nullptr){
+        continue;
+      }
       if (mul_node.MutableInputDefs()[i]->Shape()->dim_size() == static_cast<int>(axes_values.size())) {
         std::cout << "LayerNormFusion Scale determined" << std::endl;
         scale = mul_node.MutableInputDefs()[i];
@@ -436,6 +439,9 @@ Status LayerNormFusion::ApplyImpl(Graph& graph, bool& modified, int graph_level,
       if (graph_utils::IsGraphInput(graph, mul_node.MutableInputDefs()[i])){
         std::cout << "LayerNormFusion Bias GraphIsInput" << std::endl;
       }
+      // Same guard for the bias candidate: bail out before the
+      // ->Shape()->dim_size() dereference below if no shape is present.
+      if (last_add_node.MutableInputDefs()[i]->Shape() == nullptr){
+        continue;
+      }
       if (last_add_node.MutableInputDefs()[i]->Shape()->dim_size() == static_cast<int>(axes_values.size())) {
         std::cout << "LayerNormFusion bias determined" << std::endl;
         bias = last_add_node.MutableInputDefs()[i];