Shape could be nullptr
ajindal1 committed Sep 19, 2023
1 parent 25f12e9 commit a5bb90d
Showing 1 changed file with 6 additions and 0 deletions.
onnxruntime/core/optimizer/layer_norm_fusion.cc: 6 additions & 0 deletions
@@ -422,6 +422,9 @@ Status LayerNormFusion::ApplyImpl(Graph& graph, bool& modified, int graph_level,
     if (graph_utils::IsGraphInput(graph, mul_node.MutableInputDefs()[i])){
       std::cout << "LayerNormFusion Scale GraphIsInput" << std::endl;
     }
+    if (mul_node.MutableInputDefs()[i]->Shape() == nullptr){
+      continue;
+    }
     if (mul_node.MutableInputDefs()[i]->Shape()->dim_size() == static_cast<int>(axes_values.size())) {
       std::cout << "LayerNormFusion Scale determined" << std::endl;
       scale = mul_node.MutableInputDefs()[i];
@@ -436,6 +439,9 @@ Status LayerNormFusion::ApplyImpl(Graph& graph, bool& modified, int graph_level,
     if (graph_utils::IsGraphInput(graph, mul_node.MutableInputDefs()[i])){
       std::cout << "LayerNormFusion Bias GraphIsInput" << std::endl;
     }
+    if (last_add_node.MutableInputDefs()[i]->Shape() == nullptr){
+      continue;
+    }
     if (last_add_node.MutableInputDefs()[i]->Shape()->dim_size() == static_cast<int>(axes_values.size())) {
       std::cout << "LayerNormFusion bias determined" << std::endl;
       bias = last_add_node.MutableInputDefs()[i];
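The commit message is terse, so here is a minimal, self-contained sketch of the guard pattern the diff introduces: in onnxruntime, NodeArg::Shape() can return nullptr when shape inference has not produced a shape for an input, so dim_size() must not be called on it unconditionally. The mock types below are illustrative stand-ins, not the real onnxruntime/ONNX classes.

    #include <iostream>
    #include <vector>

    // Stand-in for ONNX's TensorShapeProto: just enough to expose dim_size().
    struct TensorShapeProtoMock {
      std::vector<long> dims;
      int dim_size() const { return static_cast<int>(dims.size()); }
    };

    // Stand-in for onnxruntime's NodeArg: Shape() may legitimately return
    // nullptr when no shape is known for this input.
    struct NodeArgMock {
      const TensorShapeProtoMock* shape = nullptr;
      const TensorShapeProtoMock* Shape() const { return shape; }
    };

    int main() {
      TensorShapeProtoMock known{{8, 128}};                    // rank-2 shape
      std::vector<NodeArgMock> inputs = {{&known}, {nullptr}}; // second input: shape unknown
      std::vector<long> axes_values = {-2, -1};                // two reduction axes

      for (const auto& input : inputs) {
        // The guard this commit adds: skip inputs with an unknown shape
        // instead of dereferencing the result of Shape() unconditionally.
        if (input.Shape() == nullptr) {
          std::cout << "shape unknown, skipping" << std::endl;
          continue;
        }
        if (input.Shape()->dim_size() == static_cast<int>(axes_values.size())) {
          std::cout << "rank matches axes count, candidate found" << std::endl;
        }
      }
      return 0;
    }

Without the guard, the second input would trigger a null-pointer dereference at the dim_size() call, which is exactly the failure mode the commit title ("Shape could be nullptr") describes.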
