Skip to content

Commit

Permalink
add more logs
Browse files Browse the repository at this point in the history
  • Loading branch information
ajindal1 committed Sep 19, 2023
1 parent ec4a104 commit bedae09
Showing 1 changed file with 14 additions and 0 deletions.
14 changes: 14 additions & 0 deletions onnxruntime/core/optimizer/layer_norm_fusion.cc
Original file line number Diff line number Diff line change
Expand Up @@ -415,19 +415,33 @@ Status LayerNormFusion::ApplyImpl(Graph& graph, bool& modified, int graph_level,
// NOTE(review): fragment from the middle of LayerNormFusion::ApplyImpl — the
// enclosing function starts above and continues below this excerpt. This commit
// only adds temporary std::cout debug logging to trace scale/bias detection;
// presumably it should be removed or converted to LOGS(...) before merging.
NodeArg* bias = nullptr;
std::cout << "LayerNormFusion Checking Scale and Bias" << std::endl;
// Scan the Mul node's inputs looking for the "scale" operand of the fused
// LayerNormalization. An input qualifies when its rank equals the number of
// reduction axes (axes_values) gathered earlier in this function.
for (size_t i = 0; i < mul_node.MutableInputDefs().size(); i++) {
std::cout << "LayerNormFusion Checking Scale" << std::endl;
// Debug-only probes: report whether this input is a constant initializer
// and/or a graph input. Neither result affects control flow here.
if (graph_utils::NodeArgIsConstant(graph, *(mul_node.MutableInputDefs()[i]))){
std::cout << "LayerNormFusion Scale NodeArgIsConstant" << std::endl;
}
std::cout << "LayerNormFusion Scale NodeArgIsConstant Check Complete" << std::endl;
if (graph_utils::IsGraphInput(graph, mul_node.MutableInputDefs()[i])){
std::cout << "LayerNormFusion Scale GraphIsInput" << std::endl;
}
std::cout << "LayerNormFusion Scale GraphIsInput Check Complete" << std::endl;
// NOTE(review): log text says "LayerNorFusion" — typo for "LayerNormFusion"
// (also on the dim-size logs below); harmless but worth fixing.
std::cout << "LayerNorFusion Axes Value dim size:" << static_cast<int>(axes_values.size()) << std::endl;
// Inputs without shape inference results cannot be rank-checked; skip them.
if (mul_node.MutableInputDefs()[i]->Shape() == nullptr) {
continue;
}
std::cout << "LayerNorFusion Shape dim size:" << mul_node.MutableInputDefs()[i]->Shape()->dim_size() << std::endl;
if (mul_node.MutableInputDefs()[i]->Shape()->dim_size() == static_cast<int>(axes_values.size())) {
std::cout << "LayerNormFusion Scale determined" << std::endl;
// No break: if several inputs match, the last matching one wins.
scale = mul_node.MutableInputDefs()[i];
}
}

std::cout << "LayerNormFusion Checking Bias Now" << std::endl;
// Same rank-vs-axes check on the trailing Add node's inputs to locate the
// optional "bias" operand. (Loop body is truncated in this excerpt.)
for (size_t i = 0; i < last_add_node.MutableInputDefs().size(); i++) {
std::cout << "LayerNorFusion Bias Axes Value dim size:" << static_cast<int>(axes_values.size()) << std::endl;
// Skip inputs with no inferred shape, mirroring the scale loop above.
if (last_add_node.MutableInputDefs()[i]->Shape() == nullptr) {
continue;
}
std::cout << "LayerNorFusion Bias dim size:" << last_add_node.MutableInputDefs()[i]->Shape()->dim_size() << std::endl;
if (last_add_node.MutableInputDefs()[i]->Shape()->dim_size() == static_cast<int>(axes_values.size())) {
std::cout << "LayerNormFusion bias determined" << std::endl;
bias = last_add_node.MutableInputDefs()[i];
Expand Down

0 comments on commit bedae09

Please sign in to comment.