Skip to content

Commit

Permalink
Eliminate makeInputLayer usage
Browse files Browse the repository at this point in the history
  • Loading branch information
olpipi committed Aug 8, 2023
1 parent f0d44a3 commit c4e1339
Show file tree
Hide file tree
Showing 3 changed files with 21 additions and 17 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -120,8 +120,7 @@ class BasicConvolutionBiasAddActivationLayerTest
for (size_t i = 0; i < biasShape.size(); ++i) {
if (i != channel_dim_index) biasShape[i] = 1;
}
auto biasLayer =
ngraph::builder::makeInputLayer(ngNetPrc, ngraph::helpers::InputLayerType::CONSTANT, biasShape);
auto biasLayer = std::make_shared<ov::op::v0::Constant>(ngNetPrc, biasShape);

auto biasAddLayer = ngraph::builder::makeEltwise(convLayer, biasLayer, ngraph::helpers::EltwiseTypes::ADD);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -85,13 +85,14 @@ class FullyConnectedLayerTest : public testing::WithParamInterface<FullyConnecte
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
auto params = ngraph::builder::makeParams(ngPrc, {shapeRelatedParams.input1.first});

auto secondaryInput =
ngraph::builder::makeInputLayer(ngPrc, secondaryInputType, shapeRelatedParams.input2.first);
std::shared_ptr<ov::Node> secondaryInput;
if (secondaryInputType == ngraph::helpers::InputLayerType::PARAMETER) {
params.push_back(std::dynamic_pointer_cast<ov::op::v0::Parameter>(secondaryInput));
secondaryInput = std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(shapeRelatedParams.input2.first));
params.push_back(std::static_pointer_cast<ov::op::v0::Parameter>(secondaryInput));
} else {
secondaryInput = std::make_shared<ov::op::v0::Constant>(ngPrc, shapeRelatedParams.input2.first);
}
auto thirdInput = ngraph::builder::makeInputLayer(
ngPrc, ngraph::helpers::InputLayerType::CONSTANT, shapeRelatedParams.input3);
auto thirdInput = std::make_shared<ov::op::v0::Constant>(ngPrc, shapeRelatedParams.input3);
auto paramOuts =
ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(params));
auto MatMul = std::dynamic_pointer_cast<ov::op::v0::MatMul>(ngraph::builder::makeMatMul(
Expand Down Expand Up @@ -191,15 +192,21 @@ class FullyConnectedLayer2MatMulTest : public testing::WithParamInterface<FullyC
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
auto params = ngraph::builder::makeParams(
ngPrc, {shapeRelatedParams.matmul1_input1.first, shapeRelatedParams.matmul2_input1.first});
auto matmul0SecondaryInput =
ngraph::builder::makeInputLayer(ngPrc, secondaryInputType, shapeRelatedParams.matmul1_input2.first);

std::shared_ptr<ov::Node> matmul0SecondaryInput;
if (secondaryInputType == ngraph::helpers::InputLayerType::PARAMETER) {
params.push_back(std::dynamic_pointer_cast<ov::op::v0::Parameter>(matmul0SecondaryInput));
matmul0SecondaryInput = std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(shapeRelatedParams.matmul1_input2.first));
params.push_back(std::static_pointer_cast<ov::op::v0::Parameter>(matmul0SecondaryInput));
} else {
matmul0SecondaryInput = std::make_shared<ov::op::v0::Constant>(ngPrc, shapeRelatedParams.matmul1_input2.first);
}
auto matmul1SecondaryInput =
ngraph::builder::makeInputLayer(ngPrc, secondaryInputType, shapeRelatedParams.matmul2_input2.first);

std::shared_ptr<ov::Node> matmul1SecondaryInput;
if (secondaryInputType == ngraph::helpers::InputLayerType::PARAMETER) {
params.push_back(std::dynamic_pointer_cast<ov::op::v0::Parameter>(matmul1SecondaryInput));
matmul1SecondaryInput = std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(shapeRelatedParams.matmul2_input2.first));
params.push_back(std::static_pointer_cast<ov::op::v0::Parameter>(matmul1SecondaryInput));
} else {
matmul1SecondaryInput = std::make_shared<ov::op::v0::Constant>(ngPrc, shapeRelatedParams.matmul2_input2.first);
}

auto paramOuts =
Expand Down
6 changes: 2 additions & 4 deletions modules/nvidia_plugin/tests/unit/test_networks.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -9,14 +9,13 @@
#include <ngraph_functions/utils/ngraph_helpers.hpp>

inline std::shared_ptr<ngraph::Function> CreateMatMulTestNetwork() {
ngraph::helpers::InputLayerType secondaryInputType = ngraph::helpers::InputLayerType::CONSTANT;
auto netPrecision = InferenceEngine::Precision::FP32;
std::map<std::string, std::string> additionalConfig;

auto ngPrc = InferenceEngine::details::convertPrecision(netPrecision);
auto params = ngraph::builder::makeParams(ngPrc, {{3, 2, 10, 10}});

auto secondaryInput = ngraph::builder::makeInputLayer(ngPrc, secondaryInputType, {3, 2, 10, 20});
auto secondaryInput = std::make_shared<ov::op::v0::Constant>(ngPrc, ov::Shape{3, 2, 10, 20});
auto paramOuts =
ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(params));
auto MatMul = std::dynamic_pointer_cast<ov::op::v0::MatMul>(
Expand Down Expand Up @@ -63,14 +62,13 @@ class SuperDummyOp : public ov::op::Op {
};

inline std::shared_ptr<ngraph::Function> CreateSuperOperationTestNetwork() {
ngraph::helpers::InputLayerType secondaryInputType = ngraph::helpers::InputLayerType::CONSTANT;
auto netPrecision = InferenceEngine::Precision::FP32;
std::map<std::string, std::string> additionalConfig;

auto ngPrc = InferenceEngine::details::convertPrecision(netPrecision);
auto params = ngraph::builder::makeParams(ngPrc, {{3, 2, 10, 10}});

auto secondaryInput = ngraph::builder::makeInputLayer(ngPrc, secondaryInputType, {3, 2, 10, 20});
auto secondaryInput = std::make_shared<ov::op::v0::Constant>(ngPrc, ov::Shape{3, 2, 10, 20});
auto paramOuts =
ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(params));
auto superOp = std::make_shared<SuperDummyOp>(paramOuts[0], secondaryInput);
Expand Down

0 comments on commit c4e1339

Please sign in to comment.