Commit
Eliminate builder functions
olpipi committed Aug 10, 2023
1 parent 0193980 commit f4e84fc
Showing 10 changed files with 60 additions and 59 deletions.
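The change is mechanical across all ten files: calls into the deprecated ngraph::builder helpers (makeConstant, makeEltwise, makeMatMul) are replaced with direct construction of the corresponding ov::op nodes, with ov::test::utils::fill_tensor_random supplying data where the builder used to generate it randomly. Below is a minimal sketch of the pattern, assuming OpenVINO's common test utilities are on the include path; the header paths and the helper name make_constant are assumptions for illustration, not part of this commit:

    #include <memory>
    #include <vector>

    #include "common_test_utils/data_utils.hpp"  // assumed location of ov::test::utils::fill_tensor_random
    #include "openvino/op/constant.hpp"
    #include "openvino/runtime/tensor.hpp"

    // Hypothetical helper showing both branches that the diffs below inline by hand.
    std::shared_ptr<ov::op::v0::Constant> make_constant(const ov::element::Type& type,
                                                        const ov::Shape& shape,
                                                        const std::vector<float>& data) {
        if (data.empty()) {
            // Random case: fill a tensor in place, then wrap it in a Constant.
            ov::Tensor random_tensor(type, shape);
            ov::test::utils::fill_tensor_random(random_tensor);
            return std::make_shared<ov::op::v0::Constant>(random_tensor);
        }
        // Explicit-data case: the Constant constructor takes the values directly.
        return std::make_shared<ov::op::v0::Constant>(type, shape, data);
    }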
@@ -92,12 +92,17 @@ class ConvolutionBackpropDataExtendedLayerTest
bool addBiases = false,
const std::vector<float> &filterWeights = {},
const std::vector<float> &biasesWeights = {}) {
-bool randomFilterWeights = filterWeights.empty();
auto shape = in.get_shape();
std::vector<size_t> filterWeightsShape = {shape[1], numOutChannels};
filterWeightsShape.insert(filterWeightsShape.end(), filterSize.begin(), filterSize.end());
-auto filterWeightsNode =
-    ngraph::builder::makeConstant(type, filterWeightsShape, filterWeights, randomFilterWeights);
+std::shared_ptr<ov::op::v0::Constant> filterWeightsNode;
+if (filterWeights.empty()) {
+    ov::Tensor random_tensor(type, filterWeightsShape);
+    ov::test::utils::fill_tensor_random(random_tensor);
+    filterWeightsNode = std::make_shared<ov::op::v0::Constant>(random_tensor);
+} else {
+    filterWeightsNode = std::make_shared<ov::op::v0::Constant>(type, filterWeightsShape, filterWeights);
+}

return makeConvolutionBackpropData(in,
filterWeightsNode,

@@ -149,10 +154,9 @@ class ConvolutionBackpropDataExtendedLayerTest
std::tie(kernel, stride, padBegin, padEnd, dilation, convOutChannels, padType, outputPad) =
convBackpropDataParams;
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
-ov::ParameterVector params {std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShape)))};
+ov::ParameterVector params {std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShape))};

-auto outputShapeNode =
-    ngraph::builder::makeConstant(ov::element::Type_t::i64, {outputShapeData.size()}, outputShapeData);
+auto outputShapeNode = std::make_shared<ov::op::v0::Constant>(ov::element::Type_t::i64, ov::Shape{outputShapeData.size()}, outputShapeData);
auto paramOuts =
ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(params));
auto convBackpropData = std::dynamic_pointer_cast<ngraph::opset1::ConvolutionBackpropData>(

@@ -94,9 +94,15 @@ class ConvolutionBackpropDataAddExtendedLayerTest
auto shape = in.get_shape();
std::vector<size_t> filterWeightsShape = {shape[1], numOutChannels};
filterWeightsShape.insert(filterWeightsShape.end(), filterSize.begin(), filterSize.end());
-auto filterWeightsNode =
-    ngraph::builder::makeConstant(type, filterWeightsShape, filterWeights, randomFilterWeights);

+std::shared_ptr<ov::Node> filterWeightsNode;
+if (filterWeights.empty()) {
+    ov::Tensor random_tensor(type, filterWeightsShape);
+    ov::test::utils::fill_tensor_random(random_tensor);
+    filterWeightsNode = std::make_shared<ov::op::v0::Constant>(random_tensor);
+} else {
+    filterWeightsNode = std::make_shared<ov::op::v0::Constant>(type, filterWeightsShape, filterWeights);
+}
return makeConvolutionBackpropData(in,
filterWeightsNode,
output,

@@ -147,10 +153,9 @@ class ConvolutionBackpropDataAddExtendedLayerTest
std::tie(kernel, stride, padBegin, padEnd, dilation, convOutChannels, padType, outputPad) =
convBackpropDataParams;
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
-ov::ParameterVector params {std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShape)))};
+ov::ParameterVector params {std::make_shared<ov::op::v0::Parameter>(ngPrc, ov::Shape(inputShape))};

-auto outputShapeNode =
-    ngraph::builder::makeConstant(ov::element::Type_t::i64, {outputShapeData.size()}, outputShapeData);
+auto outputShapeNode = std::make_shared<ov::op::v0::Constant>(ov::element::Type_t::i64, ov::Shape{outputShapeData.size()}, outputShapeData);
auto paramOuts =
ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(params));
auto convBackpropData = std::dynamic_pointer_cast<ngraph::opset1::ConvolutionBackpropData>(

@@ -164,8 +169,11 @@ class ConvolutionBackpropDataAddExtendedLayerTest
dilation,
padType,
convOutChannels));
-auto addConstant = ngraph::builder::makeConstant(ngPrc, outputShapeData, outputShapeData, true);
-auto add = ngraph::builder::makeEltwise(convBackpropData, addConstant, ngraph::helpers::EltwiseTypes::ADD);
+
+ov::Tensor random_tensor(ngPrc, outputShapeData);
+ov::test::utils::fill_tensor_random(random_tensor);
+auto addConstant = std::make_shared<ov::op::v0::Constant>(random_tensor);
+auto add = std::make_shared<ov::op::v1::Add>(convBackpropData, addConstant);
ov::ResultVector results{std::make_shared<ngraph::opset1::Result>(add)};
function = std::make_shared<ngraph::Function>(results, params, "convolutionBackpropData");
}

@@ -122,13 +122,13 @@ class BasicConvolutionBiasAddActivationLayerTest
}
auto biasLayer = std::make_shared<ov::op::v0::Constant>(ngNetPrc, biasShape);

-auto biasAddLayer = ngraph::builder::makeEltwise(convLayer, biasLayer, ngraph::helpers::EltwiseTypes::ADD);
+auto biasAddLayer = std::make_shared<ov::op::v1::Add>(convLayer, biasLayer);

std::shared_ptr<ov::Node> lastNode;
if constexpr (HasAddNode) {
auto addParam = std::make_shared<ngraph::opset1::Parameter>(ngNetPrc, convLayer->get_output_shape(0));
params.push_back(addParam);
-auto addLayer = ngraph::builder::makeEltwise(biasAddLayer, addParam, ngraph::helpers::EltwiseTypes::ADD);
+auto addLayer = std::make_shared<ov::op::v1::Add>(biasAddLayer, addParam);
lastNode = addLayer;
} else {
lastNode = biasAddLayer;

@@ -247,15 +247,17 @@ void CudaEltwiseLayerTest::SetUp() {
[](const auto& value) { return static_cast<int>(static_cast<float>(value)) == 0; },
1);
}
-secondaryInput = ngraph::builder::makeConstant(netType, shape, data);
+secondaryInput = std::make_shared<ov::op::v0::Constant>(netType, shape, data);
break;
}
case ngraph::helpers::EltwiseTypes::POWER: {
-secondaryInput = ngraph::builder::makeConstant<float>(netType, shape, {}, is_random, 3);
+ov::Tensor random_tensor(netType, shape);
+ov::test::utils::fill_tensor_random(random_tensor, 3, -3);
+secondaryInput = std::make_shared<ov::op::v0::Constant>(random_tensor);
break;
}
default: {
-secondaryInput = ngraph::builder::makeConstant<float>(netType, shape, data);
+secondaryInput = std::make_shared<ov::op::v0::Constant>(netType, shape, data);
}
}
}
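A note on the arguments used throughout this commit: from its call sites here, fill_tensor_random appears to take (tensor, range, start_from, resolution, seed), with values drawn from roughly [start_from, start_from + range]; that reading of the semantics is an inference from the diffs, not documented in them. Under that assumption, the POWER branch above keeps the exponent operand in a narrow range, presumably so pow() results stay well-behaved in the test:

    // Sketch only; argument semantics as assumed above.
    ov::Tensor random_tensor(netType, shape);
    ov::test::utils::fill_tensor_random(random_tensor, 3, -3);  // values in about [-3, 0]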

@@ -3,7 +3,6 @@
//

#include <cuda_test_constants.hpp>
-#include <ngraph_functions/builders.hpp>
#include <vector>

#include "finite_comparer.hpp"

@@ -95,10 +94,8 @@ class FullyConnectedLayerTest : public testing::WithParamInterface<FullyConnecte
auto thirdInput = std::make_shared<ov::op::v0::Constant>(ngPrc, shapeRelatedParams.input3);
auto paramOuts =
ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(params));
-auto MatMul = std::dynamic_pointer_cast<ov::op::v0::MatMul>(ngraph::builder::makeMatMul(
-    paramOuts[0], secondaryInput, shapeRelatedParams.input1.second, shapeRelatedParams.input2.second));
-auto Add = std::dynamic_pointer_cast<ov::op::v1::Add>(
-    ngraph::builder::makeEltwise(MatMul, thirdInput, ngraph::helpers::EltwiseTypes::ADD));
+auto MatMul = std::make_shared<ov::op::v0::MatMul>(paramOuts[0], secondaryInput, shapeRelatedParams.input1.second, shapeRelatedParams.input2.second);
+auto Add = std::make_shared<ov::op::v1::Add>(MatMul, thirdInput);
ov::ResultVector results{std::make_shared<ngraph::opset1::Result>(Add)};
function = std::make_shared<ngraph::Function>(results, params, "FullyConnected");
}

@@ -210,20 +207,16 @@ class FullyConnectedLayer2MatMulTest : public testing::WithParamInterface<FullyC
matmul1SecondaryInput = std::make_shared<ov::op::v0::Constant>(ngPrc, shapeRelatedParams.matmul1_input2.first);
}

-auto paramOuts =
-    ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(params));
-auto matMul0 = std::dynamic_pointer_cast<ov::op::v0::MatMul>(
-    ngraph::builder::makeMatMul(paramOuts[0],
-                                matmul0SecondaryInput,
-                                shapeRelatedParams.matmul1_input1.second,
-                                shapeRelatedParams.matmul1_input2.second));
-auto matMul1 = std::dynamic_pointer_cast<ov::op::v0::MatMul>(
-    ngraph::builder::makeMatMul(paramOuts[1],
-                                matmul1SecondaryInput,
-                                shapeRelatedParams.matmul2_input1.second,
-                                shapeRelatedParams.matmul2_input2.second));
-auto Add = std::dynamic_pointer_cast<ov::op::v1::Add>(
-    ngraph::builder::makeEltwise(matMul0, matMul1, ngraph::helpers::EltwiseTypes::ADD));
+auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(params));
+auto matMul0 = std::make_shared<ov::op::v0::MatMul>(paramOuts[0],
+                                                    matmul0SecondaryInput,
+                                                    shapeRelatedParams.matmul1_input1.second,
+                                                    shapeRelatedParams.matmul1_input2.second);
+auto matMul1 = std::make_shared<ov::op::v0::MatMul>(paramOuts[1],
+                                                    matmul1SecondaryInput,
+                                                    shapeRelatedParams.matmul2_input1.second,
+                                                    shapeRelatedParams.matmul2_input2.second);
+auto Add = std::make_shared<ov::op::v1::Add>(matMul0, matMul1);
ov::ResultVector results{std::make_shared<ngraph::opset1::Result>(Add)};
function = std::make_shared<ngraph::Function>(results, params, "FullyConnected");
}

@@ -34,10 +34,9 @@ class CUDNNGRUCellTest : public UnsymmetricalComparer<GRUCellTest> {
int seed = SEED_FIRST;
for (const auto& op : ops) {
if (std::dynamic_pointer_cast<ngraph::opset1::Constant>(op)) {
-const auto constant = ngraph::builder::makeConstant(
-    op->get_element_type(), op->get_shape(), std::vector<float>{}, true, up_to, start_from, seed);
-function->replace_node(op, constant);
-++seed;
+ov::Tensor random_tensor(op->get_element_type(), op->get_shape());
+ov::test::utils::fill_tensor_random(random_tensor, up_to - start_from, start_from, 1, seed++);
+function->replace_node(op, std::make_shared<ov::op::v0::Constant>(random_tensor));
}
}

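The four RNN test files (GRUCell above, and the GRUSequence, LSTMCell, and LSTMSequence fixtures below) all repeat the same loop: walk the function's ops and swap every matching Constant for one filled with fresh random data, advancing the seed so no two constants receive identical weights. A sketch of that loop as a standalone helper, assuming the fixture holds an ngraph::Function as these tests do; the helper name randomize_constants and the header path are hypothetical:

    #include <memory>

    #include <ngraph/function.hpp>
    #include "common_test_utils/data_utils.hpp"  // assumed location of ov::test::utils::fill_tensor_random
    #include "openvino/op/constant.hpp"

    // Hypothetical helper distilling the loop repeated in the RNN fixtures.
    void randomize_constants(const std::shared_ptr<ngraph::Function>& function,
                             float start_from,
                             float up_to,
                             int seed) {
        for (const auto& op : function->get_ordered_ops()) {
            if (std::dynamic_pointer_cast<ov::op::v0::Constant>(op)) {
                // Same element type and shape as the constant being replaced;
                // a distinct seed per constant keeps the random weights independent.
                ov::Tensor random_tensor(op->get_element_type(), op->get_shape());
                ov::test::utils::fill_tensor_random(random_tensor, up_to - start_from, start_from, 1, seed++);
                function->replace_node(op, std::make_shared<ov::op::v0::Constant>(random_tensor));
            }
        }
    }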

@@ -27,9 +27,9 @@ class CUDNNGRUSequenceTest : public UnsymmetricalComparer<GRUSequenceTest> {
for (const auto& op : ops) {
if (std::dynamic_pointer_cast<ngraph::opset1::Constant>(op)) {
if (op->get_element_type() == ov::element::Type_t::f32) {
-const auto constant = ngraph::builder::makeConstant(
-    op->get_element_type(), op->get_shape(), std::vector<float>{}, true, up_to, start_from, seed++);
-function->replace_node(op, constant);
+ov::Tensor random_tensor(op->get_element_type(), op->get_shape());
+ov::test::utils::fill_tensor_random(random_tensor, up_to - start_from, start_from, 1, seed++);
+function->replace_node(op, std::make_shared<ov::op::v0::Constant>(random_tensor));
}
}
}

@@ -57,9 +57,9 @@ class LPCNetCUDNNGRUSequenceTest : public UnsymmetricalComparer<GRUSequenceTest>
for (const auto& op : ops) {
if (std::dynamic_pointer_cast<ngraph::opset1::Constant>(op)) {
if (op->get_element_type() == ov::element::Type_t::f32) {
-const auto constant = ngraph::builder::makeConstant(
-    op->get_element_type(), op->get_shape(), std::vector<float>{}, true, up_to, start_from, seed++);
-function->replace_node(op, constant);
+ov::Tensor random_tensor(op->get_element_type(), op->get_shape());
+ov::test::utils::fill_tensor_random(random_tensor, up_to - start_from, start_from, 1, seed++);
+function->replace_node(op, std::make_shared<ov::op::v0::Constant>(random_tensor));
}
}
}

@@ -37,8 +37,9 @@ class CUDNNLSTMCellTest : public LSTMCellTest {
int seed = SEED_FIRST;
for (const auto& op : ops) {
if (std::dynamic_pointer_cast<ngraph::opset1::Constant>(op)) {
-const auto constant = ngraph::builder::makeConstant(
-    op->get_element_type(), op->get_shape(), std::vector<float>{}, true, up_to, start_from, seed);
+ov::Tensor random_tensor(op->get_element_type(), op->get_shape());
+ov::test::utils::fill_tensor_random(random_tensor, up_to - start_from, start_from, 1, seed);
+auto constant = std::make_shared<ov::op::v0::Constant>(random_tensor);
function->replace_node(op, constant);
++seed;
}

@@ -26,14 +26,9 @@ class CUDALSTMSequenceTest : public UnsymmetricalComparer<LSTMSequenceTest> {
for (const auto& op : ops) {
if (std::dynamic_pointer_cast<ngraph::opset1::Constant>(op)) {
if (op->get_element_type() == ov::element::Type_t::f32) {
-const auto constant = ngraph::builder::makeConstant(op->get_element_type(),
-                                                    op->get_shape(),
-                                                    std::vector<float>{},
-                                                    true,
-                                                    up_to,
-                                                    start_from,
-                                                    counter++);
-function->replace_node(op, constant);
+ov::Tensor random_tensor(op->get_element_type(), op->get_shape());
+ov::test::utils::fill_tensor_random(random_tensor, up_to - start_from, start_from, 1, counter++);
+function->replace_node(op, std::make_shared<ov::op::v0::Constant>(random_tensor));
}
}
}
3 changes: 1 addition & 2 deletions modules/nvidia_plugin/tests/unit/test_networks.hpp
@@ -18,8 +18,7 @@ inline std::shared_ptr<ngraph::Function> CreateMatMulTestNetwork() {
auto secondaryInput = std::make_shared<ov::op::v0::Constant>(ngPrc, ov::Shape{3, 2, 10, 20});
auto paramOuts =
ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ov::op::v0::Parameter>(params));
-auto MatMul = std::dynamic_pointer_cast<ov::op::v0::MatMul>(
-    ngraph::builder::makeMatMul(paramOuts[0], secondaryInput, false, false));
+auto MatMul = std::make_shared<ov::op::v0::MatMul>(paramOuts[0], secondaryInput, false, false);
ov::ResultVector results{std::make_shared<ngraph::opset1::Result>(MatMul)};
return std::make_shared<ngraph::Function>(results, params, "MatMul");
}
