From ced110f74242f468329c276b947898d5812b4323 Mon Sep 17 00:00:00 2001
From: Pranshu-S
Date: Sun, 17 Mar 2024 13:21:31 +0530
Subject: [PATCH] Support complex tensors for Cumsum operation

---
 .../tensorflow_common/src/op/cumsum.cpp      | 30 +++++++++
 .../tensorflow_tests/test_tf_Cumsum.py       | 64 +++++++++++++++++++
 2 files changed, 94 insertions(+)

diff --git a/src/frontends/tensorflow_common/src/op/cumsum.cpp b/src/frontends/tensorflow_common/src/op/cumsum.cpp
index b95a9b9e1812ed..c0e7d1a79df42b 100644
--- a/src/frontends/tensorflow_common/src/op/cumsum.cpp
+++ b/src/frontends/tensorflow_common/src/op/cumsum.cpp
@@ -4,6 +4,12 @@
 
 #include "common_op_table.hpp"
 #include "openvino/op/cum_sum.hpp"
+#include "helper_ops/complex_type_mark.hpp"
+#include "openvino/op/greater.hpp"
+#include "openvino/op/equal.hpp"
+#include "openvino/op/logical_or.hpp"
+#include "openvino/op/select.hpp"
+#include "openvino/op/add.hpp"
 
 using namespace std;
 using namespace ov::op;
@@ -20,6 +26,30 @@ OutputVector translate_cumsum_op(const NodeContext& node) {
     auto exclusive = node.get_attribute<bool>("exclusive", false);
     auto reverse = node.get_attribute<bool>("reverse", false);
 
+    auto complex_type_mark = as_type_ptr<ComplexTypeMark>(x.get_node_shared_ptr());
+    if (complex_type_mark) {
+        x = complex_type_mark->input_value(0);
+        auto complex_part_type = complex_type_mark->get_complex_part_type();
+        auto zero = create_same_type_const_scalar<int32_t>(axis, 0);
+
+        // check whether the axis is non-negative: (axis == 0) || (axis > 0)
+        auto is_zero = make_shared<v1::Equal>(axis, zero);
+        auto greater_than_zero = make_shared<v1::Greater>(axis, zero);
+        auto is_non_negative = make_shared<v1::LogicalOr>(is_zero, greater_than_zero);
+
+        // the complex tensor is decomposed into a floating-point tensor with an auxiliary
+        // dimension of size 2 appended at the end, so a negative axis must be shifted by -1
+        // while a non-negative axis stays unchanged
+        auto const_minus_one = create_same_type_const_scalar<int32_t>(axis, -1);
+        auto axis_update = make_shared<v1::Select>(is_non_negative, zero, const_minus_one);
+        auto new_axis = make_shared<v1::Add>(axis, axis_update);
+
+        auto complex_cum_sum = make_shared<v0::CumSum>(x, new_axis, exclusive, reverse);
+        set_node_name(node.get_name(), complex_cum_sum);
+        auto complex_result = make_shared<ComplexTypeMark>(complex_cum_sum, complex_part_type);
+        return {complex_result->output(0)};
+    }
+
     auto cum_sum = make_shared<v0::CumSum>(x, axis, exclusive, reverse);
     set_node_name(node.get_name(), cum_sum);
     return cum_sum->outputs();
diff --git a/tests/layer_tests/tensorflow_tests/test_tf_Cumsum.py b/tests/layer_tests/tensorflow_tests/test_tf_Cumsum.py
index fe6186887b60cf..e64b44beafb1f8 100644
--- a/tests/layer_tests/tensorflow_tests/test_tf_Cumsum.py
+++ b/tests/layer_tests/tensorflow_tests/test_tf_Cumsum.py
@@ -54,3 +54,67 @@ def test_cumsum_basic(self, params, exclusive, reverse, ie_device, precision, ir
         self._test(*self.create_cumsum_net(**params, exclusive=exclusive, reverse=reverse),
                    ie_device, precision, ir_version, temp_dir=temp_dir,
                    use_legacy_frontend=use_legacy_frontend)
+
+
+class TestComplexCumsum(CommonTFLayerTest):
+    # input_shape - shape of the complex input tensor
+    # axis - axis along which the cumulative sum is computed
+    # exclusive - enables exclusive Cumsum
+    # reverse - enables reverse order of Cumsum
+    def _prepare_input(self, inputs_info):
+        rng = np.random.default_rng()
+        assert 'x_real:0' in inputs_info
+        assert 'x_imag:0' in inputs_info
+        x_shape = inputs_info['x_real:0']
+        inputs_data = {}
+
+        inputs_data['x_real:0'] = 4 * rng.random(x_shape).astype(np.float32) - 2
+        inputs_data['x_imag:0'] = 4 * rng.random(x_shape).astype(np.float32) - 2
+
+        return inputs_data
+
+    def create_cumsum_net(self, input_shape, axis, exclusive, reverse):
+        import tensorflow as tf
+
+        tf.compat.v1.reset_default_graph()
+
+        # Create the graph and model
+        with tf.compat.v1.Session() as sess:
+            x_real = tf.compat.v1.placeholder(tf.float32, input_shape, 'x_real')
+            x_imag = tf.compat.v1.placeholder(tf.float32, input_shape, 'x_imag')
+
+            complex_input = tf.complex(x_real, x_imag)
+
+            tf_axis = tf.constant(axis, dtype=tf.int32)
+            result = tf.raw_ops.Cumsum(x=complex_input, axis=tf_axis,
+                                       exclusive=exclusive, reverse=reverse)
+
+            real = tf.raw_ops.Real(input=result)
+            imag = tf.raw_ops.Imag(input=result)
+
+            tf_net = sess.graph_def
+
+        ref_net = None
+
+        return tf_net, ref_net
+
+    test_data = [
+        dict(input_shape=[2], axis=-1),
+        dict(input_shape=[2, 3], axis=0),
+        dict(input_shape=[2, 3], axis=1),
+        dict(input_shape=[2, 3], axis=-2),
+        dict(input_shape=[2, 3, 3, 4], axis=2),
+        dict(input_shape=[2, 3, 3, 4], axis=-3),
+    ]
+
+    @pytest.mark.parametrize("params", test_data)
+    @pytest.mark.parametrize("exclusive", [False, True, None])
+    @pytest.mark.parametrize("reverse", [False, True, None])
+    @pytest.mark.precommit
+    @pytest.mark.precommit_tf_fe
+    @pytest.mark.nightly
+    def test_complex_cumsum(self, params, exclusive, reverse, ie_device, precision, ir_version, temp_dir,
+                            use_legacy_frontend):
+        self._test(*self.create_cumsum_net(**params, exclusive=exclusive, reverse=reverse),
+                   ie_device, precision, ir_version, temp_dir=temp_dir,
+                   use_legacy_frontend=use_legacy_frontend)
\ No newline at end of file