Merge branch 'master' into new_staging_test
ababushk authored Nov 27, 2024
2 parents 3b611e3 + c1575e1 commit 0a6b6c6
Showing 2 changed files with 85 additions and 54 deletions.
@@ -23,14 +23,35 @@
#include "openvino/op/util/unary_elementwise_arithmetic.hpp"
#include "transformations/utils/utils.hpp"

namespace {

void set_source_output_type_shape(const ov::Node& node,
const ov::element::Type& et,
const ov::PartialShape& new_shape,
const size_t port) {
const auto source_output = node.get_input_source_output(port);
source_output.get_node()->set_output_type(source_output.get_index(), et, new_shape);
}

void set_source_output_shape(const ov::Node& node, const ov::PartialShape& new_shape, const size_t port) {
set_source_output_type_shape(node, node.get_input_element_type(port), new_shape, port);
}

void set_source_output_type(const ov::Node& node, const ov::element::Type& et, const size_t port) {
set_source_output_type_shape(node, et, node.get_input_partial_shape(port), port);
}
} // namespace

bool ov::pass::ReverseShapeAndTypeInfer::inherit_output_shape(const std::shared_ptr<ov::Node>& node,
const std::vector<size_t>& input_idxs) {
auto is_changed = false;
auto output_shape = node->get_output_partial_shape(0);

for (auto idx : input_idxs) {
if (idx < node->get_input_size() && node->get_input_partial_shape(idx).compatible(output_shape)) {
PartialShape::merge_into(node->get_input_tensor(idx).m_partial_shape, output_shape);
auto new_shape = node->get_input_partial_shape(idx);
PartialShape::merge_into(new_shape, output_shape);
set_source_output_shape(*node, new_shape, idx);
is_changed = true;
}
}
@@ -44,7 +65,7 @@ bool ov::pass::ReverseShapeAndTypeInfer::inherit_output_rank(const std::shared_p

for (auto idx : input_idxs) {
if (idx < node->get_input_size() && node->get_input_partial_shape(idx).rank().is_dynamic()) {
node->get_input_tensor(idx).m_partial_shape = ov::PartialShape::dynamic(output_shape.rank());
set_source_output_shape(*node, ov::PartialShape::dynamic(output_shape.rank()), idx);
is_changed = true;
}
}
@@ -58,7 +79,7 @@ bool ov::pass::ReverseShapeAndTypeInfer::inherit_output_type(const std::shared_p

for (auto idx : input_idxs) {
if (idx < node->get_input_size() && node->get_input_element_type(idx).is_dynamic()) {
node->get_input_tensor(idx).m_element_type = output_type;
set_source_output_type(*node, output_type, idx);
is_changed = true;
}
}
@@ -92,7 +113,9 @@ bool ov::pass::ReverseShapeAndTypeInfer::run_on_model(const std::shared_ptr<ov::
const auto& weigths_pshape = op->get_input_partial_shape(1);
if (weigths_pshape.rank().is_static() && op->get_input_partial_shape(1).rank().is_static() &&
weigths_pshape[1] != 1) {
op->get_input_tensor(0).m_partial_shape[1] = weigths_pshape[1];
auto new_shape = op->get_input_partial_shape(0);
new_shape[1] = weigths_pshape[1];
set_source_output_shape(*op, new_shape, 0);
}
is_changed |= inherit_output_type(op, {0, 1});
} else if (ov::as_type_ptr<ov::op::v1::GroupConvolution>(op)) {
@@ -101,7 +124,9 @@ bool ov::pass::ReverseShapeAndTypeInfer::run_on_model(const std::shared_ptr<ov::
const auto& weigths_pshape = op->get_input_partial_shape(1);
if (weigths_pshape.rank().is_static() && op->get_input_partial_shape(1).rank().is_static() &&
weigths_pshape[2] != 1) {
op->get_input_tensor(0).m_partial_shape[1] = weigths_pshape[0] * weigths_pshape[2];
auto new_shape = op->get_input_partial_shape(0);
new_shape[1] = weigths_pshape[0] * weigths_pshape[2];
set_source_output_shape(*op, new_shape, 0);
}
is_changed |= inherit_output_type(op, {0, 1});
} else if (ov::as_type_ptr<ov::op::v1::ConvolutionBackpropData>(op)) {
@@ -110,7 +135,9 @@ bool ov::pass::ReverseShapeAndTypeInfer::run_on_model(const std::shared_ptr<ov::
const auto& weigths_pshape = op->get_input_partial_shape(1);
if (weigths_pshape.rank().is_static() && op->get_input_partial_shape(1).rank().is_static() &&
weigths_pshape[0] != 1) {
op->get_input_tensor(0).m_partial_shape[1] = weigths_pshape[0];
auto new_shape = op->get_input_partial_shape(0);
new_shape[1] = weigths_pshape[0];
set_source_output_shape(*op, new_shape, 0);
}
is_changed |= inherit_output_type(op, {0, 1});
} else if (ov::as_type_ptr<ov::op::v1::GroupConvolutionBackpropData>(op)) {
@@ -119,7 +146,9 @@ bool ov::pass::ReverseShapeAndTypeInfer::run_on_model(const std::shared_ptr<ov::
const auto& weigths_pshape = op->get_input_partial_shape(1);
if (weigths_pshape.rank().is_static() && op->get_input_partial_shape(1).rank().is_static() &&
weigths_pshape[1] != 1) {
op->get_input_tensor(0).m_partial_shape[1] = weigths_pshape[0] * weigths_pshape[1];
auto new_shape = op->get_input_partial_shape(0);
new_shape[1] = weigths_pshape[0] * weigths_pshape[1];
set_source_output_shape(*op, new_shape, 0);
}
is_changed |= inherit_output_type(op, {0, 1});
} else if (ov::as_type_ptr<ov::op::v8::DeformableConvolution>(op)) {
@@ -131,10 +160,10 @@ bool ov::pass::ReverseShapeAndTypeInfer::run_on_model(const std::shared_ptr<ov::
auto pads_begin_shape = op->get_input_partial_shape(1);
auto pads_end_shape = op->get_input_partial_shape(2);
if (pads_begin_shape.is_static() && pads_begin_shape.size() > 0) {
op->get_input_tensor(0).m_partial_shape = PartialShape::dynamic(pads_begin_shape[0]);
set_source_output_shape(*op, PartialShape::dynamic(pads_begin_shape[0]), 0);
is_changed = true;
} else if (pads_end_shape.is_static() && pads_end_shape.size() > 0) {
op->get_input_tensor(0).m_partial_shape = PartialShape::dynamic(pads_end_shape[0]);
set_source_output_shape(*op, PartialShape::dynamic(pads_end_shape[0]), 0);
is_changed = true;
}
}
@@ -147,15 +176,17 @@ bool ov::pass::ReverseShapeAndTypeInfer::run_on_model(const std::shared_ptr<ov::
auto in0_rank = op->get_input_partial_shape(0).rank();
auto in1_rank = op->get_input_partial_shape(1).rank();
if (in0_rank.is_dynamic() && in1_rank.is_static()) {
if (eltwise->get_autob() == ov::op::AutoBroadcastType::NONE)
op->get_input_tensor(0).m_partial_shape = output_shape;
else if (in1_rank.get_length() < output_shape.rank().get_length())
op->get_input_tensor(0).m_partial_shape = PartialShape::dynamic(output_shape.rank());
if (eltwise->get_autob() == ov::op::AutoBroadcastType::NONE) {
set_source_output_shape(*op, output_shape, 0);
} else if (in1_rank.get_length() < output_shape.rank().get_length()) {
set_source_output_shape(*op, PartialShape::dynamic(output_shape.rank()), 0);
}
} else if (in1_rank.is_dynamic() && in0_rank.is_static()) {
if (eltwise->get_autob() == ov::op::AutoBroadcastType::NONE)
op->get_input_tensor(1).m_partial_shape = output_shape;
else if (in0_rank.get_length() < output_shape.rank().get_length())
op->get_input_tensor(1).m_partial_shape = PartialShape::dynamic(output_shape.rank());
if (eltwise->get_autob() == ov::op::AutoBroadcastType::NONE) {
set_source_output_shape(*op, output_shape, 1);
} else if (in0_rank.get_length() < output_shape.rank().get_length()) {
set_source_output_shape(*op, PartialShape::dynamic(output_shape.rank()), 1);
}
}
}
is_changed |= inherit_output_type(op, {0, 1});
@@ -172,7 +203,9 @@ bool ov::pass::ReverseShapeAndTypeInfer::run_on_model(const std::shared_ptr<ov::
input_pshape[axis] = Dimension::dynamic();
for (auto idx : input_idxs) {
if (idx < op->get_input_size() && op->get_input_partial_shape(idx).compatible(input_pshape)) {
PartialShape::merge_into(op->get_input_tensor(idx).m_partial_shape, input_pshape);
auto new_shape = op->get_input_partial_shape(idx);
PartialShape::merge_into(new_shape, input_pshape);
set_source_output_shape(*op, new_shape, idx);
is_changed = true;
}
}
@@ -189,8 +222,9 @@ bool ov::pass::ReverseShapeAndTypeInfer::run_on_model(const std::shared_ptr<ov::
auto in1_pshape = op->get_input_partial_shape(1);
if (in1_pshape.is_static()) {
auto num_dims = in1_pshape.size() == 0 ? 1 : in1_pshape[0].get_length();
op->get_input_tensor(0).m_partial_shape =
PartialShape::dynamic(output_shape.rank().get_length() + num_dims);
set_source_output_shape(*op,
PartialShape::dynamic(output_shape.rank().get_length() + num_dims),
0);
}
} else if (in0_rank.is_static() && op->get_input_size() == 1) {
// attempt to create second input
@@ -215,8 +249,7 @@ bool ov::pass::ReverseShapeAndTypeInfer::run_on_model(const std::shared_ptr<ov::
auto in1_pshape = op->get_input_partial_shape(1);
if (output_shape.rank().is_static() && in0_rank.is_dynamic() && in1_pshape.is_static()) {
auto num_dims = in1_pshape.size() == 0 ? 1 : in1_pshape[0].get_length();
op->get_input_tensor(0).m_partial_shape =
PartialShape::dynamic(output_shape.rank().get_length() - num_dims);
set_source_output_shape(*op, PartialShape::dynamic(output_shape.rank().get_length() - num_dims), 0);
}
is_changed |= inherit_output_type(op, {0});
} else if (const auto& if_op = ov::as_type_ptr<ov::op::v8::If>(op)) {
@@ -227,21 +260,24 @@ bool ov::pass::ReverseShapeAndTypeInfer::run_on_model(const std::shared_ptr<ov::
const auto& else_body_results = else_body->get_results();
const auto& then_out_desc = if_op->get_output_descriptions(ov::op::v8::If::THEN_BODY_INDEX);
const auto& else_out_desc = if_op->get_output_descriptions(ov::op::v8::If::ELSE_BODY_INDEX);

for (const auto& out_desc : then_out_desc) {
const auto& out_indx = out_desc->m_output_index;
const auto& body_indx = out_desc->m_body_value_index;
then_body_results[body_indx]->get_input_tensor(0).m_partial_shape =
if_op->get_output_partial_shape(out_indx);
then_body_results[body_indx]->get_input_tensor(0).m_element_type =
if_op->get_output_element_type(out_indx);

set_source_output_type_shape(*then_body_results[body_indx],
if_op->get_output_element_type(out_indx),
if_op->get_output_partial_shape(out_indx),
0);
}

for (const auto& out_desc : else_out_desc) {
const auto& out_indx = out_desc->m_output_index;
const auto& body_indx = out_desc->m_body_value_index;
else_body_results[body_indx]->get_input_tensor(0).m_partial_shape =
if_op->get_output_partial_shape(out_indx);
else_body_results[body_indx]->get_input_tensor(0).m_element_type =
if_op->get_output_element_type(out_indx);
set_source_output_type_shape(*else_body_results[body_indx],
if_op->get_output_element_type(out_indx),
if_op->get_output_partial_shape(out_indx),
0);
}
is_changed |= run_on_model(then_body);
is_changed |= run_on_model(else_body);
@@ -252,34 +288,30 @@ bool ov::pass::ReverseShapeAndTypeInfer::run_on_model(const std::shared_ptr<ov::
for (const auto& in_desc : then_in_desc) {
const auto& in_indx = in_desc->m_input_index;
const auto& body_indx = in_desc->m_body_parameter_index;
if (if_op->get_input_tensor(in_indx).get_partial_shape().rank().is_dynamic()) {
if_op->get_input_tensor(in_indx).m_partial_shape =
then_body_params.at(body_indx)->get_partial_shape();
is_changed = true;
}
if (if_op->get_input_tensor(in_indx).get_element_type().is_dynamic()) {
if_op->get_input_tensor(in_indx).m_element_type =
then_body_params.at(body_indx)->get_element_type();
if (if_op->get_input_tensor(in_indx).get_partial_shape().rank().is_dynamic() ||
if_op->get_input_tensor(in_indx).get_element_type().is_dynamic()) {
set_source_output_type_shape(*if_op,
then_body_params.at(body_indx)->get_element_type(),
then_body_params.at(body_indx)->get_partial_shape(),
in_indx);
is_changed = true;
}
}
for (const auto& in_desc : else_in_desc) {
const auto& in_indx = in_desc->m_input_index;
const auto& body_indx = in_desc->m_body_parameter_index;
if (if_op->get_input_tensor(in_indx).get_partial_shape().rank().is_dynamic()) {
if_op->get_input_tensor(in_indx).m_partial_shape =
else_body_params.at(body_indx)->get_partial_shape();
is_changed = true;
}
if (if_op->get_input_tensor(in_indx).get_element_type().is_dynamic()) {
if_op->get_input_tensor(in_indx).m_element_type =
else_body_params.at(body_indx)->get_element_type();
if (if_op->get_input_tensor(in_indx).get_partial_shape().rank().is_dynamic() ||
if_op->get_input_tensor(in_indx).get_element_type().is_dynamic()) {
set_source_output_type_shape(*if_op,
else_body_params.at(body_indx)->get_element_type(),
else_body_params.at(body_indx)->get_partial_shape(),
in_indx);
is_changed = true;
}
}
// Set type for If condition
if (if_op->get_input_element_type(0).is_dynamic()) {
if_op->get_input_tensor(0).m_element_type = element::boolean;
set_source_output_type(*if_op, element::boolean, 0);
is_changed = true;
}

@@ -288,7 +320,7 @@ bool ov::pass::ReverseShapeAndTypeInfer::run_on_model(const std::shared_ptr<ov::
if (if_op->get_rt_info().count("tf_switch_merge_if") &&
if_op->get_rt_info()["tf_switch_merge_if"].as<bool>() &&
if_op->input_value(0).get_partial_shape().rank().is_dynamic()) {
if_op->get_input_tensor(0).m_partial_shape = ov::PartialShape({});
set_source_output_shape(*if_op, PartialShape{}, 0);
is_changed = true;
}
} else if (ov::as_type_ptr<ov::op::v1::ConvertLike>(op)) {
@@ -301,24 +333,24 @@ bool ov::pass::ReverseShapeAndTypeInfer::run_on_model(const std::shared_ptr<ov::
// set more precise dimensions during reverse infer
// if transpose order is known
int64_t rank_length = output_shape.rank().get_length();
PartialShape::merge_into(op->get_input_tensor(0).m_partial_shape,
PartialShape::dynamic(output_shape.rank()));
auto new_shape = op->get_input_partial_shape(0);
PartialShape::merge_into(new_shape, PartialShape::dynamic(output_shape.rank()));
auto order_value = transpose_order->cast_vector<int64_t>();
OPENVINO_ASSERT(order_value.size() == static_cast<size_t>(rank_length),
"The length of Transpose order and the input rank mismatch");
for (int64_t dim_idx = 0; dim_idx < rank_length; ++dim_idx) {
OPENVINO_ASSERT(0 <= order_value[dim_idx] && order_value[dim_idx] < rank_length,
"Transpose order is out-of-range");
op->get_input_tensor(0).m_partial_shape[order_value[dim_idx]] = output_shape[dim_idx];
new_shape[order_value[dim_idx]] = output_shape[dim_idx];
}
set_source_output_shape(*op, new_shape, 0);
is_changed = true;
} else {
is_changed |= inherit_output_rank(op, {0});
}
} else if (transpose_order) {
auto order_value = transpose_order->cast_vector<int64_t>();
PartialShape::merge_into(op->get_input_tensor(0).m_partial_shape,
PartialShape::dynamic(order_value.size()));
set_source_output_shape(*op, PartialShape::dynamic(order_value.size()), 0);
is_changed = true;
}
is_changed |= inherit_output_type(op, {0});
1 change: 0 additions & 1 deletion src/core/include/openvino/core/descriptor/tensor.hpp
@@ -107,7 +107,6 @@ class OPENVINO_API Tensor {
friend OPENVINO_API void set_tensor_type(Tensor& tensor,
const element::Type& element_type,
const PartialShape& pshape);
friend class pass::ReverseShapeAndTypeInfer;

private:
mutable std::atomic<bool> m_shape_changing{false};
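Note on the change, with a hedged sketch (not part of the commit): the pass no longer writes into a consumer's input ov::descriptor::Tensor fields (m_partial_shape, m_element_type) directly, which is why the friend declaration is dropped from tensor.hpp above. Instead, the new set_source_output_shape / set_source_output_type / set_source_output_type_shape helpers follow an input back to the output that produces it and update that output through the public Node API. The standalone C++ sketch below only illustrates that pattern; the Parameter/Relu graph, the f32 type, and the {1, 3, 224, 224} shape are invented for illustration and do not appear in the commit.

#include <memory>

#include "openvino/op/parameter.hpp"
#include "openvino/op/relu.hpp"

int main() {
    using namespace ov;

    // A producer with a fully dynamic shape feeding a simple consumer.
    auto param = std::make_shared<op::v0::Parameter>(element::f32, PartialShape::dynamic());
    auto relu = std::make_shared<op::v0::Relu>(param);

    // Old pattern (required friend access to ov::descriptor::Tensor):
    //   relu->get_input_tensor(0).m_partial_shape = ...;
    // New pattern: follow input 0 back to the producing output and refine
    // the type/shape there via the public Node::set_output_type() call.
    const auto source = relu->get_input_source_output(0);
    source.get_node()->set_output_type(source.get_index(),
                                       relu->get_input_element_type(0),
                                       PartialShape{1, 3, 224, 224});

    // Re-run inference on the consumer so it picks up the refined input.
    relu->validate_and_infer_types();
    return 0;
}

Going through the producing node keeps shape and type bookkeeping inside the core descriptor machinery, which is presumably why the friend class pass::ReverseShapeAndTypeInfer hook could be removed.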