
Commit

Merge branch 'master' into mateuszm/slicescatter/cpu_bugfix
mmikolajcz authored Nov 29, 2024
2 parents a4bcad3 + 7e42cb2 commit e355673
Showing 22 changed files with 468 additions and 94 deletions.
@@ -72,7 +72,7 @@ ov::matcher_pass_callback ConvertReduceBase::convert_reduce_to_pooling() {
     return [&](ov::pass::pattern::Matcher& m) {
         auto reduce = std::dynamic_pointer_cast<T>(m.get_match_root());

-        if (!reduce || transformation_callback(reduce)) {
+        if (!reduce || transformation_callback(reduce) || ov::shape_size(reduce->input_value(0).get_shape()) == 0) {
             return false;
         }

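
The added ov::shape_size check skips the Reduce-to-Pooling conversion whenever the input tensor has no elements, leaving such a Reduce node untouched. A minimal standalone sketch of why the guard fires (illustrative only, not part of the commit; assumes the OpenVINO core headers are available):

    #include "openvino/core/shape.hpp"

    #include <iostream>

    int main() {
        // ov::shape_size is the product of all dimensions, so any zero-sized axis gives 0.
        const ov::Shape empty_like{2, 0, 4};
        std::cout << ov::shape_size(empty_like) << '\n';  // prints 0 -> the matcher callback now returns false
        return 0;
    }
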
@@ -11,10 +11,30 @@
 #include "openvino/op/tensor_iterator.hpp"
 #include "openvino/op/util/multi_subgraph_base.hpp"
 #include "openvino/pass/pattern/op/wrap_type.hpp"
+#include "openvino/util/common_util.hpp"
 #include "transformations/utils/utils.hpp"

 using namespace ov::op::util;

+namespace {
+/** @brief Value to mark that input idx has been removed (at least one removed so last idx will be always available) */
+constexpr auto mark_removed = std::numeric_limits<uint64_t>::max();
+
+constexpr bool is_not_removed_idx(const decltype(mark_removed) idx) {
+    return mark_removed != idx;
+}
+
+uint64_t get_updated_idx(uint64_t idx, uint64_t removed_idx) {
+    if (idx == removed_idx) {
+        return mark_removed;
+    } else if (is_not_removed_idx(idx) && idx > removed_idx) {
+        return idx - 1;
+    } else {
+        return idx;
+    }
+};
+} // namespace
+
 bool ov::pass::RemoveMultiSubGraphOpDanglingParamsResults::run_on_model(const std::shared_ptr<ov::Model>& m) {
     RUN_ON_MODEL_SCOPE(RemoveMultiSubGraphOpDanglingParamsResults);
     bool is_changed = false;
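
The anonymous-namespace helpers introduced above track removed indices with a sentinel instead of decrementing unconditionally: the removed slot is tagged with mark_removed, indices past it shift down by one, and earlier indices stay unchanged. A usage sketch (illustrative only; assumes the helpers above are in scope in the same translation unit):

    #include <cassert>

    static void get_updated_idx_sketch() {
        // Removing the descriptor/input at index 1:
        assert(get_updated_idx(0, 1) == 0);             // before the removed slot: unchanged
        assert(get_updated_idx(1, 1) == mark_removed);  // the removed slot itself: tagged
        assert(get_updated_idx(2, 1) == 1);             // after the removed slot: shifted down by one
        assert(!is_not_removed_idx(mark_removed));      // tagged entries are filtered out later
    }
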
@@ -117,7 +137,6 @@ bool ov::pass::RemoveMultiSubGraphOpDanglingParamsResults::run_on_model(const st
         // Remove inputs
         bool pass_required = false;
         std::set<uint64_t> required_inputs_indices;
-        auto op_inputs = multi_subgraph_op->input_values();
         std::vector<std::vector<size_t>> to_remove_descriptors_indexes;
         to_remove_descriptors_indexes.resize(subgraphs_size);
         for (size_t body_idx = 0; body_idx < subgraphs_size; ++body_idx) {
@@ -142,64 +161,57 @@ bool ov::pass::RemoveMultiSubGraphOpDanglingParamsResults::run_on_model(const st
         using DescType = op::util::MultiSubGraphOp::MultiSubgraphInputDescriptionVector;
         auto update_body_param_desc = [](DescType& descriptors, uint64_t removed_body_idx) {
             for (auto& desc : descriptors) {
-                if (desc->m_body_parameter_index > removed_body_idx) {
-                    desc->m_body_parameter_index--;
-                }
+                desc->m_body_parameter_index = get_updated_idx(desc->m_body_parameter_index, removed_body_idx);
             }
         };
         auto update_op_inputs_desc = [&subgraphs_size](const std::shared_ptr<op::util::MultiSubGraphOp>& op,
-                                                       std::set<uint64_t>& required_inputs_indices,
                                                        uint64_t removed_loop_idx) {
-            std::set<uint64_t> new_required_inputs_indices;
             for (size_t body_idx = 0; body_idx < subgraphs_size; ++body_idx) {
                 auto& descriptors = op->get_input_descriptions(static_cast<int>(body_idx));
                 for (auto& desc : descriptors) {
-                    if (desc->m_input_index > removed_loop_idx) {
-                        desc->m_input_index--;
-                    }
+                    desc->m_input_index = get_updated_idx(desc->m_input_index, removed_loop_idx);
                 }
             }
-            for (auto input_index : required_inputs_indices) {
-                if (input_index > removed_loop_idx) {
-                    new_required_inputs_indices.insert(input_index - 1);
-                } else {
-                    new_required_inputs_indices.insert(input_index);
-                }
+        };
+
+        const auto update_required_input_indicies = [](std::set<uint64_t>& required_inputs_indices,
+                                                       uint64_t removed_input_idx) {
+            std::set<uint64_t> new_required_inputs_indices;
+            for (const auto& input_index : required_inputs_indices) {
+                new_required_inputs_indices.insert(input_index > removed_input_idx ? input_index - 1 : input_index);
             }
-            required_inputs_indices = new_required_inputs_indices;
+            required_inputs_indices = std::move(new_required_inputs_indices);
         };
         // Remove dangling body params and input and update input descriptors
+        auto op_inputs = multi_subgraph_op->input_values();
         for (size_t body_idx = 0; body_idx < subgraphs_size; ++body_idx) {
             auto& body_in_descriptors = multi_subgraph_op->get_input_descriptions(static_cast<int>(body_idx));
-            auto& body_func = multi_subgraph_op->get_function(static_cast<int>(body_idx));
-            auto& body_params = body_func->get_parameters();
             op::util::MultiSubGraphOp::MultiSubgraphInputDescriptionVector updated_body_in_descriptors;
+
             for (size_t desc_idx = 0; desc_idx < body_in_descriptors.size(); ++desc_idx) {
-                if (std::count(std::begin(to_remove_descriptors_indexes[body_idx]),
-                               std::end(to_remove_descriptors_indexes[body_idx]),
-                               desc_idx) > 0) {
-                    if (body_in_descriptors[desc_idx]->m_body_parameter_index < body_params.size()) {
-                        auto& body_param = body_params[body_in_descriptors[desc_idx]->m_body_parameter_index];
-                        body_func->remove_parameter(body_param);
-                        // Move all body indexes which are after these indicated by to_remove_descriptors_indexes
-                        update_body_param_desc(body_in_descriptors,
-                                               body_in_descriptors[desc_idx]->m_body_parameter_index);
-                    }
-                    // remove dangling input of MultiSubGraphOp which was not removed earlier
-                    auto current_input_idx = body_in_descriptors[desc_idx]->m_input_index;
-                    // the same input tensor can go to different input ports
-                    if (current_input_idx < op_inputs.size() &&
-                        std::count(std::begin(required_inputs_indices),
-                                   std::end(required_inputs_indices),
-                                   current_input_idx) == 0 &&
-                        std::count(std::begin(op_inputs), std::end(op_inputs), op_inputs[current_input_idx]) > 0) {
-                        op_inputs.erase(std::next(op_inputs.begin(), current_input_idx));
-                        // Move all input indexes (in all bodies) which are after these indicated by
-                        // to_remove_descriptors_indexes and are not used in any body
-                        update_op_inputs_desc(multi_subgraph_op, required_inputs_indices, current_input_idx);
-                    }
-                } else {
-                    updated_body_in_descriptors.emplace_back(body_in_descriptors[desc_idx]);
+                auto& current_body_desc = body_in_descriptors[desc_idx];
+                const auto current_body_parameter_idx = current_body_desc->m_body_parameter_index;
+                if (!util::contains(to_remove_descriptors_indexes[body_idx], desc_idx)) {
+                    updated_body_in_descriptors.emplace_back(current_body_desc);
+                } else if (is_not_removed_idx(current_body_parameter_idx)) {
+                    auto& body_func = multi_subgraph_op->get_function(body_idx);
+                    const auto& body_params = body_func->get_parameters();
+
+                    body_func->remove_parameter(body_params[current_body_parameter_idx]);
+                    // Move all body indexes which are after these indicated by to_remove_descriptors_indexes
+                    update_body_param_desc(body_in_descriptors, current_body_parameter_idx);
                 }
+
+                const auto current_input_idx = current_body_desc->m_input_index;
+                // remove dangling input of MultiSubGraphOp which was not removed earlier
+                // the same input tensor can go to different input ports
+                if (!util::contains(required_inputs_indices, current_input_idx) &&
+                    is_not_removed_idx(current_input_idx)) {
+                    op_inputs.erase(op_inputs.begin() + current_input_idx);
+                    // Move all input indexes (in all bodies) which are after these indicated by
+                    // to_remove_descriptors_indexes and are not used in any body
+                    update_op_inputs_desc(multi_subgraph_op, current_input_idx);
+                    update_required_input_indicies(required_inputs_indices, current_input_idx);
+                }
             }
             multi_subgraph_op->set_input_descriptions(static_cast<int>(body_idx), updated_body_in_descriptors);
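
Erasing a dangling input from op_inputs shifts every later input one slot to the left, which is why update_op_inputs_desc and update_required_input_indicies run immediately after the erase. A standalone sketch of that renumbering using plain standard containers (illustrative only, not part of the commit):

    #include <cstdint>
    #include <iostream>
    #include <set>
    #include <vector>

    int main() {
        // Pretend the op had three inputs and the one at index 1 turned out to be dangling.
        std::vector<int> op_inputs{10, 20, 30};
        std::set<uint64_t> required_inputs_indices{0, 2};
        const uint64_t removed_input_idx = 1;

        op_inputs.erase(op_inputs.begin() + removed_input_idx);

        // Same renumbering as update_required_input_indicies: indices past the erased slot shift down.
        std::set<uint64_t> renumbered;
        for (const auto idx : required_inputs_indices) {
            renumbered.insert(idx > removed_input_idx ? idx - 1 : idx);
        }

        for (const auto idx : renumbered) {
            std::cout << idx << ' ';  // prints: 0 1
        }
        std::cout << '\n';
        return 0;
    }
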
@@ -175,7 +175,7 @@ TEST_F(TransformationTestsF, RemoveLoopDanglingParametersIfConcatEmptyTensor) {
     }
 }

-TEST_F(TransformationTestsF, RemoveIfDanglingParametersFromBodiesAndInputs) {
+TEST_F(TransformationTestsF, RemoveIfDanglingParametersFromBodiesAndInputsConsecutive) {
     auto X = std::make_shared<Parameter>(element::f32, Shape{2, 4, 1});
     auto Y = std::make_shared<Parameter>(element::f32, Shape{3, 4, 1});
     auto cond = std::make_shared<Constant>(element::boolean, Shape{1}, true);
@@ -196,6 +196,8 @@ TEST_F(TransformationTestsF, RemoveIfDanglingParametersFromBodiesAndInputs) {
         if_op->set_else_body(else_body);
         if_op->set_input(X, Xte, Xte);
         if_op->set_input(Y, Yte, Yte);
+        // if_op descriptors are [desc_0, desc_1, desc_2, desc_3]
+        // desc_0, desc_2 are dangling, Parameters Y, Yte should be removed
         auto res = if_op->set_output(then_op_res, else_op_res);
         model = std::make_shared<Model>(OutputVector{res}, ParameterVector{X, Y});

@@ -213,6 +215,46 @@
     }
 }

+TEST_F(TransformationTestsF, RemoveIfDanglingParametersFromBodiesAndInputsNotConsecutive) {
+    auto X = std::make_shared<Parameter>(element::f32, Shape{2, 4, 1});
+    auto Y = std::make_shared<Parameter>(element::f32, Shape{3, 4, 1});
+    auto cond = std::make_shared<Constant>(element::boolean, Shape{1}, false);
+
+    auto Xte = std::make_shared<Parameter>(element::f32, PartialShape::dynamic());
+    auto Yte = std::make_shared<Parameter>(element::f32, PartialShape::dynamic());
+
+    auto then_op = std::make_shared<Add>(Yte, Yte);
+    auto then_op_res = std::make_shared<Result>(then_op);
+
+    auto else_op = std::make_shared<Maximum>(Yte, Yte);
+    auto else_op_res = std::make_shared<Result>(else_op);
+    {
+        auto then_body = std::make_shared<Model>(OutputVector{then_op_res}, ParameterVector{Xte, Yte});
+        auto else_body = std::make_shared<Model>(OutputVector{else_op_res}, ParameterVector{Xte, Yte});
+        auto if_op = std::make_shared<If>(cond);
+        if_op->set_then_body(then_body);
+        if_op->set_else_body(else_body);
+        if_op->set_input(X, Xte, Yte);
+        if_op->set_input(Y, Xte, Xte);
+        // if_op descriptors are [desc_0, desc_1, desc_2, desc_3]
+        // desc_0, desc_2, desc_3 are dangling, Parameters Y, Xte should be removed
+        auto res = if_op->set_output(then_op_res, else_op_res);
+        model = std::make_shared<Model>(OutputVector{res}, ParameterVector{X, Y});
+
+        manager.register_pass<ov::pass::RemoveMultiSubGraphOpDanglingParamsResults>();
+    }
+    {
+        auto then_body = std::make_shared<Model>(OutputVector{then_op_res}, ParameterVector{Yte});
+        auto else_body = std::make_shared<Model>(OutputVector{else_op_res}, ParameterVector{Yte});
+        auto if_op = std::make_shared<If>(cond);
+        if_op->set_then_body(then_body);
+        if_op->set_else_body(else_body);
+        if_op->set_input(X, Yte, Yte);
+        auto res = if_op->set_output(then_op_res, else_op_res);
+        model_ref = std::make_shared<Model>(OutputVector{res}, ParameterVector{X, Y});
+    }
+}
+
 TEST_F(TransformationTestsF, RemoveIfDanglingParametersOnlyFromBodies) {
     auto X = std::make_shared<Parameter>(element::f32, Shape{2, 4, 1});
     auto Y = std::make_shared<Parameter>(element::f32, Shape{3, 4, 1});
@@ -518,23 +560,28 @@ TEST_F(TransformationTestsF, RemoveLoopDanglingParamsAndResults) {
     auto ai = std::make_shared<Parameter>(element::f32, Shape{2, 2});
     auto b = std::make_shared<Parameter>(element::f32, Shape{2, 2});
     auto bi = std::make_shared<Parameter>(element::f32, Shape{2, 2});
+    auto c = std::make_shared<Parameter>(element::f32, Shape{2, 2});
+    auto ci = std::make_shared<Parameter>(element::f32, Shape{2, 2});
+    auto d = std::make_shared<Parameter>(element::f32, Shape{2, 2});

     auto mul = std::make_shared<Multiply>(ai, ai);
     auto abs1 = std::make_shared<Abs>(mul);
     auto add = std::make_shared<Add>(bi, bi);
     auto abs2 = std::make_shared<Abs>(add);
     {
-        auto body = std::make_shared<Model>(OutputVector{condition, abs1, abs2}, ParameterVector{ai, bi});
+        auto body = std::make_shared<Model>(OutputVector{condition, abs1, abs2}, ParameterVector{ai, bi, ci});
         auto loop = std::make_shared<Loop>(trip_count, condition);
         loop->set_special_body_ports({-1, 0});
         loop->set_function(body);
         loop->set_invariant_input(ai, a);
+        loop->set_invariant_input(ci, d);
         loop->set_invariant_input(bi, b);
+        loop->set_invariant_input(ci, c);

         auto loop_res = std::make_shared<Result>(loop->get_iter_value(abs1));
         loop->get_iter_value(abs2);
         // abs2 result is unused
-        model = std::make_shared<Model>(OutputVector{loop_res}, ParameterVector{a, b});
+        model = std::make_shared<Model>(OutputVector{loop_res}, ParameterVector{a, b, c, d});

         manager.register_pass<ov::pass::RemoveMultiSubGraphOpDanglingParamsResults>();
     }
15 changes: 10 additions & 5 deletions src/common/util/include/openvino/util/common_util.hpp
@@ -131,11 +131,16 @@ T ceil_div(const T& x, const T& y) {
     return (x == 0 ? 0 : (1 + (x - 1) / y));
 }

-template <typename T, typename A, typename V>
-bool contains(const std::vector<T, A>& vec, const V& v) {
-    return std::any_of(vec.begin(), vec.end(), [&](const T& x) {
-        return x == v;
-    });
+/**
+ * @brief Checks if container contains the specific value.
+ *
+ * @param container The container of elements to examine.
+ * @param value Value to compare the elements to.
+ * @return True if value found in the container, false otherwise.
+ */
+template <typename R, typename V>
+bool contains(const R& container, const V& value) {
+    return std::find(std::begin(container), std::end(container), value) != std::end(container);
 }

 /**
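
The rewritten contains() accepts any range that works with std::begin/std::end rather than only std::vector, which is what allows the pass above to call util::contains on a std::set of indices. A usage sketch (illustrative only):

    #include "openvino/util/common_util.hpp"

    #include <cstdint>
    #include <set>
    #include <vector>

    int main() {
        const std::vector<int> vec{1, 2, 3};
        const std::set<uint64_t> indices{0, 2};

        const bool in_vec = ov::util::contains(vec, 2);      // true, same behaviour as the old vector-only overload
        const bool in_set = ov::util::contains(indices, 1);  // false; sets and other ranges now work as well
        return (in_vec && !in_set) ? 0 : 1;
    }
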
4 changes: 4 additions & 0 deletions src/core/reference/include/openvino/reference/reduce_mean.hpp
@@ -26,6 +26,10 @@ void reduce_mean(const T* in, T* out, const Shape& in_shape, const AxisSet& redu
     reduce_sum(in, out, in_shape, reduction_axes);

     const auto out_shape = util::reduce(in_shape, reduction_axes);
+    if (shape_size(in_shape) == 0) {
+        return;
+    }
+
     const auto out_size = shape_size(out_shape);
     const auto count = static_cast<T>(shape_size(in_shape) / out_size);
     std::transform(out, std::next(out, out_size), out, [count](const T value) {
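
The early return matters because reduce_mean divides each summed value by the number of input elements collapsed per output, and that count is zero for an empty input. A standalone sketch (illustrative only, not part of the commit; assumes the OpenVINO reference headers are available):

    #include "openvino/reference/reduce_mean.hpp"

    #include <vector>

    int main() {
        // A 2x0x4 input has zero elements overall, but the reduced output {2, 4} still has 8 slots.
        const ov::Shape in_shape{2, 0, 4};
        const ov::AxisSet reduction_axes{1};
        std::vector<float> in;            // nothing to read
        std::vector<float> out(8, 0.0f);  // shape_size({2, 4}) == 8

        // Without the guard, count == shape_size(in_shape) / out_size == 0 and every output
        // became 0.0f / 0.0f (NaN); with the early return the kernel stops after reduce_sum.
        ov::reference::reduce_mean(in.data(), out.data(), in_shape, reduction_axes);
        return 0;
    }
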