Unverified commit fe054d67, authored by Adam Osewski, committed by GitHub

Unify (static) auto-broadcasting helpers. (#4242)

* Helper function get_axes_mapping.

* Enhance Broadcast:v1 NUMPY broadcasting.

- Enable NUMPY broadcasting mechanism to work in both directions:
    target_shape <-> arg_shape

* Add opset1:squeeze and fix bug in reading squeezed axis idx.

* Fix and enhance downgrade pass for Broadcast:v1

* Use Broadcast:v1 in ONNX Expand operator.

* Replace Broadcast:v0 with v1 in some helper functions.

* Remove call to deprecated legacy_broadcasting helper function.

* Add helper get_axes_mapping_output function.

* Use directly Broadcast:v1 instead of helper function.

* Get back operators from v0 in helper function.

* Use helper function and some refactoring.

* Add legacy style broadcast helper function for opset1.

* Use helper broadcasting function for arithmetic operators.

* Add empty axis only if its size is equal to one.

* Apply review remarks:

- Rename broadcasting function deleting _values_ infix
- Remove variables used only once.
- Use STL library where possible.
- Remove unnecessary conditions.

* Add helper for Broadcast:v1.

* Fix merge artifact and force unsigned type for argument.

* Review. Add additional check for static output.

* Apply clang-format.

* Fix: call v0 ops in ngraph::builder namespace.

* Move opset1 broadcasting helpers from util/broadcasting.hpp

* Use autobroadcast built-in mechanism for arithmetic operators in RNN.

* Move helper functions to autobroadcast.hpp file.

- Update calls with new namespace ngraph::builder
- Remove calls using shared_ptr<ngraph::Node> and replace them with
  one accepting Output<ngraph::Node>
- Some small formatting (remove unnecessary namespace prefix)

* Remove unused function.

* Rename error class to reflect it's NumPy related.

* Fix thrown error name in autobroadcast UT.

* Code refactoring.

- Use one set of helpers to broadcast node according to NumPy scheme

* Documentation formatting.

* Remove include to deleted header.

* Apply style format.

* Remove std:: prefix.

* Do reshape and/or broadcast only when necessary.

* Remove std:: and ngraph:: prefixes.

* UT for numpy_broadcast_for_matmul and legacy broadcast.

* Rename helper function.

* UT for opset1 legacy broadcast helper function.

* Add more UT for get_axes_mapping and style-format.

* Review comments.

* Restrict if with NGRAPH_WARN to NGRAPH_CHECK.
Co-authored-by: Michał Karzyński <postrational@users.noreply.github.com>
Co-authored-by: Sang Ik Lee <sang.ik.lee@intel.com>
parent 639ff3f1
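
The net effect of the change is that all static broadcasting helpers now live in ngraph::builder, with the Broadcast:v1-based variants under ngraph::builder::opset1. A hedged usage sketch against the signatures declared in the headers changed below; the parameter shapes and names are made up for illustration:

#include <memory>

#include "ngraph/builder/autobroadcast.hpp"
#include "ngraph/op/parameter.hpp"

using namespace ngraph;

void broadcast_examples()
{
    auto a = std::make_shared<op::Parameter>(element::f32, Shape{3, 1, 10});
    auto b = std::make_shared<op::Parameter>(element::f32, Shape{5, 1});

    // Bidirectional NumPy broadcast of a pair of values to the common shape {3, 5, 10}.
    auto pair_out = builder::numpy_broadcast({a, b});

    // Unidirectional NumPy broadcast of a single value to a requested shape.
    auto c = builder::numpy_broadcast(a, Shape{7, 3, 5, 10});

    // Broadcast:v1 flavour: align b's shape at axis 1 of the target shape.
    auto d = builder::opset1::make_broadcast(b, Shape{2, 5, 1}, 1);
}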
@@ -445,8 +445,6 @@ set (SRC
     op/util/binary_elementwise_comparison.hpp
     op/util/binary_elementwise_logical.cpp
     op/util/binary_elementwise_logical.hpp
-    op/util/broadcasting.cpp
-    op/util/broadcasting.hpp
     op/util/fused_op.cpp
     op/util/fused_op.hpp
     op/util/index_reduction.cpp
......
@@ -16,40 +16,34 @@
 #include "ngraph/builder/autobroadcast.hpp"
 
+#include <memory>
+#include <numeric>
+#include <sstream>
+
 #include "ngraph/axis_vector.hpp"
+#include "ngraph/builder/reshape.hpp"
+#include "ngraph/check.hpp"
 #include "ngraph/op/broadcast.hpp"
+#include "ngraph/op/constant.hpp"
 #include "ngraph/op/reshape.hpp"
 #include "ngraph/util.hpp"
 
-#include <memory>
-#include <sstream>
-
 using namespace std;
 
 namespace ngraph
 {
     namespace builder
     {
-        autobroadcast_incompatible_shapes::autobroadcast_incompatible_shapes(
-            const ngraph::Shape& shape1, const ngraph::Shape& shape2)
-            : ngraph::ngraph_error(error_str(shape1, shape2))
+        numpy_autobroadcast_incompatible_shapes::numpy_autobroadcast_incompatible_shapes(
+            const Shape& shape1, const Shape& shape2)
+            : ngraph_error(error_str(shape1, shape2))
             , m_shape1(shape1)
             , m_shape2(shape2)
         {
         }
 
-        const ngraph::Shape& autobroadcast_incompatible_shapes::get_shape1() const
-        {
-            return m_shape1;
-        }
-
-        const ngraph::Shape& autobroadcast_incompatible_shapes::get_shape2() const
-        {
-            return m_shape2;
-        }
-
-        std::string autobroadcast_incompatible_shapes::error_str(const ngraph::Shape& shape1,
-                                                                 const ngraph::Shape& shape2)
+        string numpy_autobroadcast_incompatible_shapes::error_str(const Shape& shape1,
+                                                                  const Shape& shape2)
         {
             ostringstream os;
             os << "Auto-broadcast not possible for these input shapes:"
@@ -57,132 +51,217 @@ namespace ngraph
             return os.str();
         }
 
-        /// A utility struct representing the details computed by the
-        /// compute_shapes_and_broadcast_axes function.
-        struct Autobroadcast_plan
-        {
-            ngraph::Shape m_arg1_shape_after_possible_reshaping;
-            ngraph::Shape m_arg2_shape_after_possible_reshaping;
-            ngraph::AxisSet m_arg1_broadcast_axes;
-            ngraph::AxisSet m_arg2_broadcast_axes;
-            ngraph::Shape m_final_shape;
-        };
+        ///
+        /// \brief Calculate the output shape of numpy-style broadcast operation for two
+        ///        shapes.
+        ///
+        /// \note More info:
+        /// https://docs.scipy.org/doc/numpy/user/basics.broadcasting.html#general-broadcasting-rules
+        ///       Example: left: [3, 1, 10] right: [5, 1] return: [3, 5, 10]
+        ///
+        /// \param lhs_shape First input shape.
+        /// \param rhs_shape Second input Shape.
+        ///
+        /// \return Broadcast shape of input shapes.
+        ///
+        static Shape calculate_broadcast_shape(Shape lhs_shape, Shape rhs_shape)
+        {
+            Shape result;
+            auto lhs_rank = lhs_shape.size();
+            auto rhs_rank = rhs_shape.size();
+            auto max_rank = max(lhs_rank, rhs_rank);
+
+            // left-pad the lhs_shape with ones
+            lhs_shape.insert(begin(lhs_shape), max_rank - lhs_rank, 1);
+            // left-pad the rhs_shape with ones
+            rhs_shape.insert(begin(rhs_shape), max_rank - rhs_rank, 1);
+
+            for (size_t index = 0; index < max_rank; ++index)
+            {
+                size_t lhs_dim = lhs_shape.at(index);
+                size_t rhs_dim = rhs_shape.at(index);
+
+                if (lhs_dim != rhs_dim && lhs_dim != 1 && rhs_dim != 1)
+                {
+                    throw numpy_autobroadcast_incompatible_shapes(lhs_shape, rhs_shape);
+                }
+
+                result.push_back(max(lhs_dim, rhs_dim));
+            }
+
+            return result;
+        };
-        /// \brief Compute the details regarding what reshape and/or broadcast operations must be
-        ///        applied to arg1 and/or arg2, as well as what the final resulting shape shall
-        ///        be.
-        ///
-        /// If this algorithm cannot handle the particular combination of shapes supplied as
-        /// inputs, throw an ngraph::builder::autobroadcast_incompatible_shapes exception.
-        ///
-        /// \exception ngraph::builder::autobroadcast_incompatible_shapes
-        static Autobroadcast_plan
-            compute_shapes_and_broadcast_axes(const ngraph::Shape& arg1_in_shape,
-                                              const ngraph::Shape& arg2_in_shape)
-        {
-            Autobroadcast_plan plan;
-
-            size_t arg1_size = arg1_in_shape.size();
-            size_t arg2_size = arg2_in_shape.size();
-            size_t axis = std::max(arg1_size, arg2_size) - 1;
-
-            // per numpy definition of broadcast:
-            // start with trailing dimensions and work forward
-            // two dimensions are compatible:
-            //  * if they are equal
-            //  * if one of them is 1
-            while (arg1_size >= 1 || arg2_size >= 1)
-            {
-                size_t arg1_dim = arg1_size ? arg1_in_shape[arg1_size - 1] : 1;
-                size_t arg2_dim = arg2_size ? arg2_in_shape[arg2_size - 1] : 1;
-
-                if (arg1_dim == arg2_dim)
-                {
-                    // add dimension to broadcast shape + arg1/arg2 reshape
-                    plan.m_final_shape.insert(plan.m_final_shape.begin(), arg1_dim);
-                    plan.m_arg1_shape_after_possible_reshaping.insert(
-                        plan.m_arg1_shape_after_possible_reshaping.begin(), arg1_dim);
-                    plan.m_arg2_shape_after_possible_reshaping.insert(
-                        plan.m_arg2_shape_after_possible_reshaping.begin(), arg2_dim);
-                }
-                else if (arg2_dim == 1)
-                {
-                    // add arg1 dimension to broadcast shape and arg1 reshape
-                    plan.m_final_shape.insert(plan.m_final_shape.begin(), arg1_dim);
-                    plan.m_arg1_shape_after_possible_reshaping.insert(
-                        plan.m_arg1_shape_after_possible_reshaping.begin(), arg1_dim);
-                    // add current axis to arg2 broadcast axes
-                    plan.m_arg2_broadcast_axes.insert(plan.m_arg2_broadcast_axes.begin(), axis);
-                }
-                else if (arg1_dim == 1)
-                {
-                    // add arg2 dimension to broadcast shape and arg2 reshape
-                    plan.m_final_shape.insert(plan.m_final_shape.begin(), arg2_dim);
-                    plan.m_arg2_shape_after_possible_reshaping.insert(
-                        plan.m_arg2_shape_after_possible_reshaping.begin(), arg2_dim);
-                    // add current axis to arg1 broadcast axes
-                    plan.m_arg1_broadcast_axes.insert(plan.m_arg1_broadcast_axes.begin(), axis);
-                }
-                else
-                {
-                    throw autobroadcast_incompatible_shapes(arg1_in_shape, arg2_in_shape);
-                }
-
-                if (arg1_size)
-                {
-                    --arg1_size;
-                }
-                if (arg2_size)
-                {
-                    --arg2_size;
-                }
-                if (axis)
-                {
-                    --axis;
-                }
-            }
-
-            return plan;
-        }
-
-        /// If necessary, wrap \p node with an additional reshape and/or broadcast op.
-        /// Return a pointer to the node that produces the wrapped value.
-        /// If no additional reshape or broadcast op was needed, simply return \p node.
-        static std::shared_ptr<Node>
-            add_required_ops(const Output<Node>& value,
-                             const ngraph::Shape& shape_after_possible_reshaping,
-                             const ngraph::AxisSet& broadcast_axes,
-                             const ngraph::Shape& final_shape)
-        {
-            Output<Node> return_value{value};
-
-            if (value.get_shape() != shape_after_possible_reshaping)
-            {
-                // tell reshape to examine input dimensions in order
-                ngraph::AxisVector order = ngraph::get_default_order(value.get_shape());
-                return_value = std::make_shared<ngraph::op::Reshape>(
-                    return_value, order, shape_after_possible_reshaping);
-            }
-
-            if (final_shape != shape_after_possible_reshaping)
-            {
-                return_value = std::make_shared<ngraph::op::Broadcast>(
-                    return_value, final_shape, broadcast_axes);
-            }
-
-            return return_value.get_node_shared_ptr()->add_provenance_group_members_above({value});
-        }
+        ///
+        /// \brief Calculate the output shape of numpy-style broadcast operation for all input
+        ///        shapes.
+        ///
+        ///        This function finds the maximum tensor shape that will be the result of
+        ///        element-wise operation that will be applied to the input shapes vector.
+        ///        The function also prepares the shape of each input for the element-wise
+        ///        operation by left-padding those shapes so that their rank is equal to the
+        ///        left_shape's rank.
+        ///
+        /// \param input_shapes A vector of input shapes for which a common shape should be
+        ///                     found
+        ///
+        /// \return A pair that contains the target shape as its first object and a vector of
+        ///         padded input shapes ready to be broadcasted as the second object
+        ///
+        static pair<Shape, vector<Shape>>
+            get_numpy_broadcast_shapes(const vector<Shape>& input_shapes)
+        {
+            Shape target_shape = accumulate(
+                begin(input_shapes), end(input_shapes), Shape{}, calculate_broadcast_shape);
+
+            vector<Shape> full_shapes;
+            for (const Shape& input : input_shapes)
+            {
+                Shape padded_shape{input};
+                padded_shape.insert(
+                    begin(padded_shape), target_shape.size() - padded_shape.size(), 1);
+                full_shapes.push_back(move(padded_shape));
+            }
+
+            return {target_shape, full_shapes};
+        }
+
+        static pair<Shape, vector<Shape>> get_numpy_broadcast_shapes(const OutputVector& values)
+        {
+            vector<Shape> input_shapes;
+
+            for (const auto& input : values)
+            {
+                input_shapes.push_back(input.get_shape());
+            }
+
+            return get_numpy_broadcast_shapes(input_shapes);
+        }
+
+        /// \brief Broadcast input node.
+        ///
+        /// \note The source shape does not have to be the actual shape of input node. However
+        ///       it should be a superset of it (containing it as a continuous subset). This
+        ///       implies we may expand the number of axes of input node. The ranks of
+        ///       source_shape and output_shape must be equal. This means that the
+        ///       source_shape has to be padded with ones for this operation.
+        ///
+        /// \param[in] value The input Node to be broadcast.
+        /// \param[in] output_shape The output shape.
+        /// \param[in] source_shape The source shape from which we want to broadcast input node.
+        ///
+        /// \return The broadcasted Node.
+        ///
+        static shared_ptr<Node> numpy_broadcast_node(const Output<Node>& value,
+                                                     const Shape& output_shape,
+                                                     const Shape& source_shape)
+        {
+            shared_ptr<Node> broadcasted_node = value.as_single_output_node();
+            // If node already has the required shape, return original node
+            if (output_shape == value.get_shape())
+            {
+                return broadcasted_node;
+            }
+
+            NGRAPH_CHECK(source_shape.size() == output_shape.size(),
+                         "Ranks of source_shape and output_shape dont match: ",
+                         source_shape.size(),
+                         " vs ",
+                         output_shape.size());
+
+            AxisVector broadcast_axes;
+            Shape squeezed_shape;
+            // Positions of axes which have length of 1 are needed to calculate broadcast_axes
+            // for nGraph broadcast operation. We need to remove ones from source shape
+            // to avoid broadcasting axis conflict.
+            for (size_t index = 0; index < output_shape.size(); ++index)
+            {
+                if (source_shape.at(index) == 1 && output_shape.at(index) != 1)
+                {
+                    broadcast_axes.push_back(index);
+                }
+                else
+                {
+                    squeezed_shape.push_back(source_shape.at(index));
+                }
+            }
+
+            if (squeezed_shape != value.get_shape())
+            {
+                broadcasted_node = builder::reshape(value, squeezed_shape);
+            }
+
+            if (!broadcast_axes.empty())
+            {
+                broadcasted_node =
+                    make_shared<op::Broadcast>(broadcasted_node, output_shape, broadcast_axes);
+            }
+
+            return broadcasted_node;
+        }
+
+        /// \brief Broadcast input node.
+        ///
+        /// \param[in] value The input Node to be broadcast.
+        /// \param[in] output_shape The output shape.
+        /// \param[in] axis The start index to align with output_shape
+        ///
+        /// \return The broadcasted Node.
+        ///
+        static shared_ptr<Node> broadcast_value_pdpd_style(const Output<Node>& value,
+                                                           const Shape& output_shape,
+                                                           int64_t axis)
+        {
+            auto value_shape = value.get_shape();
+
+            // If node already has the required shape, return original node
+            if (output_shape == value_shape)
+            {
+                return value.as_single_output_node();
+            }
+
+            if (axis == -1)
+            {
+                axis = output_shape.size() - value_shape.size();
+            }
+
+            auto trimmed_value_shape = value_shape;
+            while (trimmed_value_shape.size() > 0 && trimmed_value_shape.back() == 1)
+            {
+                trimmed_value_shape.pop_back();
+            }
+
+            AxisSet axes;
+            for (int64_t i = 0; i < axis; ++i)
+            {
+                axes.insert(static_cast<size_t>(i));
+            }
+
+            for (size_t i = axis + trimmed_value_shape.size(); i < output_shape.size(); ++i)
+            {
+                axes.insert(i);
+            }
+
+            auto trimmed_value = value;
+            if (value_shape != trimmed_value_shape)
+            {
+                trimmed_value = make_shared<op::Reshape>(
+                    value, get_default_order(value_shape), trimmed_value_shape);
+            }
+
+            auto value_bcast = make_shared<op::Broadcast>(trimmed_value, output_shape, axes);
+
+            return move(value_bcast);
+        }
 
-        std::pair<std::shared_ptr<Node>, std::shared_ptr<Node>>
-            numpy_broadcast(const std::pair<Output<Node>, Output<Node>>& args)
+        pair<shared_ptr<Node>, shared_ptr<Node>>
+            numpy_broadcast(const pair<Output<Node>, Output<Node>>& args)
         {
             NGRAPH_CHECK(args.first.get_node());
             NGRAPH_CHECK(args.second.get_node());
 
-            const ngraph::Shape& arg1_in_shape = args.first.get_shape();
-            const ngraph::Shape& arg2_in_shape = args.second.get_shape();
+            const Shape& arg1_in_shape = args.first.get_shape();
+            const Shape& arg2_in_shape = args.second.get_shape();
 
             // Handle the trivial case...
             if (arg1_in_shape == arg2_in_shape)
@@ -191,21 +270,269 @@ namespace ngraph
                 args.second.as_single_output_node());
             }
 
-            Autobroadcast_plan plan =
-                compute_shapes_and_broadcast_axes(arg1_in_shape, arg2_in_shape);
-
-            auto arg1_out = add_required_ops(args.first,
-                                             plan.m_arg1_shape_after_possible_reshaping,
-                                             plan.m_arg1_broadcast_axes,
-                                             plan.m_final_shape);
-
-            auto arg2_out = add_required_ops(args.second,
-                                             plan.m_arg2_shape_after_possible_reshaping,
-                                             plan.m_arg2_broadcast_axes,
-                                             plan.m_final_shape);
-
-            return {arg1_out, arg2_out};
-        }
+            NodeVector bcasted_outputs =
+                as_node_vector(numpy_broadcast_outputs({args.first, args.second}));
+
+            return make_pair(bcasted_outputs.at(0), bcasted_outputs.at(1));
+        }
+
+        OutputVector numpy_broadcast_outputs(const OutputVector& values)
+        {
+            if (values.size() <= 1)
+            {
+                return values;
+            }
+
+            // find the output tensor's shape, then broadcast all inputs so that they are compatible
+            auto bcast_shapes = get_numpy_broadcast_shapes(values);
+
+            OutputVector broadcasted_inputs;
+            for (size_t i = 0; i < values.size(); ++i)
+            {
+                broadcasted_inputs.push_back(
+                    numpy_broadcast_node(values[i], bcast_shapes.first, bcast_shapes.second[i]));
+            }
+            return broadcasted_inputs;
+        }
+
+        shared_ptr<Node> numpy_broadcast(const Output<Node>& value, const Shape& shape)
+        {
+            auto bcast_shape = get_numpy_broadcast_shapes({value.get_shape(), shape});
+            return numpy_broadcast_node(value, bcast_shape.first, bcast_shape.second[0]);
+        }
+
+        OutputVector numpy_broadcast_for_matmul_operation(const Output<Node>& left,
+                                                          const Output<Node>& right)
+        {
+            const auto& left_shape = left.get_shape();
+            const auto& right_shape = right.get_shape();
+            // Broadcast only _stack of matrices_ axes.
+            const auto& numpy_shapes =
+                get_numpy_broadcast_shapes({Shape{begin(left_shape), next(end(left_shape), -2)},
+                                            Shape{begin(right_shape), next(end(right_shape), -2)}});
+
+            // Prepare tensors output shapes with broadcasted _stack of matrices_ axes.
+            auto left_output_shape = numpy_shapes.first;
+            auto right_output_shape = numpy_shapes.first;
+            // Append the last two axes original dimensions.
+            left_output_shape.insert(end(left_output_shape),
+                                     next(begin(left_shape), left_shape.size() - 2),
+                                     end(left_shape));
+            right_output_shape.insert(end(right_output_shape),
+                                      next(begin(right_shape), right_shape.size() - 2),
+                                      end(right_shape));
+
+            auto left_full_shape = numpy_shapes.second.at(0);
+            auto right_full_shape = numpy_shapes.second.at(1);
+            // Append the last two axes original dimensions.
+            left_full_shape.insert(end(left_full_shape),
+                                   next(begin(left_shape), left_shape.size() - 2),
+                                   end(left_shape));
+            right_full_shape.insert(end(right_full_shape),
+                                    next(begin(right_shape), right_shape.size() - 2),
+                                    end(right_shape));
+
+            return {numpy_broadcast_node(left, left_output_shape, left_full_shape),
+                    numpy_broadcast_node(right, right_output_shape, right_full_shape)};
+        }
+
+        OutputVector legacy_broadcast_for_binary_operation(const Output<Node>& left,
+                                                           const Output<Node>& right,
+                                                           size_t start_match_axis)
+        {
+            const auto& left_shape = left.get_shape();
+            const auto& right_shape = right.get_shape();
+
+            bool dimensions_identical = (left_shape == right_shape);
+            if (dimensions_identical)
+            {
+                return {left, right};
+            }
+
+            // Prepare new shape of right operand for broadcasting
+            // Remove dimensions with length=1 from back
+            auto new_right_shape = right_shape;
+            for (int dimension = new_right_shape.size() - 1; dimension >= 0; --dimension)
+            {
+                if (new_right_shape[dimension] == 1)
+                {
+                    new_right_shape.pop_back();
+                }
+                else
+                {
+                    break;
+                }
+            }
+
+            // Find first dimensions at front with length different from 1
+            size_t num_ones = 0;
+            for (size_t dimension : new_right_shape)
+            {
+                if (dimension == 1)
+                {
+                    ++num_ones;
+                }
+                else
+                {
+                    break;
+                }
+            }
+
+            // Remove dimensions with length=1 from front
+            new_right_shape.erase(begin(new_right_shape), next(begin(new_right_shape), num_ones));
+
+            auto reshape_right =
+                make_shared<op::Reshape>(right, get_default_order(right_shape), new_right_shape);
+
+            // Move broadcast start axis parameter to right
+            start_match_axis += num_ones;
+
+            auto broadcast_right = make_shared<op::Broadcast>(
+                reshape_right,
+                left_shape,
+                calculate_broadcast_axes(left_shape, new_right_shape, start_match_axis));
+
+            return {left, broadcast_right};
+        }
+
+        OutputVector pdpd_broadcast(const OutputVector& inputs, int64_t axis)
+        {
+            if (inputs.size() <= 1)
+            {
+                return inputs;
+            }
+
+            OutputVector broadcasted_inputs{inputs[0]};
+            for (size_t i = 1; i < inputs.size(); ++i)
+            {
+                broadcasted_inputs.push_back(
+                    broadcast_value_pdpd_style(inputs[i], inputs[0].get_shape(), axis));
+            }
+            return broadcasted_inputs;
+        }
+
+        AxisSet calculate_broadcast_axes(const Shape& output_shape,
+                                         const Shape& input_shape,
+                                         size_t start_match_axis)
+        {
+            vector<size_t> result(output_shape.size() - input_shape.size());
+            // Populate the result vector with monotonic increasing series from 0 until
+            // output_shape_size, excluding values in range:
+            // [start_match_axis, start_match_axis + input_shape.size()]
+            iota(begin(result), begin(result) + start_match_axis, 0);
+            iota(begin(result) + start_match_axis,
+                 end(result),
+                 start_match_axis + input_shape.size());
+            return result;
+        }
+
+        namespace opset1
+        {
+            Output<Node> legacy_broadcast_for_binary_operation(const Output<Node>& left,
+                                                               const Output<Node>& right,
+                                                               size_t start_match_axis)
+            {
+                const auto& left_shape = left.get_shape();
+                const auto& right_shape = right.get_shape();
+
+                bool dimensions_identical = (left_shape == right_shape);
+                if (dimensions_identical)
+                {
+                    return right;
+                }
+
+                // Prepare new shape of right operand for broadcasting
+                // Remove dimensions with length=1 from back
+                auto new_right_shape = right_shape;
+                for (int dimension = new_right_shape.size() - 1; dimension >= 0; --dimension)
+                {
+                    if (new_right_shape.at(dimension) == 1)
+                    {
+                        new_right_shape.pop_back();
+                    }
+                    else
+                    {
+                        break;
+                    }
+                }
+
+                // Find first dimensions at front with length different from 1
+                size_t num_ones = 0;
+                for (size_t dimension : new_right_shape)
+                {
+                    if (dimension == 1)
+                    {
+                        ++num_ones;
+                    }
+                    else
+                    {
+                        break;
+                    }
+                }
+
+                // Remove dimensions with length=1 from front
+                new_right_shape.erase(begin(new_right_shape),
+                                      next(begin(new_right_shape), num_ones));
+
+                auto reshape_right = reshape(right, new_right_shape);
+
+                // Move broadcast start axis parameter to right
+                start_match_axis += num_ones;
+
+                return make_broadcast(reshape_right, left_shape, start_match_axis);
+            }
+
+            vector<size_t> get_axes_mapping(const Shape& output_shape,
+                                            const AxisSet& broadcast_axes)
+            {
+                NGRAPH_CHECK((broadcast_axes.size() <= output_shape.size()));
+                vector<size_t> axes_mapping(output_shape.size());
+                iota(axes_mapping.begin(), axes_mapping.end(), 0);
+                for (auto i = broadcast_axes.rbegin(); i != broadcast_axes.rend(); ++i)
+                {
+                    axes_mapping.erase(axes_mapping.begin() + *i);
+                }
+                return axes_mapping;
+            }
+
+            Output<Node> get_axes_mapping_output(const Shape& output_shape,
+                                                 const Shape& input_shape,
+                                                 size_t start_match_axis)
+            {
+                NGRAPH_CHECK((input_shape.size() + start_match_axis <= output_shape.size()));
+                vector<size_t> mapping(input_shape.size());
+                iota(begin(mapping), end(mapping), start_match_axis);
+
+                return op::Constant::create(element::i64, Shape{mapping.size()}, mapping);
+            }
+
+            Output<Node> get_axes_mapping_output(const Shape& output_shape,
+                                                 const AxisSet& broadcast_axes)
+            {
+                vector<size_t> axes_mapping{get_axes_mapping(output_shape, broadcast_axes)};
+                return op::Constant::create(element::i64, Shape{axes_mapping.size()}, axes_mapping);
+            }
+
+            Output<Node> make_broadcast(const Output<Node>& node,
+                                        const Shape& target_shape,
+                                        const AxisSet& broadcast_axes)
+            {
+                return make_shared<op::v1::Broadcast>(
+                    node,
+                    op::Constant::create(element::i64, Shape{target_shape.size()}, target_shape),
+                    get_axes_mapping_output(target_shape, broadcast_axes));
+            }
+
+            Output<Node> make_broadcast(const Output<Node>& node,
+                                        const Shape& target_shape,
+                                        size_t start_match_axis)
+            {
+                return make_shared<op::v1::Broadcast>(
+                    node,
+                    op::Constant::create(element::i64, Shape{target_shape.size()}, target_shape),
+                    get_axes_mapping_output(target_shape, node.get_shape(), start_match_axis));
+            }
+        } // namespace opset1
     } // namespace builder
 } // namespace ngraph
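
For reference, the rule calculate_broadcast_shape implements is NumPy's: right-align the two shapes, left-pad the shorter one with ones, and take the per-axis maximum, rejecting any axis where the sizes differ and neither is 1. A self-contained sketch of the same rule using plain std::vector in place of ngraph::Shape (an illustration, not the library code):

#include <algorithm>
#include <cassert>
#include <stdexcept>
#include <vector>

using Shape = std::vector<size_t>;

// Right-align both shapes by left-padding with 1s, then take the per-axis
// maximum; sizes must match or one of them must be 1.
Shape broadcast_shape(Shape lhs, Shape rhs)
{
    const size_t max_rank = std::max(lhs.size(), rhs.size());
    lhs.insert(lhs.begin(), max_rank - lhs.size(), 1);
    rhs.insert(rhs.begin(), max_rank - rhs.size(), 1);

    Shape result;
    for (size_t i = 0; i < max_rank; ++i)
    {
        if (lhs[i] != rhs[i] && lhs[i] != 1 && rhs[i] != 1)
        {
            throw std::runtime_error("incompatible shapes");
        }
        result.push_back(std::max(lhs[i], rhs[i]));
    }
    return result;
}

int main()
{
    // The example from the doc comment: [3, 1, 10] vs [5, 1] -> [3, 5, 10].
    assert((broadcast_shape({3, 1, 10}, {5, 1}) == Shape{3, 5, 10}));
    return 0;
}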
@@ -16,25 +16,23 @@
 #pragma once
 
-#include "ngraph/except.hpp"
-#include "ngraph/node.hpp"
-
 #include <memory>
 #include <utility>
 
+#include "ngraph/except.hpp"
+#include "ngraph/node.hpp"
+#include "ngraph/op/broadcast.hpp"
+
 namespace ngraph
 {
     namespace builder
     {
-        class autobroadcast_incompatible_shapes : public ngraph::ngraph_error
+        class numpy_autobroadcast_incompatible_shapes : public ngraph::ngraph_error
         {
         public:
-            autobroadcast_incompatible_shapes(const ngraph::Shape& shape1,
-                                              const ngraph::Shape& shape2);
-
-            const ngraph::Shape& get_shape1() const;
-            const ngraph::Shape& get_shape2() const;
+            numpy_autobroadcast_incompatible_shapes(const ngraph::Shape& shape1,
+                                                    const ngraph::Shape& shape2);
 
         private:
             const ngraph::Shape m_shape1;
             const ngraph::Shape m_shape2;
@@ -42,6 +40,33 @@ namespace ngraph
 
             static std::string error_str(const ngraph::Shape& shape1, const ngraph::Shape& shape2);
         };
+        ///
+        /// \brief      Broadcast all values, if necessary, to obtain equal shapes according
+        ///             to NumPy's auto-broadcasting scheme.
+        ///
+        /// \note       There are some shape combinations which the autobroadcast algorithm cannot
+        ///             handle. An exception is thrown when such combinations are provided to this
+        ///             function.
+        ///
+        /// \param      values  Vector of output values.
+        ///
+        /// \exception  ngraph::builder::numpy_autobroadcast_incompatible_shapes
+        ///
+        /// \return     Vector of broadcasted values.
+        ///
+        OutputVector numpy_broadcast_outputs(const OutputVector& values);
+
+        ///
+        /// \brief      Broadcast input value to provided shape using NumPy's auto-broadcasting
+        ///             rules.
+        ///
+        /// \param      value  Input value
+        /// \param      shape  Requested output shape
+        ///
+        /// \return     Node producing values with requested shape.
+        ///
+        std::shared_ptr<Node> numpy_broadcast(const Output<Node>& value, const Shape& shape);
+
         /// \brief Wrap two graph values, if necessary, to obtain values with identical shapes,
         ///        using NumPy's auto-broadcast rules.
         ///
@@ -69,7 +94,7 @@ namespace ngraph
         /// - If an exception was not thrown, then the return value's \p first and \p second
         ///   elements point to ngraph::Node objects whose output values have the same shape.
         ///
-        /// \exception ngraph::builder::autobroadcast_incompatible_shapes
+        /// \exception ngraph::builder::numpy_autobroadcast_incompatible_shapes
         std::pair<std::shared_ptr<Node>, std::shared_ptr<Node>>
             numpy_broadcast(const std::pair<Output<Node>, Output<Node>>& args);
@@ -86,7 +111,7 @@ namespace ngraph
         ///
         /// \return The sink node of any/all nodes created by this function. Will never be null.
         ///
-        /// \exception ngraph::builder::autobroadcast_incompatible_shapes
+        /// \exception ngraph::builder::numpy_autobroadcast_incompatible_shapes
         template <typename NodeType>
         std::shared_ptr<NodeType>
             make_with_numpy_broadcast(const Output<Node>& operand1_reshapeable,
@@ -113,7 +138,7 @@ namespace ngraph
         ///
         /// \return The sink node of any/all nodes created by this function. Will never be null.
         ///
-        /// \exception ngraph::builder::autobroadcast_incompatible_shapes
+        /// \exception ngraph::builder::numpy_autobroadcast_incompatible_shapes
         template <typename NodeType>
         std::shared_ptr<Node> make_with_numpy_broadcast(const Output<Node>& operand1,
                                                         const Output<Node>& operand2_reshapeable,
@@ -123,5 +148,174 @@ namespace ngraph
             return std::make_shared<NodeType>(
                 operand1, shaped_op2_op3.first, shaped_op2_op3.second);
         }
+        ///
+        /// \brief      Cast shape of two outputs to make them compatible for an element-wise
+        ///             binary operation.
+        ///
+        /// \note       If necessary the right-hand-side argument will be broadcast to match the
+        ///             shape of left-hand-side argument. The start of the mutually equal shape
+        ///             is specified by the argument "start_match_axis", and if it is not set
+        ///             suffix matching is assumed.
+        ///
+        /// \note       This style of broadcast was used in ONNX Op sets prior to version 7, where
+        ///             it was replaced by NumPy style auto-broadcasting mechanism.
+        ///
+        /// \param      left              Node which contains input of binary op.
+        /// \param      right             Node which contains input of binary op.
+        /// \param      start_match_axis  Position in shape denoting start of the mutually equal
+        ///                               shape
+        ///
+        /// \return     Left and right node after broadcasting.
+        ///
+        OutputVector legacy_broadcast_for_binary_operation(const Output<Node>& left,
+                                                           const Output<Node>& right,
+                                                           size_t start_match_axis);
+
+        /// \brief      Broadcast shape of two nodes to make them compatible for a matrix
+        ///             multiplication.
+        ///
+        /// \note       This function reflects the broadcasting behaviour of NumPy's `matmul`
+        ///             operation.
+        ///             (https://docs.scipy.org/doc/numpy/reference/generated/numpy.matmul.html)
+        ///             This means that only \"stack of matrices\" axes are bidirectionally
+        ///             broadcasted. The last two dimensions are left untouched.
+        ///
+        /// \param[in]  left   The Node providing data for the left-hand side of matrix
+        ///                    multiplication.
+        /// \param[in]  right  The Node providing data for the right-hand side of matrix
+        ///                    multiplication.
+        ///
+        /// \return     The vector containing both outputs broadcasted.
+        ///
+        OutputVector numpy_broadcast_for_matmul_operation(const Output<Node>& left,
+                                                          const Output<Node>& right);
+
+        /// \brief      Cast shape of all input nodes for an element-wise operation that requires
+        ///             shape-compatibility
+        ///
+        /// \param      inputs  Original list of inputs
+        /// \param      axis    Index starting to align
+        ///
+        /// \return     pdpd-style broadcasted list of nodes.
+        OutputVector pdpd_broadcast(const OutputVector& inputs, int64_t axis);
+
+        /// \brief      Generate a list of broadcast axes.
+        ///
+        /// \details    Informally, a broadcast "adds" axes to the input tensor, replicating
+        ///             elements from the input tensor as needed to fill the new dimensions.
+        ///             The function calculates which of the output axes are added in this way.
+        ///
+        /// \param      output_shape      The new shape for the output tensor.
+        /// \param      input_shape       The shape of input tensor.
+        /// \param      start_match_axis  The axis along which we want to replicate elements.
+        ///                               The starting axis position (0-based) in the output
+        ///                               shape from which the current shape of the tensor
+        ///                               matches the desired new shape.
+        ///
+        /// \return     The indices of added axes.
+        AxisSet calculate_broadcast_axes(const Shape& output_shape,
+                                         const Shape& input_shape,
+                                         std::size_t start_match_axis);
+
+        /// \brief      Generate a list of broadcast along axes.
+        ///
+        /// \details    Broadcast "adds" elements along axes to the input tensor, replicating
+        ///             elements from the input tensor as needed to fill the new dimensions.
+        ///             The function calculates which of the output axes are added in this way.
+        ///
+        ///             This function will attempt to match shapes, assuming the current shape
+        ///             matches the rightmost positions of the desired new shape. This behaviour
+        ///             is similar to NumPy's broadcasting.
+        ///
+        /// \param      output_shape  The new shape for the output tensor.
+        /// \param      input_shape   The shape of input tensor.
+        ///
+        /// \return     The indices of added axes.
+        inline AxisSet calculate_broadcast_axes(const Shape& output_shape, const Shape& input_shape)
+        {
+            return calculate_broadcast_axes(
+                output_shape, input_shape, output_shape.size() - input_shape.size());
+        }
+
+        inline std::shared_ptr<Node> make_broadcast_node(const Output<Node>& output,
+                                                         Shape new_shape)
+        {
+            return std::make_shared<op::Broadcast>(
+                output, new_shape, calculate_broadcast_axes(new_shape, output.get_shape()));
+        }
+
+        inline std::shared_ptr<Node> make_broadcast_node(const Output<Node>& value,
+                                                         const Shape& new_shape,
+                                                         std::size_t start_match_axis)
+        {
+            return std::make_shared<op::Broadcast>(
+                value,
+                new_shape,
+                calculate_broadcast_axes(new_shape, value.get_shape(), start_match_axis));
+        }
+
+        namespace opset1
+        {
+            ///
+            /// \brief      Broadcast right node to left node's shape using legacy scheme.
+            ///
+            /// \param[in]  left              The left hand side node of binary operation.
+            /// \param[in]  right             The right hand side node of binary operation. The one
+            ///                               to be broadcasted.
+            /// \param[in]  start_match_axis  The axis index starting mutually equal shapes
+            ///                               of both nodes.
+            ///
+            /// \return     The Output object connected to node producing broadcasted right node.
+            ///
+            Output<Node> legacy_broadcast_for_binary_operation(const Output<Node>& left,
+                                                               const Output<Node>& right,
+                                                               size_t start_match_axis);
+
+            ///
+            /// \brief      Reconstructs axes mapping vector for Broadcast:v1 operation.
+            ///
+            /// \param[in]  output_shape    The output shape of Broadcast operation.
+            /// \param[in]  broadcast_axes  The broadcast axes used for Broadcast:v0 operator.
+            ///
+            /// \return     The vector with axes indices mapping.
+            ///
+            std::vector<std::size_t> get_axes_mapping(const Shape& output_shape,
+                                                      const AxisSet& broadcast_axes);
+
+            ///
+            /// \brief      Creates Node returning the axes mapping for Broadcast:v1 operation.
+            ///
+            /// \param[in]  output_shape      The output shape of Broadcast operation.
+            /// \param[in]  input_shape       The input shape.
+            /// \param[in]  start_match_axis  The axis index at which input shape starts to be
+            ///                               identical to the output shape.
+            ///
+            /// \return     Returns the Output object pointing to node with the axes mapping.
+            ///
+            Output<Node> get_axes_mapping_output(const Shape& output_shape,
+                                                 const Shape& input_shape,
+                                                 std::size_t start_match_axis);
+
+            ///
+            /// \brief      Creates Node returning the axes mapping for Broadcast:v1 operation.
+            ///
+            /// \param[in]  output_shape    The output shape of Broadcast operation.
+            /// \param[in]  broadcast_axes  The broadcast axes used for Broadcast:v0 operator.
+            ///
+            /// \return     The Output object with Node returning axes mapping.
+            ///
+            Output<Node> get_axes_mapping_output(const Shape& output_shape,
+                                                 const AxisSet& broadcast_axes);
+
+            Output<Node> make_broadcast(const Output<Node>& node,
+                                        const Shape& target_shape,
+                                        const AxisSet& broadcast_axes);
+
+            Output<Node> make_broadcast(const Output<Node>& node,
+                                        const Shape& target_shape,
+                                        std::size_t start_match_axis);
+        } // namespace opset1
     } // namespace builder
 } // namespace ngraph
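
The opset1::get_axes_mapping declared above is the bridge between the two Broadcast versions: v0 names the axes that are created from scratch, while v1 wants, for every input axis, the output axis it maps to. A standalone sketch of that translation with hypothetical shapes (std::set stands in for ngraph::AxisSet; this is an illustration, not the library code):

#include <cassert>
#include <numeric>
#include <set>
#include <vector>

// Output positions of the surviving input axes, given the axes that
// Broadcast:v0 would have created.
std::vector<size_t> axes_mapping(size_t output_rank, const std::set<size_t>& broadcast_axes)
{
    std::vector<size_t> mapping(output_rank);
    std::iota(mapping.begin(), mapping.end(), 0);
    // Erase from the back so earlier indices stay valid.
    for (auto it = broadcast_axes.rbegin(); it != broadcast_axes.rend(); ++it)
    {
        mapping.erase(mapping.begin() + *it);
    }
    return mapping;
}

int main()
{
    // Rank-4 output with axes 0 and 2 created by broadcasting:
    // the two surviving input axes land at output positions 1 and 3.
    assert((axes_mapping(4, {0, 2}) == std::vector<size_t>{1, 3}));
    return 0;
}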
@@ -18,6 +18,7 @@
 #include <iterator>
 #include <memory>
 
+#include "ngraph/builder/autobroadcast.hpp"
 #include "ngraph/builder/make_constant.hpp"
 #include "ngraph/builder/matmul_factory.hpp"
 #include "ngraph/builder/reshape.hpp"
@@ -26,7 +27,6 @@
 #include "ngraph/op/quantized_dot.hpp"
 #include "ngraph/op/reshape.hpp"
 #include "ngraph/op/slice.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 
 using namespace ngraph;
 using namespace std;
@@ -92,7 +92,7 @@ NodeVector builder::MatmulFactory::make_matmul_op()
     if (left_rank > 1 && right_rank > 1)
     {
         const OutputVector& broadcasted_nodes =
-            op::numpy_style_broadcast_for_matmul_operation(left, right);
+            builder::numpy_broadcast_for_matmul_operation(left, right);
 
         left = broadcasted_nodes.at(0);
         right = broadcasted_nodes.at(1);
......
@@ -16,7 +16,7 @@
 #include "add.hpp"
 #include "default_opset.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
+#include "ngraph/builder/autobroadcast.hpp"
 #include "ngraph/shape.hpp"
 
 namespace ngraph
@@ -35,7 +35,7 @@ namespace ngraph
                     auto rhs_rank = rhs_node.get_shape().size();
                     auto axis = node.get_attribute_value<std::int64_t>("axis", lhs_rank - rhs_rank);
                     // Unidirectional broadcast right node to left shape.
-                    rhs_node = ngraph::op::opset1::legacy_style_broadcast_for_binary_operation(
+                    rhs_node = ngraph::builder::opset1::legacy_broadcast_for_binary_operation(
                         lhs_node, rhs_node, axis);
                     return {std::make_shared<default_opset::Add>(
                         lhs_node, rhs_node, ngraph::op::AutoBroadcastSpec::NONE)};
......
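
The call above keeps ONNX's pre-opset-7 broadcast semantics: the right operand is aligned with the left one starting at `axis` and replicated along the remaining axes. A standalone sketch of which axes get filled in, mirroring what builder::calculate_broadcast_axes produces (the ranks and axis value are made up):

#include <cassert>
#include <numeric>
#include <vector>

// Axes the right operand must be replicated along when its shape is aligned
// with the left operand's shape starting at start_match_axis (the scheme of
// ONNX op sets prior to version 7).
std::vector<size_t> legacy_broadcast_axes(size_t left_rank,
                                          size_t right_rank,
                                          size_t start_match_axis)
{
    std::vector<size_t> axes(left_rank - right_rank);
    // 0 .. start_match_axis-1, then start_match_axis+right_rank .. left_rank-1.
    std::iota(axes.begin(), axes.begin() + start_match_axis, 0);
    std::iota(axes.begin() + start_match_axis, axes.end(), start_match_axis + right_rank);
    return axes;
}

int main()
{
    // left: {2, 3, 4, 5}, right: {3, 4}, axis = 1 -> replicate along axes 0 and 3.
    assert((legacy_broadcast_axes(4, 2, 1) == std::vector<size_t>{0, 3}));
    return 0;
}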
@@ -25,7 +25,7 @@
 #include "ngraph/op/fused/group_conv.hpp"
 #include "ngraph/op/slice.hpp"
 #include "ngraph/op/util/attr_types.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
+#include "ngraph/opsets/opset0.hpp"
 #include "utils/convpool.hpp"
 
 namespace ngraph
......
@@ -20,14 +20,13 @@
 #include <memory>
 #include <vector>
 
-#include "builder/reshape.hpp"
 #include "conv_transpose.hpp"
 #include "default_opset.hpp"
 #include "exceptions.hpp"
+#include "ngraph/builder/autobroadcast.hpp"
+#include "ngraph/builder/reshape.hpp"
 #include "ngraph/coordinate_diff.hpp"
 #include "ngraph/op/util/attr_types.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
-#include "ngraph/opsets/opset0.hpp"
 #include "ngraph/shape.hpp"
 #include "utils/convpool.hpp"
@@ -93,7 +92,7 @@ namespace ngraph
                     Shape new_filters_shape{weights_shape};
                     new_filters_shape.at(0) /= groups;
                     new_filters_shape.insert(std::begin(new_filters_shape), groups);
-                    filters = builder::reshape(filters, new_filters_shape);
+                    filters = builder::opset1::reshape(filters, new_filters_shape);
 
                     std::shared_ptr<ngraph::Node> conv_node;
                     if (!output_shape.empty())
......
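
The grouped ConvTranspose path above only reshapes the filters: the output-channel axis is split into (groups, channels_per_group) before builder::opset1::reshape is called. A minimal sketch of that shape computation (the sizes are hypothetical):

#include <cassert>
#include <vector>

// Split the output-channel axis of the filter shape into
// (groups, channels_per_group), as done before the reshape above.
std::vector<size_t> group_filter_shape(std::vector<size_t> shape, size_t groups)
{
    shape.at(0) /= groups;
    shape.insert(shape.begin(), groups);
    return shape;
}

int main()
{
    // Filters {8, 3, 2, 2} with groups = 2 -> {2, 4, 3, 2, 2}.
    assert((group_filter_shape({8, 3, 2, 2}, 2) == std::vector<size_t>{2, 4, 3, 2, 2}));
    return 0;
}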
@@ -20,8 +20,8 @@
 #include "core/node.hpp"
 #include "default_opset.hpp"
+#include "ngraph/builder/autobroadcast.hpp"
 #include "ngraph/node.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 #include "ngraph/shape.hpp"
 
 namespace ngraph
@@ -40,7 +40,7 @@ namespace ngraph
                     auto rhs_rank = rhs_node.get_shape().size();
                     auto axis = node.get_attribute_value<std::int64_t>("axis", lhs_rank - rhs_rank);
                     // Unidirectional broadcast right node to left shape.
-                    rhs_node = ngraph::op::opset1::legacy_style_broadcast_for_binary_operation(
+                    rhs_node = ngraph::builder::opset1::legacy_broadcast_for_binary_operation(
                         lhs_node, rhs_node, axis);
                     return {std::make_shared<default_opset::Divide>(
                         lhs_node, rhs_node, ngraph::op::AutoBroadcastSpec::NONE)};
......
@@ -27,7 +27,6 @@
 #include "ngraph/op/experimental/range.hpp"
 #include "ngraph/op/experimental/shape_of.hpp"
 #include "ngraph/op/reshape.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 #include "ngraph/shape.hpp"
 
 namespace ngraph
......
@@ -22,7 +22,6 @@
 #include "default_opset.hpp"
 #include "ngraph/node.hpp"
 #include "ngraph/op/greater.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 
 namespace ngraph
 {
......
@@ -21,13 +21,13 @@
 #include "exceptions.hpp"
 #include "instance_norm.hpp"
 #include "ngraph/axis_set.hpp"
+#include "ngraph/builder/autobroadcast.hpp"
 #include "ngraph/builder/reduce_ops.hpp"
 #include "ngraph/op/add.hpp"
 #include "ngraph/op/divide.hpp"
 #include "ngraph/op/multiply.hpp"
 #include "ngraph/op/sqrt.hpp"
 #include "ngraph/op/subtract.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 #include "ngraph/opsets/opset0.hpp"
 #include "utils/common.hpp"
@@ -68,15 +68,16 @@ namespace ngraph
                     std::make_shared<default_opset::Constant>(
                         data->get_element_type(), data_shape, std::vector<float>{epsilon});
 
-                    scale = ngraph::op::opset1::make_broadcast(scale, data_shape, 1);
-                    bias = ngraph::op::opset1::make_broadcast(bias, data_shape, 1);
+                    scale = ngraph::builder::opset1::make_broadcast(scale, data_shape, 1);
+                    bias = ngraph::builder::opset1::make_broadcast(bias, data_shape, 1);
 
                     Output<ngraph::Node> mean = builder::mean(data, reduction_axes);
-                    mean = ngraph::op::opset1::make_broadcast(mean, data_shape, reduction_axes);
+                    mean =
+                        ngraph::builder::opset1::make_broadcast(mean, data_shape, reduction_axes);
 
                     Output<ngraph::Node> variance = builder::variance(data, reduction_axes);
-                    variance =
-                        ngraph::op::opset1::make_broadcast(variance, data_shape, reduction_axes);
+                    variance = ngraph::builder::opset1::make_broadcast(
+                        variance, data_shape, reduction_axes);
 
                     const auto sqrt = std::make_shared<default_opset::Sqrt>(variance + eps_node);
......
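
In the InstanceNorm path above, make_broadcast(scale, data_shape, 1) aligns the per-channel scale (shape {C}) at axis 1 of the {N, C, H, W} data, replicating it along N, H and W. Under the hood builder::opset1::get_axes_mapping_output builds the axes-mapping constant with an iota starting at start_match_axis; a standalone sketch of that mapping using plain std types (an illustration, not the library code):

#include <cassert>
#include <numeric>
#include <vector>

// The axes mapping fed to Broadcast:v1: input axis i maps to output axis
// start_match_axis + i.
std::vector<size_t> axes_mapping(size_t input_rank, size_t start_match_axis)
{
    std::vector<size_t> mapping(input_rank);
    std::iota(mapping.begin(), mapping.end(), start_match_axis);
    return mapping;
}

int main()
{
    // InstanceNorm case: scale has shape {C}; data is {N, C, H, W}.
    // Aligning at axis 1 maps scale's only axis to output axis 1.
    assert((axes_mapping(1, 1) == std::vector<size_t>{1}));
    return 0;
}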
@@ -22,7 +22,6 @@
 #include "default_opset.hpp"
 #include "ngraph/node.hpp"
 #include "ngraph/op/less.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 
 namespace ngraph
 {
......
@@ -19,7 +19,6 @@
 #include "core/node.hpp"
 #include "default_opset.hpp"
 #include "ngraph/node.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 #include "utils/variadic.hpp"
 
 namespace ngraph
......
@@ -20,10 +20,10 @@
 #include "core/node.hpp"
 #include "default_opset.hpp"
+#include "ngraph/builder/autobroadcast.hpp"
 #include "ngraph/node.hpp"
 #include "ngraph/op/broadcast.hpp"
 #include "ngraph/op/multiply.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 
 namespace ngraph
 {
@@ -41,7 +41,7 @@ namespace ngraph
                     auto rhs_rank = rhs_node.get_shape().size();
                     auto axis = node.get_attribute_value<std::int64_t>("axis", lhs_rank - rhs_rank);
                     // Unidirectional broadcast right node to left shape.
-                    rhs_node = ngraph::op::opset1::legacy_style_broadcast_for_binary_operation(
+                    rhs_node = ngraph::builder::opset1::legacy_broadcast_for_binary_operation(
                         lhs_node, rhs_node, axis);
                     return {std::make_shared<default_opset::Multiply>(
                         lhs_node, rhs_node, ngraph::op::AutoBroadcastSpec::NONE)};
......
@@ -21,7 +21,6 @@
 #include "core/node.hpp"
 #include "default_opset.hpp"
 #include "ngraph/node.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 
 namespace ngraph
 {
......
@@ -18,10 +18,10 @@
 #include <vector>
 
 #include "default_opset.hpp"
+#include "ngraph/builder/autobroadcast.hpp"
 #include "ngraph/op/abs.hpp"
 #include "ngraph/op/add.hpp"
 #include "ngraph/op/divide.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 #include "ngraph/shape.hpp"
 #include "softsign.hpp"
@@ -40,7 +40,7 @@ namespace ngraph
                     std::shared_ptr<ngraph::Node> one_node =
                         std::make_shared<default_opset::Constant>(
                             data->get_element_type(), Shape{}, std::vector<double>{1});
-                    one_node = ngraph::op::make_broadcast_node(one_node, data->get_shape());
+                    one_node = ngraph::builder::make_broadcast_node(one_node, data->get_shape());
 
                     return {data / (std::make_shared<default_opset::Abs>(data) + one_node)};
                 }
......
@@ -18,8 +18,8 @@
 #include "core/node.hpp"
 #include "default_opset.hpp"
+#include "ngraph/builder/autobroadcast.hpp"
 #include "ngraph/node.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 
 namespace ngraph
 {
@@ -37,7 +37,7 @@ namespace ngraph
                     auto rhs_rank = rhs_node.get_shape().size();
                     auto axis = node.get_attribute_value<std::int64_t>("axis", lhs_rank - rhs_rank);
                     // Unidirectional broadcast right node to left shape.
-                    rhs_node = ngraph::op::opset1::legacy_style_broadcast_for_binary_operation(
+                    rhs_node = ngraph::builder::opset1::legacy_broadcast_for_binary_operation(
                         lhs_node, rhs_node, axis);
                     return {std::make_shared<default_opset::Subtract>(
                         lhs_node, rhs_node, ngraph::op::AutoBroadcastSpec::NONE)};
......
@@ -19,7 +19,6 @@
 #include "core/node.hpp"
 #include "default_opset.hpp"
 #include "ngraph/node.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 #include "ngraph/opsets/opset0.hpp"
 #include "utils/variadic.hpp"
......
@@ -19,7 +19,6 @@
 #include "core/node.hpp"
 #include "default_opset.hpp"
 #include "ngraph/node.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 
 namespace ngraph
 {
......
@@ -18,9 +18,9 @@
 #include "common.hpp"
 #include "default_opset.hpp"
 #include "ngraph/graph_util.hpp"
+#include "ngraph/node.hpp"
 #include "ngraph/op/get_output_element.hpp"
 #include "ngraph/opsets/opset0.hpp"
-#include "validation_util.hpp"
 
 namespace ngraph
 {
......
@@ -28,7 +28,6 @@
 #include "core/node.hpp"
 #include "default_opset.hpp"
 #include "ngraph/node.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 #include "ngraph/shape.hpp"
 #include "ngraph/type/element_type.hpp"
......
@@ -22,7 +22,6 @@
 #include "ngraph/coordinate_diff.hpp"
 #include "ngraph/node.hpp"
 #include "ngraph/op/add.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 #include "ngraph/shape.hpp"
 
 namespace ngraph
......
@@ -18,7 +18,6 @@
 #include "ngraph/attribute_visitor.hpp"
 #include "ngraph/op/constant.hpp"
 #include "ngraph/op/sum.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 #include "ngraph/partial_shape.hpp"
 
 #include <numeric>
......
@@ -27,7 +27,6 @@
 #include "ngraph/op/reshape.hpp"
 #include "ngraph/op/subtract.hpp"
 #include "ngraph/op/sum.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 
 using namespace std;
 using namespace ngraph;
......
@@ -15,6 +15,7 @@
 //*****************************************************************************
 #include "ngraph/op/fused/elu.hpp"
+#include "ngraph/builder/autobroadcast.hpp"
 #include "ngraph/builder/make_constant.hpp"
 #include "ngraph/op/add.hpp"
 #include "ngraph/op/constant.hpp"
@@ -23,7 +24,6 @@
 #include "ngraph/op/minimum.hpp"
 #include "ngraph/op/multiply.hpp"
 #include "ngraph/op/subtract.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 
 using namespace std;
 using namespace ngraph;
@@ -43,7 +43,7 @@ NodeVector op::Elu::decompose_op() const
     shared_ptr<Node> alpha_node =
         make_shared<op::Constant>(data.get_element_type(), Shape{}, vector<double>{m_alpha});
 
-    alpha_node = ngraph::op::numpy_style_broadcast(alpha_node, data.get_shape());
+    alpha_node = builder::numpy_broadcast(alpha_node, data.get_shape());
 
     shared_ptr<ngraph::Node> zero_node =
         builder::make_constant(data.get_element_type(), data.get_shape(), 0);
......
@@ -17,6 +17,7 @@
 #include <memory>
 
 #include "fake_quantize.hpp"
+#include "ngraph/builder/autobroadcast.hpp"
 #include "ngraph/op/add.hpp"
 #include "ngraph/op/constant.hpp"
 #include "ngraph/op/convert.hpp"
@@ -30,7 +31,6 @@
 #include "ngraph/op/quantize.hpp"
 #include "ngraph/op/select.hpp"
 #include "ngraph/op/subtract.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 #include "ngraph/shape.hpp"
 
 using namespace std;
@@ -90,7 +90,7 @@ NodeVector op::FakeQuantize::decompose_op() const
     if (m_auto_broadcast.m_type == AutoBroadcastType::NUMPY)
     {
-        OutputVector broadcasted_nodes = numpy_style_broadcast_values(
+        OutputVector broadcasted_nodes = builder::numpy_broadcast_outputs(
             OutputVector{data, input_low, input_high, output_low, output_high});
 
         data = broadcasted_nodes.at(0);
@@ -101,8 +101,8 @@ NodeVector op::FakeQuantize::decompose_op() const
     }
     else if (m_auto_broadcast.m_type == AutoBroadcastType::PDPD)
     {
-        OutputVector broadcasted_nodes =
-            pdpd_style_broadcast(OutputVector{data, input_low, input_high, output_low, output_high},
-                                 m_auto_broadcast.m_axis);
+        OutputVector broadcasted_nodes = builder::pdpd_broadcast(
+            OutputVector{data, input_low, input_high, output_low, output_high},
+            m_auto_broadcast.m_axis);
 
         data = broadcasted_nodes.at(0);
......
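
builder::pdpd_broadcast used above implements PaddlePaddle-style broadcasting: each input is aligned with the first input at `axis`, trailing dimensions of length 1 are trimmed first, and every axis outside the aligned span is replicated. A standalone sketch of the axis computation from broadcast_value_pdpd_style (the shapes are made up; an illustration, not the library code):

#include <cassert>
#include <set>
#include <vector>

// Axes filled in by PDPD-style broadcast: everything before `axis` and
// everything after the (trailing-1-trimmed) value shape.
std::set<size_t> pdpd_axes(std::vector<size_t> value_shape, size_t output_rank, long axis)
{
    while (!value_shape.empty() && value_shape.back() == 1)
    {
        value_shape.pop_back();
    }

    std::set<size_t> axes;
    for (long i = 0; i < axis; ++i)
    {
        axes.insert(static_cast<size_t>(i));
    }
    for (size_t i = axis + value_shape.size(); i < output_rank; ++i)
    {
        axes.insert(i);
    }
    return axes;
}

int main()
{
    // data {2, 3, 4, 5}, range input {3, 1} aligned at axis 1:
    // the trailing 1 is trimmed, leaving {3}, broadcast along axes 0, 2, 3.
    assert((pdpd_axes({3, 1}, 4, 1) == std::set<size_t>{0, 2, 3}));
    return 0;
}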
@@ -15,13 +15,13 @@
 //*****************************************************************************
 #include "ngraph/op/fused/gemm.hpp"
+#include "ngraph/builder/autobroadcast.hpp"
 #include "ngraph/builder/reshape.hpp"
 #include "ngraph/op/add.hpp"
 #include "ngraph/op/constant.hpp"
 #include "ngraph/op/dot.hpp"
 #include "ngraph/op/fused/matmul.hpp"
 #include "ngraph/op/multiply.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 
 using namespace std;
 using namespace ngraph;
@@ -52,36 +52,34 @@ NodeVector op::Gemm::decompose_op() const
     if (m_transA)
     {
-        A = ngraph::builder::transpose(A);
+        A = builder::transpose(A);
     }
     if (m_transB)
     {
-        B = ngraph::builder::transpose(B);
+        B = builder::transpose(B);
     }
 
-    A = ngraph::builder::flatten(A, 1);
-    B = ngraph::builder::flatten(B, 1);
+    A = builder::flatten(A, 1);
+    B = builder::flatten(B, 1);
 
     // A' * B'
-    std::shared_ptr<ngraph::Node> a_dot_b = std::make_shared<ngraph::op::Dot>(A, B);
+    std::shared_ptr<Node> a_dot_b = std::make_shared<op::Dot>(A, B);
 
     // alpha
-    std::shared_ptr<ngraph::Node> alpha_node = std::make_shared<ngraph::op::Constant>(
+    std::shared_ptr<Node> alpha_node = std::make_shared<op::Constant>(
         a_dot_b->get_element_type(), a_dot_b->get_shape(), std::vector<double>{m_alpha});
     // alpha * A' * B'
-    a_dot_b = std::make_shared<ngraph::op::Multiply>(alpha_node, a_dot_b);
+    a_dot_b = std::make_shared<op::Multiply>(alpha_node, a_dot_b);
 
     // beta * C
-    std::shared_ptr<ngraph::Node> beta_node = std::make_shared<ngraph::op::Constant>(
+    std::shared_ptr<Node> beta_node = std::make_shared<op::Constant>(
         C.get_element_type(), C.get_shape(), std::vector<double>{m_beta});
-    C = std::make_shared<ngraph::op::Multiply>(beta_node, C);
+    C = std::make_shared<op::Multiply>(beta_node, C);
 
     // alpha * A' * B' + beta * C
-    OutputVector broadcasted_nodes =
-        ngraph::op::numpy_style_broadcast_values(OutputVector{a_dot_b, C});
     // The input tensor `C` should be "unidirectionally broadcastable" to the `a_dot_b` tensor.
-    // Numpy style broadcast is bidirectional, so we only use the second output from broadcasting.
-    return {std::make_shared<ngraph::op::Add>(a_dot_b, broadcasted_nodes.at(1))};
+    auto broadcasted_c = builder::numpy_broadcast(C, a_dot_b->get_shape());
+    return {std::make_shared<op::Add>(a_dot_b, broadcasted_c)};
 }
 
 shared_ptr<Node> op::Gemm::copy_with_new_args(const NodeVector& new_args) const
......
@@ -29,7 +29,6 @@
 #include "ngraph/op/sqrt.hpp"
 #include "ngraph/op/subtract.hpp"
 #include "ngraph/op/sum.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 
 using namespace std;
 using namespace ngraph;
......
@@ -15,6 +15,8 @@
 //*****************************************************************************
 #include "ngraph/op/fused/lstm_sequence.hpp"
+#include "ngraph/builder/autobroadcast.hpp"
 #include "ngraph/builder/reshape.hpp"
 #include "ngraph/builder/split.hpp"
 #include "ngraph/frontend/onnx_import/utils/reshape.hpp"
@@ -25,7 +27,6 @@
 #include "ngraph/op/greater.hpp"
 #include "ngraph/op/reverse_sequence.hpp"
 #include "ngraph/op/select.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 using namespace ngraph;
 using namespace std;
@@ -121,7 +122,7 @@ shared_ptr<Node> op::LSTMSequence::get_masked_node(const Output<Node>& data,
         element::i32, data.get_shape(), vector<int32_t>(shape_size(data.get_shape()), time_step));
     Output<Node> batch_seq_length =
-        op::legacy_style_broadcast_for_binary_operation(
+        builder::legacy_broadcast_for_binary_operation(
             curr_time_step_node, input_value(3).get_node_shared_ptr(), batch_axis)
             .at(1);
...
@@ -18,11 +18,11 @@
 #include "mvn.hpp"
 #include "ngraph/builder/reduce_ops.hpp"
 #include "ngraph/op/add.hpp"
+#include "ngraph/op/broadcast.hpp"
 #include "ngraph/op/constant.hpp"
 #include "ngraph/op/divide.hpp"
 #include "ngraph/op/sqrt.hpp"
 #include "ngraph/op/subtract.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 using namespace std;
 using namespace ngraph;
...
@@ -22,7 +22,6 @@
 #include "ngraph/op/divide.hpp"
 #include "ngraph/op/fused/normalize_l2.hpp"
 #include "ngraph/op/multiply.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 using namespace std;
 using namespace ngraph;
...
@@ -15,6 +15,7 @@
 //*****************************************************************************
 #include "ngraph/op/fused/prelu.hpp"
+#include "ngraph/builder/autobroadcast.hpp"
 #include "ngraph/op/add.hpp"
 #include "ngraph/op/broadcast.hpp"
 #include "ngraph/op/constant.hpp"
@@ -22,7 +23,6 @@
 #include "ngraph/op/greater.hpp"
 #include "ngraph/op/less.hpp"
 #include "ngraph/op/multiply.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 using namespace std;
 using namespace ngraph;
@@ -47,11 +47,11 @@ NodeVector op::PRelu::decompose_op() const
     {
         auto it = std::find(std::begin(data_shape), std::end(data_shape), slope_shape.at(0));
         auto index = std::distance(std::begin(data_shape), it);
-        slope = make_broadcast_node(slope, data.get_shape(), index);
+        slope = builder::make_broadcast_node(slope, data.get_shape(), index);
     }
     else if (data_shape != slope_shape)
     {
-        slope = numpy_style_broadcast_values({slope, data})[0];
+        slope = builder::numpy_broadcast(slope, data.get_shape());
     }
     // x < 0 => f(x) = x * slope
@@ -59,7 +59,7 @@ NodeVector op::PRelu::decompose_op() const
     std::shared_ptr<ngraph::Node> zero_node = std::make_shared<ngraph::op::Constant>(
         data.get_element_type(), ngraph::Shape{}, std::vector<double>{0});
-    zero_node = make_broadcast_node(zero_node, data.get_shape());
+    zero_node = builder::make_broadcast_node(zero_node, data.get_shape());
     std::shared_ptr<ngraph::Node> negative_map = std::make_shared<ngraph::op::Convert>(
         std::make_shared<ngraph::op::Less>(data, zero_node), data.get_element_type());
...
@@ -14,9 +14,10 @@
 // limitations under the License.
 //*****************************************************************************
 #include "scale_shift.hpp"
+#include "ngraph/builder/autobroadcast.hpp"
 #include "ngraph/op/add.hpp"
 #include "ngraph/op/multiply.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 using namespace std;
 using namespace ngraph;
@@ -38,7 +39,7 @@ NodeVector op::ScaleShift::decompose_op() const
     auto shift = input_value(2);
     // broadcast all data
-    auto broadcasted_nodes = numpy_style_broadcast_values({data, scale, shift});
+    auto broadcasted_nodes = builder::numpy_broadcast_outputs({data, scale, shift});
     data = broadcasted_nodes[0];
     scale = broadcasted_nodes[1];
     shift = broadcasted_nodes[2];
...
@@ -29,7 +29,6 @@
 #include "ngraph/op/softmax.hpp"
 #include "ngraph/op/subtract.hpp"
 #include "ngraph/op/sum.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 using namespace std;
 using namespace ngraph;
...
@@ -18,7 +18,6 @@
 #include "ngraph/node.hpp"
 #include "ngraph/op/multiply.hpp"
 #include "ngraph/op/subtract.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 #include "ngraph/op/util/fused_op.hpp"
 using namespace std;
...
//*****************************************************************************
// Copyright 2017-2020 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#include <cstddef>
#include <iterator>
#include <numeric>
#include <vector>
#include "broadcasting.hpp"
#include "ngraph/axis_vector.hpp"
#include "ngraph/builder/reshape.hpp"
#include "ngraph/log.hpp"
#include "ngraph/op/broadcast.hpp"
#include "ngraph/op/constant.hpp"
#include "ngraph/op/reshape.hpp"
#include "ngraph/util.hpp"
/// \brief Calculate the output shape of numpy-style broadcast operation for two shapes.
///
/// more info:
/// https://docs.scipy.org/doc/numpy/user/basics.broadcasting.html#general-broadcasting-rules
/// example:
/// left: [3, 1, 10] right: [5, 1]
/// return: [3, 5, 10]
///
/// \param left_shape First input shape.
/// \param right_shape Second input Shape.
/// \return Broadcast shape of input shapes.
static ngraph::Shape calculate_broadcast_shape(ngraph::Shape left_shape, ngraph::Shape right_shape)
{
ngraph::Shape result;
auto left_rank = left_shape.size();
auto right_rank = right_shape.size();
auto max_rank = std::max(left_rank, right_rank);
// left-pad the left_shape with zeros
left_shape.insert(std::begin(left_shape), max_rank - left_rank, 0);
// left-pad the right_shape with zeros
right_shape.insert(std::begin(right_shape), max_rank - right_rank, 0);
for (std::size_t index = 0; index < max_rank; ++index)
{
result.push_back(std::max(left_shape.at(index), right_shape.at(index)));
}
return result;
};
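// Editorial usage sketch of the helper above (not part of the original file).
// Zero-padding the shorter shape is safe because max(0, d) == d for any axis
// length d, e.g. [3, 1, 10] vs [0, 5, 1] -> per-axis max -> [3, 5, 10].
inline bool calculate_broadcast_shape_example()
{
    return calculate_broadcast_shape({3, 1, 10}, {5, 1}) == ngraph::Shape{3, 5, 10}; // true
}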
/// \brief Calculate the output shape of numpy-style broadcast operation for all input shapes.
///
/// This function finds the maximum tensor shape that will be the result of the element-wise
/// operation applied to the input shapes vector. The function also prepares the shape of each
/// input for the element-wise operation by left-padding those shapes so that their rank is
/// equal to the target shape's rank.
///
/// \param input_shapes A vector of input shapes for which a common shape should be found
/// \return A pair that contains the target shape as its first object and a vector of padded
/// input shapes ready to be broadcasted as the second object
static std::pair<ngraph::Shape, std::vector<ngraph::Shape>>
get_numpy_broadcast_shapes(const std::vector<ngraph::Shape>& input_shapes)
{
ngraph::Shape target_shape = std::accumulate(std::begin(input_shapes),
std::end(input_shapes),
ngraph::Shape{},
calculate_broadcast_shape);
std::vector<ngraph::Shape> full_shapes;
for (const ngraph::Shape& input : input_shapes)
{
ngraph::Shape padded_shape{input};
padded_shape.insert(std::begin(padded_shape), target_shape.size() - padded_shape.size(), 1);
full_shapes.push_back(std::move(padded_shape));
}
return {target_shape, full_shapes};
}
/// \brief Calculate the output shape of numpy-style broadcast operation for all input nodes.
///
/// \param inputs A vector of input nodes for which a common shape should be found
/// \return A pair that contains the target shape as its first object and a vector of padded
/// input shapes ready to be broadcasted as the second object
static std::pair<ngraph::Shape, std::vector<ngraph::Shape>>
get_numpy_broadcast_shapes(const ngraph::NodeVector& inputs)
{
std::vector<ngraph::Shape> input_shapes;
for (const auto& input : inputs)
{
input_shapes.push_back(input->get_shape());
}
return get_numpy_broadcast_shapes(input_shapes);
}
static std::pair<ngraph::Shape, std::vector<ngraph::Shape>>
get_numpy_broadcast_shapes(const ngraph::OutputVector& values)
{
std::vector<ngraph::Shape> input_shapes;
for (const auto& input : values)
{
input_shapes.push_back(input.get_shape());
}
return get_numpy_broadcast_shapes(input_shapes);
}
/// \brief Broadcast input node.
///
/// \note The source shape does not have to be the actual shape of the input node; however,
///       it should be a superset of it (containing it as a continuous subset). This implies
///       we may expand the number of axes of the input node.
/// The ranks of source_shape and output_shape must be equal. This means that the
/// source_shape has to be padded with ones for this operation.
///
/// \param[in] value The input Node to be broadcast.
/// \param[in] output_shape The output shape.
/// \param[in] source_shape The source shape from which we want to broadcast input node.
///
/// \return The broadcasted Node.
///
static std::shared_ptr<ngraph::Node>
broadcast_node_numpy_style(const ngraph::Output<ngraph::Node>& value,
const ngraph::Shape& output_shape,
const ngraph::Shape& source_shape)
{
// If node already has the required shape, return original node
if (output_shape == value.get_shape())
{
return value.as_single_output_node();
}
if (source_shape.size() != output_shape.size())
{
NGRAPH_WARN << "Ranks of source_shape and output_shape dont match: " << source_shape.size()
<< " vs " << output_shape.size();
}
ngraph::AxisVector broadcast_axes;
ngraph::Shape squeezed_shape;
// Positions of axes which have length of 1 are needed to calculate broadcast_axes
// for nGraph broadcast operation. We need to remove all ones from source shape
// to avoid broadcasting axis conflict.
for (std::size_t index = 0; index < output_shape.size(); ++index)
{
if (source_shape.at(index) == 1)
{
broadcast_axes.push_back(index);
}
else
{
squeezed_shape.push_back(source_shape.at(index));
}
}
// Remove axes which have length of 1 from source shape
ngraph::Output<ngraph::Node> broadcasted_value = std::make_shared<ngraph::op::Reshape>(
value, ngraph::get_default_order(value.get_shape()), squeezed_shape);
return std::make_shared<ngraph::op::Broadcast>(broadcasted_value, output_shape, broadcast_axes);
}
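// Editorial sketch of the two-step lowering above with concrete shapes (the
// Parameter node is illustrative). For padded source [1, 3, 1, 5] and output
// [2, 3, 4, 5]: the 1-length source axes (positions 0 and 2) become the
// broadcast axes, and the remaining lengths form the squeezed shape [3, 5].
inline std::shared_ptr<ngraph::Node> broadcast_node_numpy_style_example()
{
    auto value = std::make_shared<ngraph::op::Parameter>(ngraph::element::f32,
                                                         ngraph::Shape{1, 3, 1, 5});
    // Same graph as broadcast_node_numpy_style(value, {2, 3, 4, 5}, {1, 3, 1, 5}):
    auto squeezed = std::make_shared<ngraph::op::Reshape>(
        value, ngraph::get_default_order(value->get_shape()), ngraph::Shape{3, 5});
    return std::make_shared<ngraph::op::Broadcast>(
        squeezed, ngraph::Shape{2, 3, 4, 5}, ngraph::AxisSet{0, 2});
}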
/// \brief Broadcast input node.
///
/// \param[in] value The input Node to be broadcast.
/// \param[in] output_shape The output shape.
/// \param[in] axis The start index to align with output_shape
///
/// \return The broadcasted Node.
///
static std::shared_ptr<ngraph::Node> broadcast_value_pdpd_style(
const ngraph::Output<ngraph::Node>& value, const ngraph::Shape& output_shape, int64_t axis)
{
auto value_shape = value.get_shape();
// If node already has the required shape, return original node
if (output_shape == value_shape)
{
return value.as_single_output_node();
}
if (axis == -1)
{
axis = output_shape.size() - value_shape.size();
}
auto trimmed_value_shape = value_shape;
while (trimmed_value_shape.size() > 0 && trimmed_value_shape.back() == 1)
{
trimmed_value_shape.pop_back();
}
ngraph::AxisSet axes;
for (int64_t i = 0; i < axis; ++i)
{
axes.insert(static_cast<size_t>(i));
}
for (size_t i = axis + trimmed_value_shape.size(); i < output_shape.size(); ++i)
{
axes.insert(i);
}
auto trimmed_value = value;
if (value_shape != trimmed_value_shape)
{
trimmed_value = std::make_shared<ngraph::op::Reshape>(
value, ngraph::get_default_order(value_shape), trimmed_value_shape);
}
auto value_bcast = std::make_shared<ngraph::op::Broadcast>(trimmed_value, output_shape, axes);
return std::move(value_bcast);
}
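// Editorial worked example of the PDPD rule above: for output [2, 3, 4, 5],
// value shape [3, 4] and axis = 1 there are no trailing 1s to trim, so the
// value occupies output axes [1, 3) and the broadcast axes are {0, 3}.
// The Parameter node is illustrative.
inline std::shared_ptr<ngraph::Node> broadcast_value_pdpd_style_example()
{
    auto value =
        std::make_shared<ngraph::op::Parameter>(ngraph::element::f32, ngraph::Shape{3, 4});
    return broadcast_value_pdpd_style(value, ngraph::Shape{2, 3, 4, 5}, 1);
    // equivalent to Broadcast(value, {2, 3, 4, 5}, AxisSet{0, 3})
}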
namespace ngraph
{
namespace op
{
OutputVector numpy_style_broadcast_values(const OutputVector& values)
{
if (values.size() <= 1)
{
return values;
}
// find the output tensor's shape, then broadcast all inputs so that they are compatible
auto bcast_shapes = get_numpy_broadcast_shapes(values);
OutputVector broadcasted_inputs;
for (std::size_t i = 0; i < values.size(); ++i)
{
broadcasted_inputs.push_back(broadcast_node_numpy_style(
values[i], bcast_shapes.first, bcast_shapes.second[i]));
}
return broadcasted_inputs;
}
NodeVector numpy_style_broadcast(const NodeVector& inputs)
{
if (inputs.size() <= 1)
{
return inputs;
}
// find the output tensor's shape, then broadcast all inputs so that they are compatible
auto bcast_shapes = get_numpy_broadcast_shapes(inputs);
NodeVector broadcasted_inputs;
for (std::size_t i = 0; i < inputs.size(); ++i)
{
broadcasted_inputs.push_back(broadcast_node_numpy_style(
inputs[i], bcast_shapes.first, bcast_shapes.second[i]));
}
return broadcasted_inputs;
}
std::shared_ptr<ngraph::Node> numpy_style_broadcast(const Output<ngraph::Node>& value,
const Shape& shape)
{
auto bcast_shape = get_numpy_broadcast_shapes({value.get_shape(), shape});
return broadcast_node_numpy_style(value, bcast_shape.first, bcast_shape.second[0]);
}
OutputVector numpy_style_broadcast_for_matmul_operation(const Output<ngraph::Node>& left,
const Output<ngraph::Node>& right)
{
const auto& left_shape = left.get_shape();
const auto& right_shape = right.get_shape();
// Broadcast only _stack of matrices_ axes.
const auto& numpy_shapes = get_numpy_broadcast_shapes(
{Shape{std::begin(left_shape), std::next(std::end(left_shape), -2)},
Shape{std::begin(right_shape), std::next(std::end(right_shape), -2)}});
// Prepare tensors output shapes with broadcasted _stack of matrices_ axes.
auto left_output_shape = numpy_shapes.first;
auto right_output_shape = numpy_shapes.first;
// Append the last two axes original dimensions.
left_output_shape.insert(std::end(left_output_shape),
std::next(std::begin(left_shape), left_shape.size() - 2),
std::end(left_shape));
right_output_shape.insert(std::end(right_output_shape),
std::next(std::begin(right_shape), right_shape.size() - 2),
std::end(right_shape));
auto left_full_shape = numpy_shapes.second.at(0);
auto right_full_shape = numpy_shapes.second.at(1);
// Append the last two axes original dimensions.
left_full_shape.insert(std::end(left_full_shape),
std::next(std::begin(left_shape), left_shape.size() - 2),
std::end(left_shape));
right_full_shape.insert(std::end(right_full_shape),
std::next(std::begin(right_shape), right_shape.size() - 2),
std::end(right_shape));
return {broadcast_node_numpy_style(left, left_output_shape, left_full_shape),
broadcast_node_numpy_style(right, right_output_shape, right_full_shape)};
}
OutputVector legacy_style_broadcast_for_binary_operation(const Output<ngraph::Node>& left,
const Output<ngraph::Node>& right,
size_t start_match_axis)
{
const auto& left_shape = left.get_shape();
const auto& right_shape = right.get_shape();
bool dimensions_identical = (left_shape == right_shape);
if (dimensions_identical)
{
return {left, right};
}
// Prepare new shape of right operand for broadcasting
// Remove dimensions with length=1 from back
auto new_right_shape = right_shape;
for (int dimension = new_right_shape.size() - 1; dimension >= 0; --dimension)
{
if (new_right_shape[dimension] == 1)
{
new_right_shape.pop_back();
}
else
{
break;
}
}
// Count the leading dimensions of length 1 at the front
std::size_t num_ones = 0;
for (std::size_t dimension : new_right_shape)
{
if (dimension == 1)
{
++num_ones;
}
else
{
break;
}
}
// Remove dimensions with length=1 from front
new_right_shape.erase(std::begin(new_right_shape),
std::next(std::begin(new_right_shape), num_ones));
auto reshape_right = std::make_shared<ngraph::op::Reshape>(
right, ngraph::get_default_order(right_shape), new_right_shape);
// Move broadcast start axis parameter to right
start_match_axis += num_ones;
auto broadcast_right = std::make_shared<ngraph::op::Broadcast>(
reshape_right,
left_shape,
calculate_broadcast_axes(left_shape, new_right_shape, start_match_axis));
return {left, broadcast_right};
}
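// Editorial shape trace for the legacy rule above, with left [2, 3, 4, 5],
// right [3, 4, 1] and start_match_axis = 1 (the Parameter nodes are
// illustrative):
//   trailing 1s popped:  new_right_shape = [3, 4]
//   no leading 1s:       num_ones = 0, start_match_axis stays 1
//   broadcast axes:      calculate_broadcast_axes({2, 3, 4, 5}, {3, 4}, 1) == {0, 3}
inline OutputVector legacy_style_broadcast_example()
{
    auto left = std::make_shared<Parameter>(element::f32, Shape{2, 3, 4, 5});
    auto right = std::make_shared<Parameter>(element::f32, Shape{3, 4, 1});
    // right is reshaped to [3, 4] and broadcast back to [2, 3, 4, 5];
    // left is returned untouched.
    return legacy_style_broadcast_for_binary_operation(left, right, 1);
}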
NodeVector pdpd_style_broadcast(const NodeVector& inputs, int64_t axis)
{
if (inputs.size() <= 1)
{
return inputs;
}
NodeVector broadcasted_inputs{inputs[0]};
for (std::size_t i = 1; i < inputs.size(); ++i)
{
broadcasted_inputs.push_back(
broadcast_value_pdpd_style(inputs[i], inputs[0]->get_shape(), axis));
}
return broadcasted_inputs;
}
OutputVector pdpd_style_broadcast(const OutputVector& inputs, int64_t axis)
{
if (inputs.size() <= 1)
{
return inputs;
}
OutputVector broadcasted_inputs{inputs[0]};
for (std::size_t i = 1; i < inputs.size(); ++i)
{
broadcasted_inputs.push_back(
broadcast_value_pdpd_style(inputs[i], inputs[0].get_shape(), axis));
}
return broadcasted_inputs;
}
AxisSet calculate_broadcast_axes(const Shape& output_shape,
const Shape& input_shape,
std::size_t start_match_axis)
{
std::vector<std::size_t> result(output_shape.size() - input_shape.size());
// Populate the result vector with a monotonically increasing series from 0 up to
// output_shape.size(), excluding the half-open range
// [start_match_axis, start_match_axis + input_shape.size())
std::iota(std::begin(result), std::begin(result) + start_match_axis, 0);
std::iota(std::begin(result) + start_match_axis,
std::end(result),
start_match_axis + input_shape.size());
return result;
}
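// Editorial example: for output {2, 3, 4, 5} and input {3, 4} matched from
// axis 1, the input covers output axes 1 and 2, so the broadcast adds the
// remaining axes.
inline AxisSet calculate_broadcast_axes_example()
{
    return calculate_broadcast_axes(Shape{2, 3, 4, 5}, Shape{3, 4}, 1); // == {0, 3}
}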
namespace opset1
{
Output<Node> legacy_style_broadcast_for_binary_operation(const Output<Node>& left,
const Output<Node>& right,
size_t start_match_axis)
{
const auto& left_shape = left.get_shape();
const auto& right_shape = right.get_shape();
bool dimensions_identical = (left_shape == right_shape);
if (dimensions_identical)
{
return right;
}
// Prepare new shape of right operand for broadcasting
// Remove dimensions with length=1 from back
auto new_right_shape = right_shape;
for (int dimension = new_right_shape.size() - 1; dimension >= 0; --dimension)
{
if (new_right_shape.at(dimension) == 1)
{
new_right_shape.pop_back();
}
else
{
break;
}
}
// Count the leading dimensions of length 1 at the front
std::size_t num_ones = 0;
for (std::size_t dimension : new_right_shape)
{
if (dimension == 1)
{
++num_ones;
}
else
{
break;
}
}
// Remove dimensions with length=1 from front
new_right_shape.erase(std::begin(new_right_shape),
std::next(std::begin(new_right_shape), num_ones));
auto reshape_right = std::make_shared<Reshape>(
right, ngraph::get_default_order(right_shape), new_right_shape);
// Move broadcast start axis parameter to right
start_match_axis += num_ones;
auto broadcasted_right = std::make_shared<v1::Broadcast>(
reshape_right,
Constant::create(element::i64, Shape{left_shape.size()}, left_shape),
get_axes_mapping_output(left_shape, new_right_shape, start_match_axis));
return broadcasted_right;
}
std::vector<std::size_t> get_axes_mapping(const Shape& output_shape,
const AxisSet& broadcast_axes)
{
NGRAPH_CHECK((broadcast_axes.size() <= output_shape.size()));
std::vector<size_t> axes_mapping(output_shape.size());
std::iota(axes_mapping.begin(), axes_mapping.end(), 0);
for (auto i = broadcast_axes.rbegin(); i != broadcast_axes.rend(); ++i)
{
axes_mapping.erase(axes_mapping.begin() + *i);
}
return axes_mapping;
}
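// Editorial example mirroring the axes_mapping_from_bcast_axes unit test:
// for output {2, 3, 4, 5} with Broadcast:v0 axes {0, 2}, the input's
// surviving axes map onto output positions 1 and 3.
inline std::vector<std::size_t> get_axes_mapping_example()
{
    return get_axes_mapping(Shape{2, 3, 4, 5}, AxisSet{0, 2}); // == {1, 3}
}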
Output<Node> get_axes_mapping_output(const Shape& output_shape,
const Shape& input_shape,
std::size_t start_match_axis)
{
NGRAPH_CHECK((input_shape.size() + start_match_axis <= output_shape.size()));
std::vector<std::size_t> mapping(input_shape.size());
std::iota(std::begin(mapping), std::end(mapping), start_match_axis);
return Constant::create(element::i64, Shape{mapping.size()}, mapping);
}
Output<Node> get_axes_mapping_output(const Shape& output_shape,
const AxisSet& broadcast_axes)
{
std::vector<size_t> axes_mapping{get_axes_mapping(output_shape, broadcast_axes)};
return Constant::create(element::i64, Shape{axes_mapping.size()}, axes_mapping);
}
Output<Node> make_broadcast(const Output<Node>& node,
const Shape& target_shape,
const AxisSet& broadcast_axes)
{
return std::make_shared<v1::Broadcast>(
node,
Constant::create(element::i64, Shape{target_shape.size()}, target_shape),
get_axes_mapping_output(target_shape, broadcast_axes));
}
Output<Node> make_broadcast(const Output<Node>& node,
const Shape& target_shape,
std::size_t start_match_axis)
{
return std::make_shared<v1::Broadcast>(
node,
Constant::create(element::i64, Shape{target_shape.size()}, target_shape),
get_axes_mapping_output(target_shape, node.get_shape(), start_match_axis));
}
} // namespace opset1
} // namespace op
} // namespace ngraph
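A hedged usage sketch of the opset1 legacy helper defined above, mirroring the opset1_legacy_broadcast_2d unit test (the Parameter nodes are illustrative):

auto left = std::make_shared<ngraph::op::Parameter>(ngraph::element::f32, ngraph::Shape{2, 3, 4, 5});
auto right = std::make_shared<ngraph::op::Parameter>(ngraph::element::f32, ngraph::Shape{4, 5});
// right is matched against left starting at axis 2 and expanded by a
// Broadcast:v1 node whose target shape is left's shape.
auto out = ngraph::op::opset1::legacy_style_broadcast_for_binary_operation(left, right, 2);
// out.get_shape() == Shape{2, 3, 4, 5}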
//*****************************************************************************
// Copyright 2017-2020 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#pragma once
#include <cstddef>
#include <memory>
#include <vector>
#include "ngraph/axis_set.hpp"
#include "ngraph/node.hpp"
#include "ngraph/op/broadcast.hpp"
#include "ngraph/shape.hpp"
namespace ngraph
{
namespace op
{
/// \brief Cast shape of all input nodes for an element-wise operation that requires
/// shape-compatibility
///
/// \param inputs Original list of inputs
///
/// \return Numpy-style broadcasted list of nodes.
NodeVector numpy_style_broadcast(const NodeVector& inputs)
NGRAPH_DEPRECATED("Replace with numpy_style_broadcast_values");
/// \brief Cast shape of all input nodes for an element-wise operation that requires
/// shape-compatibility
///
/// \param values Original list of inputs
///
/// \return Numpy-style broadcasted list of nodes.
OutputVector numpy_style_broadcast_values(const OutputVector& values);
/// \brief Cast shape of an output to the requested output shape using NumPy's broadcasting
/// rules
///
/// \param value original value
/// \param shape requested output shape
///
/// \return Broadcast output.
std::shared_ptr<Node> numpy_style_broadcast(const Output<Node>& value, const Shape& shape);
/// \brief Cast shape of two outputs to make them compatible for an element-wise binary
/// operation.
///
/// If necessary the right-hand-side argument will be broadcast to match the shape
/// of left-hand-side argument. The starting of the mutually equal shape is
/// specified by the argument "start_match_axis", and if it is not set,
/// suffix matching is assumed.
///
/// This style of broadcast was used in ONNX Op sets prior to version 7, where it was
/// replaced by numpy-style broadcasting.
///
/// \param left Node which contain input of binary op.
/// \param right Node which contain input of binary op.
/// \param start_match_axis position in shape denoting start of the mutually equal shape
///
/// \return Left and right node after broadcasting.
OutputVector legacy_style_broadcast_for_binary_operation(const Output<Node>& left,
const Output<Node>& right,
size_t start_match_axis);
/// \brief Broadcast shape of two nodes to make them compatible for a matrix
/// multiplication.
///
/// \note This function is reflecting broadcasting behaviour of NumPy's `matmul`
/// operation.
/// (https://docs.scipy.org/doc/numpy/reference/generated/numpy.matmul.html)
/// This means that only \"stack of matrices\" axes are bidirectionally
/// broadcasted. The last two dimensions are left untouched.
///
/// \param[in] left The Node providing data for the left-hand side of matrix
/// multiplication.
/// \param[in] right The Node providing data for the right-hand side of matrix
/// multiplication.
///
/// \return The vector containing both outputs broadcasted.
///
OutputVector numpy_style_broadcast_for_matmul_operation(const Output<Node>& left,
const Output<Node>& right);
/// \brief Cast shape of all input nodes for an element-wise operation that requires
/// shape-compatibility
///
/// \param inputs Original list of inputs
/// \param axis Index starting to align
///
/// \return pdpd-style broadcasted list of nodes.
NodeVector pdpd_style_broadcast(const NodeVector& inputs, int64_t axis);
OutputVector pdpd_style_broadcast(const OutputVector& inputs, int64_t axis);
/// \brief Generate a list of broadcast axes.
///
/// \details Informally, a broadcast "adds" axes to the input tensor, replicating
/// elements from the input tensor as needed to fill the new dimensions.
/// The function calculates which of the output axes are added in this way.
///
/// \param output_shape The new shape for the output tensor.
/// \param input_shape The shape of input tensor.
/// \param start_match_axis The axis along which we want to replicate elements.
/// The starting axis position (0-based) in the output
/// shape from which the current shape of the tensor
/// matches the desired new shape.
///
/// \return The indices of added axes.
AxisSet calculate_broadcast_axes(const Shape& output_shape,
const Shape& input_shape,
std::size_t start_match_axis);
/// \brief Generate a list of broadcast axes.
///
/// \details Broadcast "adds" elements along axes to the input tensor, replicating
/// elements from the input tensor as needed to fill the new dimensions.
/// The function calculates which of the output axes are added in this way.
///
/// This function will attempt to match shapes, assuming the current shape
/// matches the rightmost positions of the desired new shape. This behaviour
/// is similar to NumPy's broadcasting.
///
/// \param output_shape The new shape for the output tensor.
/// \param input_shape The shape of input tensor.
///
/// \return The indices of added axes.
inline AxisSet calculate_broadcast_axes(const Shape& output_shape, const Shape& input_shape)
{
return calculate_broadcast_axes(
output_shape, input_shape, output_shape.size() - input_shape.size());
}
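// Editorial example of the suffix matching above: the input's axes are
// assumed to line up with the trailing axes of the output, so
// start_match_axis is the output rank minus the input rank.
inline AxisSet calculate_broadcast_axes_suffix_example()
{
    return calculate_broadcast_axes(Shape{2, 3, 4, 5}, Shape{4, 5}); // == {0, 1}
}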
inline std::shared_ptr<Node> make_broadcast_node(const Output<Node>& output,
Shape new_shape)
{
return std::make_shared<op::Broadcast>(
output, new_shape, calculate_broadcast_axes(new_shape, output.get_shape()));
}
inline std::shared_ptr<Node> make_broadcast_node(const Output<Node>& value,
const Shape& new_shape,
std::size_t start_match_axis)
{
return std::make_shared<op::Broadcast>(
value,
new_shape,
calculate_broadcast_axes(new_shape, value.get_shape(), start_match_axis));
}
namespace opset1
{
///
/// \brief Broadcast right node to left node's shape using legacy scheme.
///
/// \param[in] left The left hand side node of binary operation.
/// \param[in] right The right hand side node of binary operation. The one
/// to be broadcasted.
/// \param[in] start_match_axis The axis index at which the shapes of both
///                             nodes start to match.
///
/// \return The Output object connected to node producing broadcasted right node.
///
Output<Node> legacy_style_broadcast_for_binary_operation(const Output<Node>& left,
const Output<Node>& right,
size_t start_match_axis);
///
/// \brief Reconstructs axes mapping vector for Broadcast:v1 operation.
///
/// \param[in] output_shape The output shape of Broadcast operation.
/// \param[in] broadcast_axes The broadcast axes used for Broadcast:v0 operator.
///
/// \return The vector with the axes index mapping.
///
std::vector<std::size_t> get_axes_mapping(const Shape& output_shape,
const AxisSet& broadcast_axes);
///
/// \brief Creates Node returning the axes mapping for Broadcast:v1 operation.
///
/// \param[in] output_shape The output shape of Broadcast operation.
/// \param[in] input_shape The input shape.
/// \param[in] start_match_axis The axis index at which input shape starts to be
/// identical to the output shape.
///
/// \return Returns the Output object pointing to node with the axes mapping.
///
Output<Node> get_axes_mapping_output(const Shape& output_shape,
const Shape& input_shape,
std::size_t start_match_axis);
///
/// \brief Creates Node returning the axes mapping for Broadcast:v1 operation.
///
/// \param[in] output_shape The output shape of Broadcast operation.
/// \param[in] broadcast_axes The broadcast axes used for Broadcast:v0 operator.
///
/// \return The Output object with Node returning axes mapping.
///
Output<Node> get_axes_mapping_output(const Shape& output_shape,
const AxisSet& broadcast_axes);
Output<Node> make_broadcast(const Output<Node>& node,
const Shape& target_shape,
const AxisSet& broadcast_axes);
Output<Node> make_broadcast(const Output<Node>& node,
const Shape& target_shape,
std::size_t start_match_axis);
} // namespace opset1
} // namespace op
} // namespace ngraph
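A hedged sketch contrasting the v0 and opset1 helpers declared above (illustrative shapes; the Parameter node is a stand-in):

auto value = std::make_shared<ngraph::op::Parameter>(ngraph::element::f32, ngraph::Shape{3, 4});
// v0 helper: suffix-matched broadcast to {2, 3, 4}, adding axis 0 via op::Broadcast.
auto b0 = ngraph::op::make_broadcast_node(value, ngraph::Shape{2, 3, 4});
// opset1 helper: the same broadcast expressed as a Broadcast:v1 node, matching
// the input against output axes starting at position 1.
auto b1 = ngraph::op::opset1::make_broadcast(value, ngraph::Shape{2, 3, 4}, 1);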
@@ -21,7 +21,6 @@
 #include "ngraph/op/fused/clamp.hpp"
 #include "ngraph/op/multiply.hpp"
 #include "ngraph/op/subtract.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 #include "ngraph/op/util/rnn_cell_base.hpp"
 #include "ngraph/util.hpp"
@@ -68,20 +67,19 @@ op::util::ActivationFunction op::util::RNNCellBase::get_activation_function(size
 shared_ptr<Node> op::util::RNNCellBase::add(const Output<Node>& lhs, const Output<Node>& rhs)
 {
-    auto args = op::numpy_style_broadcast_values({lhs, rhs});
-    return {make_shared<op::Add>(args.at(0), args.at(1))};
+    return {make_shared<op::Add>(lhs, rhs, op::AutoBroadcastSpec(op::AutoBroadcastType::NUMPY))};
 }
 shared_ptr<Node> op::util::RNNCellBase::sub(const Output<Node>& lhs, const Output<Node>& rhs)
 {
-    auto args = op::numpy_style_broadcast_values({lhs, rhs});
-    return {make_shared<op::Subtract>(args.at(0), args.at(1))};
+    return {
+        make_shared<op::Subtract>(lhs, rhs, op::AutoBroadcastSpec(op::AutoBroadcastType::NUMPY))};
 }
 shared_ptr<Node> op::util::RNNCellBase::mul(const Output<Node>& lhs, const Output<Node>& rhs)
 {
-    auto args = op::numpy_style_broadcast_values({lhs, rhs});
-    return {make_shared<op::Multiply>(args.at(0), args.at(1))};
+    return {
+        make_shared<op::Multiply>(lhs, rhs, op::AutoBroadcastSpec(op::AutoBroadcastType::NUMPY))};
 }
 shared_ptr<Node> op::util::RNNCellBase::clip(const Output<Node>& data) const
...
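The RNNCellBase hunk above drops the explicit numpy_style_broadcast_values calls in favor of the built-in autobroadcast on the element-wise ops themselves. A minimal sketch of the new pattern, with illustrative shapes:

auto lhs = std::make_shared<ngraph::op::Parameter>(ngraph::element::f32, ngraph::Shape{2, 3});
auto rhs = std::make_shared<ngraph::op::Parameter>(ngraph::element::f32, ngraph::Shape{3});
// The Add node broadcasts implicitly under the NUMPY AutoBroadcastSpec.
auto sum = std::make_shared<ngraph::op::Add>(
    lhs, rhs, ngraph::op::AutoBroadcastSpec(ngraph::op::AutoBroadcastType::NUMPY));
// sum->get_shape() == Shape{2, 3}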
@@ -16,6 +16,7 @@
 #include "ngraph/pass/implicit_broadcast_elimination.hpp"
+#include "ngraph/builder/autobroadcast.hpp"
 #include "ngraph/graph_util.hpp"
 #include "ngraph/op/util/binary_elementwise_arithmetic.hpp"
 #include "ngraph/op/util/binary_elementwise_comparison.hpp"
@@ -24,7 +25,7 @@
 using namespace std;
 using namespace ngraph;
-bool ngraph::pass::ImplicitBroadcastElimination::run_on_node(std::shared_ptr<ngraph::Node> node)
+bool ngraph::pass::ImplicitBroadcastElimination::run_on_node(std::shared_ptr<Node> node)
 {
     if (node->supports_auto_broadcast())
     {
@@ -53,11 +54,11 @@ NodeVector ngraph::pass::explicit_broadcast(std::shared_ptr<Node>& node)
     }
     else if (autob.m_type == op::AutoBroadcastType::NUMPY)
     {
-        rc = op::numpy_style_broadcast(node->get_arguments());
+        rc = as_node_vector(builder::numpy_broadcast_outputs(node->input_values()));
     }
     else if (autob.m_type == op::AutoBroadcastType::PDPD)
     {
-        rc = op::pdpd_style_broadcast(node->get_arguments(), autob.m_axis);
+        rc = as_node_vector(builder::pdpd_broadcast(node->input_values(), autob.m_axis));
     }
     else
     {
...
@@ -16,7 +16,7 @@
 #pragma once
-#include "ngraph/op/util/broadcasting.hpp"
+#include "ngraph/node.hpp"
 #include "ngraph/pass/pass.hpp"
 namespace ngraph
...
@@ -19,11 +19,11 @@
 #include <functional>
 #include <numeric>
+#include "ngraph/builder/autobroadcast.hpp"
 #include "ngraph/builder/reshape.hpp"
 #include "ngraph/graph_util.hpp"
 #include "ngraph/node.hpp"
 #include "ngraph/op/util/attr_types.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 #include "ngraph/ops.hpp"
 #include "ngraph/pass/implicit_broadcast_elimination.hpp"
 #include "ngraph/pass/opset0_downgrade.hpp"
@@ -157,7 +157,7 @@ namespace
     // (Re)construct axes_mapping.
     AxisSet broadcast_axes = node->get_broadcast_axes().second;
     std::vector<size_t> axes_mapping{
-        ngraph::op::opset1::get_axes_mapping(target_shape, broadcast_axes)};
+        ngraph::builder::opset1::get_axes_mapping(target_shape, broadcast_axes)};
     Output<Node> squeezed_arg = arg;
     // Collect axes to squeeze. Broadcast v0 "adds" new axes, thus we have to squeeze
@@ -536,10 +536,10 @@ namespace
 shared_ptr<Node> op_cast(shared_ptr<op::v1::OneHot> node)
 {
-    const auto indices = node->input_value(0).get_node_shared_ptr();
+    const auto indices = node->input_value(0);
     const auto depth = node->input_value(1).get_node_shared_ptr();
-    auto on_value = node->input_value(2).get_node_shared_ptr();
-    auto off_value = node->input_value(3).get_node_shared_ptr();
+    auto on_value = node->input_value(2);
+    auto off_value = node->input_value(3);
     const auto axis = node->get_axis();
     NGRAPH_CHECK(depth->is_constant(), "depth input must be constant", *node);
@@ -549,9 +549,9 @@ namespace
     auto one_hot = std::make_shared<ngraph::op::Convert>(
         std::make_shared<ngraph::op::OneHot>(indices, output_shape, axis),
-        on_value->get_element_type());
+        on_value.get_element_type());
-    auto broadcasted_values = op::numpy_style_broadcast({one_hot, on_value, off_value});
+    auto broadcasted_values = builder::numpy_broadcast_outputs({one_hot, on_value, off_value});
     on_value = broadcasted_values[1];
     off_value = broadcasted_values[2];
...
@@ -13,17 +13,17 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 //*****************************************************************************
+#include "ngraph/pass/opset1_upgrade.hpp"
 #include <functional>
 #include <iterator>
 #include <limits>
 #include <numeric>
+#include "ngraph/builder/autobroadcast.hpp"
 #include "ngraph/builder/reshape.hpp"
 #include "ngraph/graph_util.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 #include "ngraph/ops.hpp"
-#include "ngraph/pass/opset1_upgrade.hpp"
 #include "ngraph/provenance.hpp"
 using namespace std;
@@ -108,7 +108,7 @@ namespace
 shared_ptr<Node> op_cast(shared_ptr<op::Broadcast> node)
 {
-    auto replacement_node = ngraph::op::opset1::make_broadcast(
+    auto replacement_node = ngraph::builder::opset1::make_broadcast(
         node->input_value(0), node->get_broadcast_shape(), node->get_broadcast_axes());
     replace_node(node, replacement_node.get_node_shared_ptr());
     return replacement_node.get_node_shared_ptr();
...
@@ -21,12 +21,12 @@
 using namespace std;
 using namespace ngraph;
-std::shared_ptr<ngraph::op::Parameter> getParamFromShape(const ngraph::Shape& shape)
+shared_ptr<op::Parameter> getParamFromShape(const Shape& shape)
 {
-    return std::make_shared<ngraph::op::Parameter>(ngraph::element::f32, shape);
+    return make_shared<op::Parameter>(element::f32, shape);
 }
-inline const ngraph::Shape& getShapeFromParam(const shared_ptr<ngraph::Node>& node)
+inline const Shape& getShapeFromParam(const shared_ptr<Node>& node)
 {
     return node->get_shape();
 }
@@ -34,11 +34,11 @@ inline const ngraph::Shape& getShapeFromParam(const shared_ptr<ngraph::Node>& no
 // input shapes are equal so AutoBroadcast does nothing
 TEST(autobroadcast, no_broadcast_equal)
 {
-    ngraph::Shape s2345{2, 3, 4, 5};
+    Shape s2345{2, 3, 4, 5};
     auto lhs = getParamFromShape(s2345);
     auto rhs = getParamFromShape(s2345);
-    auto shaped = ngraph::builder::numpy_broadcast({lhs, rhs});
+    auto shaped = builder::numpy_broadcast({lhs, rhs});
     const shared_ptr<Node>& ab_lhs = shaped.first;
     const shared_ptr<Node>& ab_rhs = shaped.second;
@@ -52,13 +52,13 @@ TEST(autobroadcast, no_broadcast_equal)
 // input shapes are incompatable
 TEST(autobroadcast, no_broadcast_incompatable)
 {
-    ngraph::Shape s2345{2, 3, 4, 5};
-    ngraph::Shape s6789{6, 7, 8, 9};
+    Shape s2345{2, 3, 4, 5};
+    Shape s6789{6, 7, 8, 9};
     auto lhs = getParamFromShape(s2345);
     auto rhs = getParamFromShape(s6789);
-    EXPECT_THROW(ngraph::builder::numpy_broadcast({lhs, rhs}),
-                 ngraph::builder::autobroadcast_incompatible_shapes);
+    EXPECT_THROW(builder::numpy_broadcast({lhs, rhs}),
+                 builder::numpy_autobroadcast_incompatible_shapes);
 }
 // basic broadcast test
@@ -66,12 +66,12 @@ TEST(autobroadcast, no_broadcast_incompatable)
 // lhs broadcast to 2,3
 TEST(autobroadcast, normal_broadcast_2d)
 {
-    ngraph::Shape s3{3};
-    ngraph::Shape s23{2, 3};
+    Shape s3{3};
+    Shape s23{2, 3};
     auto lhs = getParamFromShape(s3);
     auto rhs = getParamFromShape(s23);
-    auto shaped = ngraph::builder::numpy_broadcast({lhs, rhs});
+    auto shaped = builder::numpy_broadcast({lhs, rhs});
     const shared_ptr<Node>& ab_lhs = shaped.first;
     const shared_ptr<Node>& ab_rhs = shaped.second;
@@ -87,12 +87,12 @@ TEST(autobroadcast, normal_broadcast_2d)
 // lhs broadcast to 2,3,4
 TEST(autobroadcast, normal_broadcast_3d)
 {
-    ngraph::Shape s34{3, 4};
-    ngraph::Shape s234{2, 3, 4};
+    Shape s34{3, 4};
+    Shape s234{2, 3, 4};
     auto lhs = getParamFromShape(s34);
     auto rhs = getParamFromShape(s234);
-    auto shaped = ngraph::builder::numpy_broadcast({lhs, rhs});
+    auto shaped = builder::numpy_broadcast({lhs, rhs});
     const shared_ptr<Node>& ab_lhs = shaped.first;
     const shared_ptr<Node>& ab_rhs = shaped.second;
@@ -108,12 +108,12 @@ TEST(autobroadcast, normal_broadcast_3d)
 // lhs broadcast to 2,3,4,5
 TEST(autobroadcast, normal_broadcast_4d)
 {
-    ngraph::Shape s345{3, 4, 5};
-    ngraph::Shape s2345{2, 3, 4, 5};
+    Shape s345{3, 4, 5};
+    Shape s2345{2, 3, 4, 5};
     auto lhs = getParamFromShape(s345);
     auto rhs = getParamFromShape(s2345);
-    auto shaped = ngraph::builder::numpy_broadcast({lhs, rhs});
+    auto shaped = builder::numpy_broadcast({lhs, rhs});
     const shared_ptr<Node>& ab_lhs = shaped.first;
     const shared_ptr<Node>& ab_rhs = shaped.second;
@@ -129,12 +129,12 @@ TEST(autobroadcast, normal_broadcast_4d)
 // rhs broadcast to 2,3,4,5
 TEST(autobroadcast, reshape_1x_broadcast)
 {
-    ngraph::Shape s2345{2, 3, 4, 5};
-    ngraph::Shape s2341{2, 3, 4, 1};
+    Shape s2345{2, 3, 4, 5};
+    Shape s2341{2, 3, 4, 1};
     auto lhs = getParamFromShape(s2345);
     auto rhs = getParamFromShape(s2341);
-    auto shaped = ngraph::builder::numpy_broadcast({lhs, rhs});
+    auto shaped = builder::numpy_broadcast({lhs, rhs});
     const shared_ptr<Node>& ab_lhs = shaped.first;
     const shared_ptr<Node>& ab_rhs = shaped.second;
@@ -150,16 +150,16 @@ TEST(autobroadcast, reshape_1x_broadcast)
 // lhs broadcast to 2,3,4,5
 TEST(autobroadcast, reshape_2x_broadcast)
 {
-    ngraph::Shape s2145{2, 1, 4, 5};
-    ngraph::Shape s2341{2, 3, 4, 1};
+    Shape s2145{2, 1, 4, 5};
+    Shape s2341{2, 3, 4, 1};
     auto lhs = getParamFromShape(s2145);
     auto rhs = getParamFromShape(s2341);
-    auto shaped = ngraph::builder::numpy_broadcast({lhs, rhs});
+    auto shaped = builder::numpy_broadcast({lhs, rhs});
     const shared_ptr<Node>& ab_lhs = shaped.first;
     const shared_ptr<Node>& ab_rhs = shaped.second;
-    ngraph::Shape s2345{2, 3, 4, 5};
+    Shape s2345{2, 3, 4, 5};
     EXPECT_NE(ab_lhs, lhs);
     EXPECT_EQ(getShapeFromParam(ab_lhs), s2345);
@@ -174,12 +174,12 @@ TEST(autobroadcast, reshape_2x_broadcast)
 // rhs broadcast to 2,3,1,5
 TEST(autobroadcast, broadcast_with_dim1)
 {
-    ngraph::Shape s2315{2, 3, 1, 5};
-    ngraph::Shape s315{3, 1, 5};
+    Shape s2315{2, 3, 1, 5};
+    Shape s315{3, 1, 5};
     auto lhs = getParamFromShape(s2315);
     auto rhs = getParamFromShape(s315);
-    auto shaped = ngraph::builder::numpy_broadcast({lhs, rhs});
+    auto shaped = builder::numpy_broadcast({lhs, rhs});
     const shared_ptr<Node>& ab_lhs = shaped.first;
     const shared_ptr<Node>& ab_rhs = shaped.second;
@@ -194,12 +194,12 @@ TEST(autobroadcast, broadcast_with_dim1)
 // rhs reshape to 1,3,4,5 with no broadcast
 TEST(autobroadcast, broadcast_with_leading_dim1)
 {
-    ngraph::Shape s1345{1, 3, 4, 5};
-    ngraph::Shape s345{3, 4, 5};
+    Shape s1345{1, 3, 4, 5};
+    Shape s345{3, 4, 5};
     auto lhs = getParamFromShape(s1345);
     auto rhs = getParamFromShape(s345);
-    auto shaped = ngraph::builder::numpy_broadcast({lhs, rhs});
+    auto shaped = builder::numpy_broadcast({lhs, rhs});
     const shared_ptr<Node>& ab_lhs = shaped.first;
     const shared_ptr<Node>& ab_rhs = shaped.second;
@@ -212,25 +212,345 @@ TEST(autobroadcast, broadcast_with_leading_dim1)
 TEST(autobroadcast, make_node_2_args)
 {
-    ngraph::Shape s21{2, 1};
-    ngraph::Shape s23{2, 3};
+    Shape s21{2, 1};
+    Shape s23{2, 3};
     auto lhs = getParamFromShape(s21);
     auto rhs = getParamFromShape(s23);
-    shared_ptr<Node> op = ngraph::builder::make_with_numpy_broadcast<ngraph::op::Add>(lhs, rhs);
+    shared_ptr<Node> op = builder::make_with_numpy_broadcast<op::Add>(lhs, rhs);
     EXPECT_NE(op, nullptr);
 }
 TEST(autobroadcast, make_node_3_args)
 {
-    ngraph::Shape s21{2, 1};
-    ngraph::Shape s23{2, 3};
+    Shape s21{2, 1};
+    Shape s23{2, 3};
-    auto predicates = std::make_shared<ngraph::op::Parameter>(ngraph::element::boolean, s23);
+    auto predicates = make_shared<op::Parameter>(element::boolean, s23);
     auto lhs = getParamFromShape(s21);
     auto rhs = getParamFromShape(s23);
-    shared_ptr<Node> op =
-        ngraph::builder::make_with_numpy_broadcast<ngraph::op::Select>(predicates, lhs, rhs);
+    shared_ptr<Node> op = builder::make_with_numpy_broadcast<op::Select>(predicates, lhs, rhs);
     EXPECT_NE(op, nullptr);
 }
TEST(autobroadcast, numpy_broadcast_for_matmul_op_2d)
{
const Shape lhs{3, 1, 4, 6};
const Shape rhs{6, 5};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);
const OutputVector result = builder::numpy_broadcast_for_matmul_operation(lhs_node, rhs_node);
EXPECT_EQ(result.at(0).get_shape(), (Shape{3, 1, 4, 6}));
EXPECT_EQ(result.at(1).get_shape(), (Shape{3, 1, 6, 5}));
}
TEST(autobroadcast, numpy_broadcast_for_matmul_op_3d)
{
const Shape lhs{3, 1, 4, 6};
const Shape rhs{2, 6, 5};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);
const OutputVector result = builder::numpy_broadcast_for_matmul_operation(lhs_node, rhs_node);
EXPECT_EQ(result.at(0).get_shape(), (Shape{3, 2, 4, 6}));
EXPECT_EQ(result.at(1).get_shape(), (Shape{3, 2, 6, 5}));
}
TEST(autobroadcast, numpy_broadcast_for_matmul_op_nop)
{
const Shape lhs{4, 6};
const Shape rhs{6, 5};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);
const OutputVector result = builder::numpy_broadcast_for_matmul_operation(lhs_node, rhs_node);
EXPECT_EQ(result.at(0).get_shape(), (Shape{4, 6}));
EXPECT_EQ(result.at(1).get_shape(), (Shape{6, 5}));
}
TEST(autobroadcast, legacy_broadcast_scalar)
{
const Shape lhs{2, 3, 4, 5};
const Shape rhs{};
size_t start_match_axis{3};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);
const OutputVector result =
builder::legacy_broadcast_for_binary_operation(lhs_node, rhs_node, start_match_axis);
EXPECT_EQ(result.at(0).get_shape(), lhs);
EXPECT_EQ(result.at(1).get_shape(), lhs);
}
TEST(autobroadcast, legacy_broadcast_1elem_tensor)
{
const Shape lhs{2, 3, 4, 5};
const Shape rhs{1, 1, 1};
size_t start_match_axis{1};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);
const OutputVector result =
builder::legacy_broadcast_for_binary_operation(lhs_node, rhs_node, start_match_axis);
EXPECT_EQ(result.at(0).get_shape(), lhs);
EXPECT_EQ(result.at(1).get_shape(), lhs);
}
TEST(autobroadcast, legacy_broadcast_1d)
{
const Shape lhs{2, 3, 4, 5};
const Shape rhs{5};
size_t start_match_axis{3};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);
const OutputVector result =
builder::legacy_broadcast_for_binary_operation(lhs_node, rhs_node, start_match_axis);
EXPECT_EQ(result.at(0).get_shape(), lhs);
EXPECT_EQ(result.at(1).get_shape(), lhs);
}
TEST(autobroadcast, legacy_broadcast_2d)
{
const Shape lhs{2, 3, 4, 5};
const Shape rhs{4, 5};
size_t start_match_axis{2};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);
const OutputVector result =
builder::legacy_broadcast_for_binary_operation(lhs_node, rhs_node, start_match_axis);
EXPECT_EQ(result.at(0).get_shape(), lhs);
EXPECT_EQ(result.at(1).get_shape(), lhs);
}
TEST(autobroadcast, legacy_broadcast_2d_inside)
{
const Shape lhs{2, 3, 4, 5};
const Shape rhs{3, 4};
size_t start_match_axis{1};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);
const OutputVector result =
builder::legacy_broadcast_for_binary_operation(lhs_node, rhs_node, start_match_axis);
EXPECT_EQ(result.at(0).get_shape(), lhs);
EXPECT_EQ(result.at(1).get_shape(), lhs);
}
TEST(autobroadcast, legacy_broadcast_1d_left)
{
const Shape lhs{2, 3, 4, 5};
const Shape rhs{2};
size_t start_match_axis{0};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);
const OutputVector result =
builder::legacy_broadcast_for_binary_operation(lhs_node, rhs_node, start_match_axis);
EXPECT_EQ(result.at(0).get_shape(), lhs);
EXPECT_EQ(result.at(1).get_shape(), lhs);
}
TEST(autobroadcast, legacy_broadcast_identical)
{
const Shape lhs{2, 3, 4, 5};
size_t start_match_axis{0};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, lhs);
const OutputVector result =
builder::legacy_broadcast_for_binary_operation(lhs_node, rhs_node, start_match_axis);
EXPECT_EQ(result.at(0).get_shape(), lhs);
EXPECT_EQ(result.at(1).get_shape(), lhs);
}
TEST(autobroadcast, opset1_legacy_broadcast_scalar)
{
const Shape lhs{2, 3, 4, 5};
const Shape rhs{};
size_t start_match_axis{3};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);
const Output<Node> result = builder::opset1::legacy_broadcast_for_binary_operation(
lhs_node, rhs_node, start_match_axis);
EXPECT_EQ(result.get_shape(), lhs);
}
TEST(autobroadcast, opset1_legacy_broadcast_1elem_tensor)
{
const Shape lhs{2, 3, 4, 5};
const Shape rhs{1, 1, 1};
size_t start_match_axis{1};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);
const Output<Node> result = builder::opset1::legacy_broadcast_for_binary_operation(
lhs_node, rhs_node, start_match_axis);
EXPECT_EQ(result.get_shape(), lhs);
}
TEST(autobroadcast, opset1_legacy_broadcast_1d)
{
const Shape lhs{2, 3, 4, 5};
const Shape rhs{5};
size_t start_match_axis{3};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);
const Output<Node> result = builder::opset1::legacy_broadcast_for_binary_operation(
lhs_node, rhs_node, start_match_axis);
EXPECT_EQ(result.get_shape(), lhs);
}
TEST(autobroadcast, opset1_legacy_broadcast_2d)
{
const Shape lhs{2, 3, 4, 5};
const Shape rhs{4, 5};
size_t start_match_axis{2};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);
const Output<Node> result = builder::opset1::legacy_broadcast_for_binary_operation(
lhs_node, rhs_node, start_match_axis);
EXPECT_EQ(result.get_shape(), lhs);
}
TEST(autobroadcast, opset1_legacy_broadcast_2d_inside)
{
const Shape lhs{2, 3, 4, 5};
const Shape rhs{3, 4};
size_t start_match_axis{1};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);
const Output<Node> result = builder::opset1::legacy_broadcast_for_binary_operation(
lhs_node, rhs_node, start_match_axis);
EXPECT_EQ(result.get_shape(), lhs);
}

TEST(autobroadcast, opset1_legacy_broadcast_1d_left)
{
const Shape lhs{2, 3, 4, 5};
const Shape rhs{2};
size_t start_match_axis{0};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, rhs);
const Output<Node> result = builder::opset1::legacy_broadcast_for_binary_operation(
lhs_node, rhs_node, start_match_axis);
EXPECT_EQ(result.get_shape(), lhs);
}

TEST(autobroadcast, opset1_legacy_broadcast_identical)
{
const Shape lhs{2, 3, 4, 5};
size_t start_match_axis{0};
const auto lhs_node = make_shared<op::Parameter>(element::f32, lhs);
const auto rhs_node = make_shared<op::Parameter>(element::f32, lhs);
const Output<Node> result = builder::opset1::legacy_broadcast_for_binary_operation(
lhs_node, rhs_node, start_match_axis);
EXPECT_EQ(result.get_shape(), lhs);
}
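
// get_axes_mapping_output produces a Constant that maps every input axis to
// the output axis it occupies. Here the broadcast axes {0, 2} leave the input
// axes on output axes {1, 3}.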
TEST(autobroadcast, axes_mapping_from_bcast_axes)
{
const Shape output_shape{2, 3, 4, 5};
const Shape input_shape{3, 5};
const AxisSet broadcast_axes{0, 2};
auto axes_mapping = builder::opset1::get_axes_mapping_output(output_shape, broadcast_axes);
EXPECT_TRUE(axes_mapping.get_node()->is_constant());
Shape axes_mapping_shape = as_type<op::v0::Constant>(axes_mapping.get_node())->get_shape_val();
EXPECT_EQ(axes_mapping_shape.size(), 2);
EXPECT_EQ(axes_mapping_shape, (Shape{1, 3}));
}
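
// A scalar input has no axes to map, so the mapping constant is empty.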
TEST(autobroadcast, axes_mapping_from_bcast_axes_scalar)
{
const Shape output_shape{2, 3, 4, 5};
const Shape input_shape{};
const AxisSet broadcast_axes{0, 1, 2, 3};
auto axes_mapping = builder::opset1::get_axes_mapping_output(output_shape, broadcast_axes);
EXPECT_TRUE(axes_mapping.get_node()->is_constant());
Shape axes_mapping_shape = as_type<op::v0::Constant>(axes_mapping.get_node())->get_shape_val();
EXPECT_EQ(axes_mapping_shape.size(), 0);
EXPECT_EQ(axes_mapping_shape, (Shape{}));
}
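
// With no broadcast axes the mapping is the identity over all output axes.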
TEST(autobroadcast, axes_mapping_from_bcast_axes_identical)
{
const Shape output_shape{2, 3, 4, 5};
const Shape input_shape(output_shape);
const AxisSet broadcast_axes{};
auto axes_mapping = builder::opset1::get_axes_mapping_output(output_shape, broadcast_axes);
EXPECT_TRUE(axes_mapping.get_node()->is_constant());
Shape axes_mapping_shape = as_type<op::v0::Constant>(axes_mapping.get_node())->get_shape_val();
EXPECT_EQ(axes_mapping_shape.size(), output_shape.size());
EXPECT_EQ(axes_mapping_shape, (Shape{0, 1, 2, 3}));
}
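
// The start_match_axis overload derives the mapping from the position at
// which the input shape starts matching the output shape, instead of taking
// an explicit set of broadcast axes: input {3, 4} starting at axis 1 maps to
// output axes {1, 2}.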
TEST(autobroadcast, axes_mapping_start_match_axis)
{
const Shape output_shape{2, 3, 4, 5};
const Shape input_shape{3, 4};
const std::size_t start_match_axis{1};
auto axes_mapping =
builder::opset1::get_axes_mapping_output(output_shape, input_shape, start_match_axis);
EXPECT_TRUE(axes_mapping.get_node()->is_constant());
Shape axes_mapping_shape = as_type<op::v0::Constant>(axes_mapping.get_node())->get_shape_val();
EXPECT_EQ(axes_mapping_shape.size(), 2);
EXPECT_EQ(axes_mapping_shape, (Shape{1, 2}));
}

TEST(autobroadcast, axes_mapping_start_match_axis_scalar)
{
const Shape output_shape{2, 3, 4, 5};
const Shape input_shape{};
const std::size_t start_match_axis{4};
auto axes_mapping =
builder::opset1::get_axes_mapping_output(output_shape, input_shape, start_match_axis);
EXPECT_TRUE(axes_mapping.get_node()->is_constant());
Shape axes_mapping_shape = as_type<op::v0::Constant>(axes_mapping.get_node())->get_shape_val();
EXPECT_EQ(axes_mapping_shape.size(), 0);
EXPECT_EQ(axes_mapping_shape, (Shape{}));
}

TEST(autobroadcast, axes_mapping_start_match_axis_identical)
{
const Shape output_shape{2, 3, 4, 5};
const Shape input_shape{2, 3, 4, 5};
const std::size_t start_match_axis{0};
auto axes_mapping =
builder::opset1::get_axes_mapping_output(output_shape, input_shape, start_match_axis);
EXPECT_TRUE(axes_mapping.get_node()->is_constant());
Shape axes_mapping_shape = as_type<op::v0::Constant>(axes_mapping.get_node())->get_shape_val();
EXPECT_EQ(axes_mapping_shape.size(), output_shape.size());
EXPECT_EQ(axes_mapping_shape, (Shape{0, 1, 2, 3}));
}