Commit 288e2ed4 authored by Mahbub Zaman's avatar Mahbub Zaman Committed by Scott Cyphers

Dyn broadcast initial (#2564)

* Adds new core op DynBroadcast

* Adds new core op DynBroadcast

* Fixes build error caused by recent changes in node validation API

* Addresses code review comments.

* Moves new op under experimental.

* Fixes style errors.

* Silee2/external project rpath (#2525)

* Set rpath for mkl-dnn.

* Set library rpath to  on Linux.

* Use patchelf to set rpath of prebuilt mklml library.

* Add patchelf to Linux Dockerfiles.

* Revert "Add patchelf to Linux Dockerfiles."

This reverts commit 1769505a866061552942e19467ddcc0dad0922e8.

* Revert "Use patchelf to set rpath of prebuilt mklml library."

This reverts commit 726f6553a0450520328607177d64baf48fa93dd2.

* Copy cldnn runtime.

* Copy mlsl libraries.

* add unit tests for the two versions of Backend create_tensor (#2607)

* add unit tests for the two versions of Backend create_tensor

* disable new unit test on GPU until we have time to address it

* Resolves merge conflicts

* Addresses code review comments.

* Fixes merge issues

* Fixes style errors

* Fixes type check to use compatible()

* Reverts unintentional change

* Reverts unintentional change

* Fixes typo in comment

* Addresses code review comments.
parent f8146495
......@@ -128,6 +128,8 @@ set (SRC
    op/equal.hpp
    op/exp.cpp
    op/exp.hpp
    op/experimental/dyn_broadcast.cpp
    op/experimental/dyn_broadcast.hpp
    op/experimental/generate_mask.cpp
    op/experimental/generate_mask.hpp
    op/experimental/quantized_avg_pool.cpp
......
......@@ -86,6 +86,8 @@
#include "ngraph/op/embedding_lookup.hpp"
#include "ngraph/op/equal.hpp"
#include "ngraph/op/exp.hpp"
#include "ngraph/op/experimental/dyn_broadcast.hpp"
#include "ngraph/op/experimental/dyn_broadcast.hpp"
#include "ngraph/op/experimental/shape_of.hpp"
#include "ngraph/op/experimental/transpose.hpp"
#include "ngraph/op/floor.hpp"
......
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#include "ngraph/op/experimental/dyn_broadcast.hpp"
#include "ngraph/op/sum.hpp"
using namespace std;
using namespace ngraph;
op::DynBroadcast::DynBroadcast(const shared_ptr<Node>& arg,
const shared_ptr<Node>& shape,
const shared_ptr<Node>& broadcast_axes)
: Op("DynBroadcast", check_single_output_args({arg, shape, broadcast_axes}))
{
constructor_validate_and_infer_types();
}
void op::DynBroadcast::validate_and_infer_types()
{
// shape node should have integer data type. For now we only allow i64
//TODO: potenially make the type more flexible to include other integer types
auto shape_et = get_input_element_type(1);
NODE_VALIDATION_CHECK(this,
shape_et.compatible(element::Type_t::i64),
"DynBroadcast shape must have element type i64, but has ",
shape_et);
//shape node should produce a one dimensional shape.
auto broadcast_shape_rank = get_input_partial_shape(1).rank();
NODE_VALIDATION_CHECK(this,
broadcast_shape_rank.compatible(1),
"DynBroadcast shape rank must be 1, but has ",
broadcast_shape_rank);
// axes node should have integer data type. For now we only allow i64
//TODO: potenially make the type more flexible to include other integer types
auto axes_et = get_input_element_type(2);
NODE_VALIDATION_CHECK(this,
axes_et.compatible(element::Type_t::i64),
"DynBroadcast axes must have element type i64, but has ",
axes_et);
//axes node should produce a one dimensional shape.
auto axes_shape_rank = get_input_partial_shape(2).rank();
NODE_VALIDATION_CHECK(this,
axes_shape_rank.compatible(1),
"DynBroadcast axes rank must be 1, but has ",
axes_shape_rank);
set_output_type(0, get_input_element_type(0), PartialShape::dynamic());
}
shared_ptr<Node> op::DynBroadcast::copy_with_new_args(const NodeVector& new_args) const
{
check_new_args_count(this, new_args);
return make_shared<DynBroadcast>(new_args.at(0), new_args.at(1), new_args.at(2));
}
// TODO: This function is not implemented!
void op::DynBroadcast::generate_adjoints(autodiff::Adjoints& adjoints, const NodeVector& deltas)
{
throw ngraph_error("generate_adjoints not implemented for DynBroadcast");
}
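The commit notes above mention switching the type check to use compatible(). The consequence worth spelling out: compatible() treats a still-dynamic element type as acceptable, while a concrete mismatched type fails validation. Below is a minimal sketch of that behavior, assuming ngraph's element::dynamic and the Parameter-based setup used in the type-prop tests; the function name is hypothetical and the snippet is illustrative only, not part of this commit.

#include "ngraph/ngraph.hpp"
#include "ngraph/op/experimental/dyn_broadcast.hpp"

using namespace ngraph;
using namespace std;

void compatible_check_sketch()
{
    auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 4});
    auto bc_axes = make_shared<op::Parameter>(element::i64, Shape{1});

    // Accepted: a dynamic element type is compatible with i64.
    auto dyn_shape = make_shared<op::Parameter>(element::dynamic, Shape{1});
    auto bc_ok = make_shared<op::DynBroadcast>(arg, dyn_shape, bc_axes);

    // Rejected: f32 is not compatible with i64, so construction throws
    // NodeValidationFailure from validate_and_infer_types().
    auto f32_shape = make_shared<op::Parameter>(element::f32, Shape{1});
    try
    {
        auto bc_bad = make_shared<op::DynBroadcast>(arg, f32_shape, bc_axes);
    }
    catch (const NodeValidationFailure&)
    {
        // Expected failure path.
    }
}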
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#pragma once

#include "ngraph/axis_set.hpp"
#include "ngraph/op/op.hpp"

namespace ngraph
{
    namespace op
    {
        /// \brief Operation that "adds" axes to an input tensor, replicating elements from
        ///        the input as needed along the new axes. This is the dynamic-shape version
        ///        of the static Broadcast op.
        class DynBroadcast : public Op
        {
        public:
            /// \brief Constructs a dynamic broadcast operation.
            ///
            /// \param arg Node that produces the input tensor to be broadcast.
            /// \param shape Node that produces the shape of the output tensor.
            /// \param broadcast_axes Node that produces the axis positions (0-based) in the
            ///        result that are being broadcast. The remaining axes in shape must be
            ///        the same as the shape of arg.
            DynBroadcast(const std::shared_ptr<Node>& arg,
                         const std::shared_ptr<Node>& shape,
                         const std::shared_ptr<Node>& broadcast_axes);

            void validate_and_infer_types() override;

            virtual std::shared_ptr<Node>
                copy_with_new_args(const NodeVector& new_args) const override;

        protected:
            virtual void generate_adjoints(autodiff::Adjoints& adjoints,
                                           const NodeVector& deltas) override;
        };
    }
}
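For orientation, here is a minimal usage sketch following the pattern of the type-prop tests further down. The key difference from the static Broadcast op is that the output shape and broadcast axes arrive as graph inputs, so the output's partial shape stays fully dynamic at compile time. The helper name is hypothetical; this is illustrative, not part of the commit.

#include "ngraph/ngraph.hpp"
#include "ngraph/op/experimental/dyn_broadcast.hpp"

using namespace ngraph;
using namespace std;

shared_ptr<op::DynBroadcast> make_dyn_broadcast_example()
{
    // Input tensor to be broadcast.
    auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 4});
    // Desired output shape: a rank-1 i64 tensor supplied at runtime.
    auto bc_shape = make_shared<op::Parameter>(element::i64, Shape{4});
    // 0-based positions of the broadcast axes, also rank-1 i64.
    auto bc_axes = make_shared<op::Parameter>(element::i64, Shape{2});

    // The result has element type f32 and a fully dynamic partial shape.
    return make_shared<op::DynBroadcast>(arg, bc_shape, bc_axes);
}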
......@@ -79,6 +79,7 @@ NGRAPH_OP(Cos, ngraph::op)
NGRAPH_OP(Cosh, ngraph::op)
NGRAPH_OP(Dequantize, ngraph::op)
NGRAPH_OP(Divide, ngraph::op)
NGRAPH_OP(Dot, ngraph::op)
NGRAPH_OP(DynBroadcast, ngraph::op)
NGRAPH_OP(Equal, ngraph::op)
NGRAPH_OP(Exp, ngraph::op)
......
......@@ -225,6 +225,7 @@ bool runtime::gpu::GPU_Backend::is_supported(const Node& op) const
"StopGradient",
"EmbeddingLookup",
"GenerateMask",
"DynBroadcast",
"Transpose"};
set<string> float_only = {"MaxPoolBackprop", "AvgPoolBackprop", "MaxPool", "Dot"};
......
......@@ -57,6 +57,7 @@
#include "ngraph/op/embedding_lookup.hpp"
#include "ngraph/op/equal.hpp"
#include "ngraph/op/exp.hpp"
#include "ngraph/op/experimental/dyn_broadcast.hpp"
#include "ngraph/op/experimental/generate_mask.hpp"
#include "ngraph/op/experimental/quantized_avg_pool.hpp"
#include "ngraph/op/experimental/quantized_conv.hpp"
......@@ -1382,6 +1383,11 @@ std::string runtime::gpu::GPU_Emitter::emit_TopK(EMIT_ARGS)
    return compiled_function->add_to_runtime(index, function_name, args, out);
}

std::string runtime::gpu::GPU_Emitter::emit_DynBroadcast(EMIT_ARGS)
{
    throw unsupported_op("Unsupported op '" + node->description() + "'");
}

std::string runtime::gpu::GPU_Emitter::emit_Transpose(EMIT_ARGS)
{
    throw unsupported_op("Unsupported op '" + node->description() + "'");
......
......@@ -2012,6 +2012,7 @@ shared_ptr<runtime::Executable>
        case OP_TYPEID::TopK:
        case OP_TYPEID::Transpose:
        case OP_TYPEID::EmbeddingLookup:
        case OP_TYPEID::DynBroadcast:
        case OP_TYPEID::Passthrough:
        {
            throw unsupported_op("Unsupported op '" + op->description() +
......
......@@ -35,6 +35,8 @@
#include "ngraph/op/dequantize.hpp"
#include "ngraph/op/dot.hpp"
#include "ngraph/op/embedding_lookup.hpp"
#include "ngraph/op/experimental/dyn_broadcast.hpp"
#include "ngraph/op/experimental/dyn_broadcast.hpp"
#include "ngraph/op/experimental/generate_mask.hpp"
#include "ngraph/op/experimental/shape_of.hpp"
#include "ngraph/op/get_output_element.hpp"
......@@ -1247,6 +1249,7 @@ private:
            }
            break;
        }
        case OP_TYPEID::DynBroadcast:
        case OP_TYPEID::Transpose:
        default: throw unsupported_op("Unsupported op '" + node.description() + "'");
#pragma GCC diagnostic pop
......
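Both the GPU and interpreter backends currently reject the op, so the commit ships no executable reference for its semantics. For intuition only, a hypothetical reference kernel matching the documented behavior (every output coordinate maps to the input coordinate obtained by deleting the broadcast axes, as in static Broadcast). The function name, float element type, and row-major layout are assumptions for the sketch, not part of the commit.

#include <cstddef>
#include <set>
#include <vector>

// Hypothetical reference kernel: replicate `in` along `broadcast_axes` to
// fill a row-major output of shape `out_shape`. The input's shape is
// `out_shape` with the broadcast axes deleted.
void dyn_broadcast_ref(const float* in,
                       float* out,
                       const std::vector<size_t>& out_shape,
                       const std::set<size_t>& broadcast_axes)
{
    // Precompute row-major strides for the output shape.
    std::vector<size_t> out_strides(out_shape.size(), 1);
    for (size_t axis = out_shape.size(); axis-- > 1;)
    {
        out_strides[axis - 1] = out_strides[axis] * out_shape[axis];
    }

    size_t out_size = 1;
    for (size_t d : out_shape)
    {
        out_size *= d;
    }

    for (size_t out_index = 0; out_index < out_size; ++out_index)
    {
        // Decompose the flat output index into per-axis coordinates and
        // accumulate the flat input index over the non-broadcast axes.
        size_t remainder = out_index;
        size_t in_index = 0;
        for (size_t axis = 0; axis < out_shape.size(); ++axis)
        {
            size_t coord = remainder / out_strides[axis];
            remainder %= out_strides[axis];
            if (broadcast_axes.count(axis) == 0)
            {
                in_index = in_index * out_shape[axis] + coord;
            }
        }
        out[out_index] = in[in_index];
    }
}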
......@@ -47,6 +47,7 @@
#include "ngraph/op/embedding_lookup.hpp"
#include "ngraph/op/equal.hpp"
#include "ngraph/op/exp.hpp"
#include "ngraph/op/experimental/dyn_broadcast.hpp"
#include "ngraph/op/experimental/generate_mask.hpp"
#include "ngraph/op/experimental/quantized_avg_pool.hpp"
#include "ngraph/op/experimental/quantized_conv.hpp"
......@@ -747,6 +748,11 @@ static shared_ptr<ngraph::Function>
            }
            break;
        }
        case OP_TYPEID::DynBroadcast:
        {
            node = make_shared<op::DynBroadcast>(args[0], args[1], args[2]);
            break;
        }
        case OP_TYPEID::EmbeddingLookup:
        {
            node = make_shared<op::EmbeddingLookup>(args[0], args[1]);
......@@ -1500,6 +1506,8 @@ static json write(const Node& n, bool binary_constant_data)
node["reduction_axes_count"] = tmp->get_reduction_axes_count();
break;
}
case OP_TYPEID::DynBroadcast: { break;
}
case OP_TYPEID::EmbeddingLookup: { break;
}
case OP_TYPEID::Equal: { break;
......
......@@ -204,6 +204,104 @@ TEST(type_prop, broadcast_partial_rank_static_dynamic_shape_mismatch_wrong_size)
    }
}

TEST(type_prop, dyn_broadcast_shape_wrong_rank)
{
    auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 4});
    auto bc_shape = make_shared<op::Parameter>(element::i64, Shape{1, 1});
    auto bc_axes = make_shared<op::Parameter>(element::i64, Shape{1});

    try
    {
        auto bc = make_shared<op::DynBroadcast>(arg, bc_shape, bc_axes);
        FAIL() << "DynBroadcast: wrong shape rank not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(), "DynBroadcast shape rank must be 1");
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, dyn_broadcast_axes_wrong_rank)
{
    auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 4});
    auto bc_shape = make_shared<op::Parameter>(element::i64, Shape{1});
    auto bc_axes = make_shared<op::Parameter>(element::i64, Shape{2, 2});

    try
    {
        auto bc = make_shared<op::DynBroadcast>(arg, bc_shape, bc_axes);
        FAIL() << "DynBroadcast: wrong axes rank not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(), "DynBroadcast axes rank must be 1");
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, dyn_broadcast_output_partial_shape_dynamic)
{
    auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 4});
    auto bc_shape = make_shared<op::Parameter>(element::i64, Shape{1});
    auto bc_axes = make_shared<op::Parameter>(element::i64, Shape{2});

    auto bc = make_shared<op::DynBroadcast>(arg, bc_shape, bc_axes);
    ASSERT_TRUE(bc->get_output_partial_shape(0).is_dynamic());
}

TEST(type_prop, dyn_broadcast_broadcast_shape_et_wrong)
{
    auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 4});
    // wrong element type
    auto bc_shape = make_shared<op::Parameter>(element::boolean, Shape{1});
    auto bc_axes = make_shared<op::Parameter>(element::i64, Shape{2});

    try
    {
        auto bc = make_shared<op::DynBroadcast>(arg, bc_shape, bc_axes);
        FAIL() << "DynBroadcast: did not detect shape element type not i64";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(),
                             std::string("DynBroadcast shape must have element type i64"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, dyn_broadcast_axes_et_wrong)
{
    auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 4});
    auto bc_shape = make_shared<op::Parameter>(element::i64, Shape{1});
    // wrong element type
    auto bc_axes = make_shared<op::Parameter>(element::f32, Shape{2});

    try
    {
        auto bc = make_shared<op::DynBroadcast>(arg, bc_shape, bc_axes);
        FAIL() << "DynBroadcast: did not detect axes element type not i64";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(),
                             std::string("DynBroadcast axes must have element type i64"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, batchnorm_training_rank_less_than_2)
{
    auto dummy = make_shared<op::Parameter>(element::f32, Shape{1});
......