Commit 579e44b8 authored by Adam Procter's avatar Adam Procter Committed by Scott Cyphers

Split up type_prop.cpp (#3210)

* Start splitting type_prop.cpp (just a few for now, to get feedback on general pattern)

* Split out some more type_prop stuff

* A bunch more

* Move one test out of type_prop into build_graph

* Split Reverse from ReverseSequence (oops), and fix a typo I noticed in dyn_reshape.cpp

* fix EOF newline

* Style.

* Add newline at eof.
parent 306d284a
......@@ -76,7 +76,75 @@ set(SRC
shape.cpp
specialize_function.cpp
tensor.cpp
type_prop.cpp
type_prop/all.cpp
type_prop/any.cpp
type_prop/avg_pool.cpp
type_prop/batch_mat_mul.cpp
type_prop/batch_norm.cpp
type_prop/binary_elementwise.cpp
type_prop/broadcast.cpp
type_prop/clamp.cpp
type_prop/concat.cpp
type_prop/constant.cpp
type_prop/convert.cpp
type_prop/convolution.cpp
type_prop/convolution_bias.cpp
type_prop/depth_to_space.cpp
type_prop/dequantize.cpp
type_prop/dot.cpp
type_prop/dyn_broadcast.cpp
type_prop/dyn_pad.cpp
type_prop/dyn_replace_slice.cpp
type_prop/dyn_reshape.cpp
type_prop/dyn_slice.cpp
type_prop/elu.cpp
type_prop/embedding_lookup.cpp
type_prop/fake_quantize.cpp
type_prop/gather.cpp
type_prop/gather_nd.cpp
type_prop/gemm.cpp
type_prop/get_output_element.cpp
type_prop/grn.cpp
type_prop/group_convolution.cpp
type_prop/group_convolution_transpose.cpp
type_prop/gru_cell.cpp
type_prop/hard_sigmoid.cpp
type_prop/index_reduction.cpp
type_prop/leaky_relu.cpp
type_prop/lstm_cell.cpp
type_prop/max_pool.cpp
type_prop/mvn.cpp
type_prop/normalize.cpp
type_prop/one_hot.cpp
type_prop/pad.cpp
type_prop/parameter.cpp
type_prop/prelu.cpp
type_prop/quantize.cpp
type_prop/quantized_convolution.cpp
type_prop/range.cpp
type_prop/replace_slice.cpp
type_prop/reshape.cpp
type_prop/reverse.cpp
type_prop/reverse_sequence.cpp
type_prop/rnn_cell.cpp
type_prop/scale_shift.cpp
type_prop/scatter_add.cpp
type_prop/scatter_nd.cpp
type_prop/select.cpp
type_prop/shape_of.cpp
type_prop/shuffle_channels.cpp
type_prop/slice.cpp
type_prop/space_to_depth.cpp
type_prop/split.cpp
type_prop/squared_difference.cpp
type_prop/squeeze.cpp
type_prop/sum.cpp
type_prop/tile.cpp
type_prop/top_k.cpp
type_prop/transpose.cpp
type_prop/unary_elementwise.cpp
type_prop/unsqueeze.cpp
type_prop_benchmark.cpp
type_prop_layers.cpp
util.cpp
zero_dim_tensor_elimination.cpp
......
......@@ -166,3 +166,24 @@ TEST(build_graph, multi_output_split)
2);
EXPECT_EQ(conv->get_shape(), (Shape{64, 128, 91, 131}));
}
TEST(build_graph, function_revalidate_and_infer)
{
    // Build input -> DynReshape -> Relu and verify the initially inferred shapes.
    auto input_param = make_shared<op::Parameter>(element::f32, Shape{2, 4, 6, 8});
    auto shape_pattern = op::Constant::create(element::i64, Shape{6}, {1, 3, 16, 2, 2, 2});
    auto dyn_reshape = make_shared<op::DynReshape>(input_param, shape_pattern);
    auto activation = make_shared<op::Relu>(dyn_reshape);
    auto func = make_shared<Function>(activation, ParameterVector{input_param});

    EXPECT_EQ(dyn_reshape->get_output_element_type(0), element::f32);
    EXPECT_EQ(dyn_reshape->get_output_shape(0), (Shape{1, 3, 16, 2, 2, 2}));
    EXPECT_EQ(func->get_output_shape(0), (Shape{1, 3, 16, 2, 2, 2}));

    // Swap in a new shape pattern, then re-run validation: the new shape must
    // propagate through the reshape and out of the function.
    auto updated_pattern = op::Constant::create(element::i64, Shape{2}, {32, 12});
    dyn_reshape->input(1).replace_source_output(updated_pattern->output(0));
    func->validate_nodes_and_infer_types();
    EXPECT_EQ(dyn_reshape->get_output_shape(0), (Shape{32, 12}));
    EXPECT_EQ(func->get_output_shape(0), (Shape{32, 12}));
}
This diff is collapsed.
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "util/type_prop.hpp"
using namespace std;
using namespace ngraph;
TEST(type_prop, all_deduce)
{
    const auto input = make_shared<op::Parameter>(element::boolean, Shape{2, 4});

    // Reducing over one axis removes that axis from the output shape.
    const auto reduce_axis0 = make_shared<op::All>(input, AxisSet{0});
    ASSERT_EQ(reduce_axis0->get_element_type(), element::boolean);
    ASSERT_EQ(reduce_axis0->get_shape(), (Shape{4}));

    const auto reduce_axis1 = make_shared<op::All>(input, AxisSet{1});
    ASSERT_EQ(reduce_axis1->get_element_type(), element::boolean);
    ASSERT_EQ(reduce_axis1->get_shape(), (Shape{2}));

    // Reducing over every axis yields a scalar.
    const auto reduce_both = make_shared<op::All>(input, AxisSet{0, 1});
    ASSERT_EQ(reduce_both->get_element_type(), element::boolean);
    ASSERT_EQ(reduce_both->get_shape(), (Shape{}));

    // An empty axis set leaves the input shape untouched.
    const auto reduce_none = make_shared<op::All>(input, AxisSet{});
    ASSERT_EQ(reduce_none->get_element_type(), element::boolean);
    ASSERT_EQ(reduce_none->get_shape(), (Shape{2, 4}));
}

TEST(type_prop, all_deduce_et_dynamic)
{
    // A dynamic input element type is resolved to boolean by All.
    const auto input = make_shared<op::Parameter>(element::dynamic, Shape{2, 4});

    const auto reduce_axis0 = make_shared<op::All>(input, AxisSet{0});
    ASSERT_EQ(reduce_axis0->get_element_type(), element::boolean);
    ASSERT_EQ(reduce_axis0->get_shape(), (Shape{4}));

    const auto reduce_axis1 = make_shared<op::All>(input, AxisSet{1});
    ASSERT_EQ(reduce_axis1->get_element_type(), element::boolean);
    ASSERT_EQ(reduce_axis1->get_shape(), (Shape{2}));

    const auto reduce_both = make_shared<op::All>(input, AxisSet{0, 1});
    ASSERT_EQ(reduce_both->get_element_type(), element::boolean);
    ASSERT_EQ(reduce_both->get_shape(), (Shape{}));

    const auto reduce_none = make_shared<op::All>(input, AxisSet{});
    ASSERT_EQ(reduce_none->get_element_type(), element::boolean);
    ASSERT_EQ(reduce_none->get_shape(), (Shape{2, 4}));
}

TEST(type_prop, all_et_non_boolean)
{
    // All requires a boolean input element type; i32 must be rejected.
    const auto input = make_shared<op::Parameter>(element::i32, Shape{2, 4});
    try
    {
        const auto node = make_shared<op::All>(input, AxisSet{0, 1});
        // Should have thrown, so fail if it didn't
        FAIL() << "Did not detect invalid element type for All";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(), std::string("Input element type must be boolean"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, all_axis_oob)
{
    // Axis 2 is out of range for a rank-2 input and must be rejected.
    const auto input = make_shared<op::Parameter>(element::boolean, Shape{2, 4});
    try
    {
        const auto node = make_shared<op::All>(input, AxisSet{0, 2, 1});
        // Should have thrown, so fail if it didn't
        FAIL() << "Did not detect out-of-bound axis for All";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(), std::string("Reduction axis (2) is out of bounds"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, all_partial_rank_dynamic)
{
    // With a rank-dynamic input no axis can be proven out of bounds, so any
    // axis set is accepted and the output shape stays dynamic.
    const auto input = make_shared<op::Parameter>(element::boolean, PartialShape::dynamic());
    const auto axes = AxisSet{2385, 0, 4404}; // arbitrary
    const auto node = make_shared<op::All>(input, axes);

    EXPECT_EQ(node->get_output_element_type(0), element::boolean);
    EXPECT_TRUE(node->get_output_partial_shape(0).is_dynamic());
}

TEST(type_prop, all_partial_rank_static_dynamic_ok_result_static)
{
    // The reduced axes are all static, so the result shape is fully static
    // even though the input has a dynamic dimension.
    const auto input = make_shared<op::Parameter>(element::boolean,
                                                  PartialShape{1, 2, Dimension::dynamic(), 4, 5});
    const auto node = make_shared<op::All>(input, AxisSet{2, 3});

    EXPECT_EQ(node->get_output_element_type(0), element::boolean);
    EXPECT_EQ(node->get_shape(), (Shape{1, 2, 5}));
}

TEST(type_prop, all_partial_rank_static_dynamic_ok_result_dynamic)
{
    // A dynamic dimension that is not reduced survives into the output shape.
    const auto input = make_shared<op::Parameter>(
        element::boolean, PartialShape{1, 2, Dimension::dynamic(), 4, Dimension::dynamic()});
    const auto node = make_shared<op::All>(input, AxisSet{2, 3});

    EXPECT_EQ(node->get_output_element_type(0), element::boolean);
    EXPECT_TRUE(
        node->get_output_partial_shape(0).same_scheme(PartialShape{1, 2, Dimension::dynamic()}));
}

TEST(type_prop, all_partial_rank_static_dynamic_axes_oob)
{
    // Rank is known (5), so axis 5 can be rejected even with dynamic dims.
    const auto input = make_shared<op::Parameter>(
        element::boolean, PartialShape{1, 2, Dimension::dynamic(), 4, Dimension::dynamic()});
    const auto axes = AxisSet{2, 5, 1};
    try
    {
        const auto node = make_shared<op::All>(input, axes);
        // Should have thrown, so fail if it didn't
        FAIL() << "Did not detect out-of-bound axis for All (rank-static dynamic input)";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(), std::string("Reduction axis (5) is out of bounds"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "util/type_prop.hpp"
using namespace std;
using namespace ngraph;
TEST(type_prop, any_deduce)
{
    const auto input = make_shared<op::Parameter>(element::boolean, Shape{2, 4});

    // Reducing over one axis removes that axis from the output shape.
    const auto reduce_axis0 = make_shared<op::Any>(input, AxisSet{0});
    ASSERT_EQ(reduce_axis0->get_element_type(), element::boolean);
    ASSERT_EQ(reduce_axis0->get_shape(), (Shape{4}));

    const auto reduce_axis1 = make_shared<op::Any>(input, AxisSet{1});
    ASSERT_EQ(reduce_axis1->get_element_type(), element::boolean);
    ASSERT_EQ(reduce_axis1->get_shape(), (Shape{2}));

    // Reducing over every axis yields a scalar.
    const auto reduce_both = make_shared<op::Any>(input, AxisSet{0, 1});
    ASSERT_EQ(reduce_both->get_element_type(), element::boolean);
    ASSERT_EQ(reduce_both->get_shape(), (Shape{}));

    // An empty axis set leaves the input shape untouched.
    const auto reduce_none = make_shared<op::Any>(input, AxisSet{});
    ASSERT_EQ(reduce_none->get_element_type(), element::boolean);
    ASSERT_EQ(reduce_none->get_shape(), (Shape{2, 4}));
}

TEST(type_prop, any_deduce_et_dynamic)
{
    // A dynamic input element type is resolved to boolean by Any.
    const auto input = make_shared<op::Parameter>(element::dynamic, Shape{2, 4});

    const auto reduce_axis0 = make_shared<op::Any>(input, AxisSet{0});
    ASSERT_EQ(reduce_axis0->get_element_type(), element::boolean);
    ASSERT_EQ(reduce_axis0->get_shape(), (Shape{4}));

    const auto reduce_axis1 = make_shared<op::Any>(input, AxisSet{1});
    ASSERT_EQ(reduce_axis1->get_element_type(), element::boolean);
    ASSERT_EQ(reduce_axis1->get_shape(), (Shape{2}));

    const auto reduce_both = make_shared<op::Any>(input, AxisSet{0, 1});
    ASSERT_EQ(reduce_both->get_element_type(), element::boolean);
    ASSERT_EQ(reduce_both->get_shape(), (Shape{}));

    const auto reduce_none = make_shared<op::Any>(input, AxisSet{});
    ASSERT_EQ(reduce_none->get_element_type(), element::boolean);
    ASSERT_EQ(reduce_none->get_shape(), (Shape{2, 4}));
}

TEST(type_prop, any_et_non_boolean)
{
    // Any requires a boolean input element type; i32 must be rejected.
    const auto input = make_shared<op::Parameter>(element::i32, Shape{2, 4});
    try
    {
        const auto node = make_shared<op::Any>(input, AxisSet{0, 1});
        // Should have thrown, so fail if it didn't
        FAIL() << "Did not detect invalid element type for Any";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(), std::string("Input element type must be boolean"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, any_axis_oob)
{
    // Axis 2 is out of range for a rank-2 input and must be rejected.
    const auto input = make_shared<op::Parameter>(element::boolean, Shape{2, 4});
    try
    {
        const auto node = make_shared<op::Any>(input, AxisSet{0, 2, 1});
        // Should have thrown, so fail if it didn't
        FAIL() << "Did not detect out-of-bound axis for Any";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(), std::string("Reduction axis (2) is out of bounds"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, any_partial_rank_dynamic)
{
    // With a rank-dynamic input no axis can be proven out of bounds, so any
    // axis set is accepted and the output shape stays dynamic.
    const auto input = make_shared<op::Parameter>(element::boolean, PartialShape::dynamic());
    const auto axes = AxisSet{2385, 0, 4404}; // arbitrary
    const auto node = make_shared<op::Any>(input, axes);

    EXPECT_EQ(node->get_output_element_type(0), element::boolean);
    EXPECT_TRUE(node->get_output_partial_shape(0).is_dynamic());
}

TEST(type_prop, any_partial_rank_static_dynamic_ok_result_static)
{
    // The reduced axes are all static, so the result shape is fully static
    // even though the input has a dynamic dimension.
    const auto input = make_shared<op::Parameter>(element::boolean,
                                                  PartialShape{1, 2, Dimension::dynamic(), 4, 5});
    const auto node = make_shared<op::Any>(input, AxisSet{2, 3});

    EXPECT_EQ(node->get_output_element_type(0), element::boolean);
    EXPECT_EQ(node->get_shape(), (Shape{1, 2, 5}));
}

TEST(type_prop, any_partial_rank_static_dynamic_ok_result_dynamic)
{
    // A dynamic dimension that is not reduced survives into the output shape.
    const auto input = make_shared<op::Parameter>(
        element::boolean, PartialShape{1, 2, Dimension::dynamic(), 4, Dimension::dynamic()});
    const auto node = make_shared<op::Any>(input, AxisSet{2, 3});

    EXPECT_EQ(node->get_output_element_type(0), element::boolean);
    EXPECT_TRUE(
        node->get_output_partial_shape(0).same_scheme(PartialShape{1, 2, Dimension::dynamic()}));
}

TEST(type_prop, any_partial_rank_static_dynamic_axes_oob)
{
    // Rank is known (5), so axis 5 can be rejected even with dynamic dims.
    const auto input = make_shared<op::Parameter>(
        element::boolean, PartialShape{1, 2, Dimension::dynamic(), 4, Dimension::dynamic()});
    const auto axes = AxisSet{2, 5, 1};
    try
    {
        const auto node = make_shared<op::Any>(input, axes);
        // Should have thrown, so fail if it didn't
        FAIL() << "Did not detect out-of-bound axis for Any (rank-static dynamic input)";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(), std::string("Reduction axis (5) is out of bounds"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
This diff is collapsed.
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "util/type_prop.hpp"
using namespace std;
using namespace ngraph;
TEST(type_prop, batchmatmul_deduce_3d)
{
    // Deduce type for batched matrix/matrix arguments: [5,4,2] x [5,2,3] -> [5,4,3].
    auto param1 = make_shared<op::Parameter>(element::f32, Shape{5, 4, 2});
    auto param2 = make_shared<op::Parameter>(element::f32, Shape{5, 2, 3});
    auto bc = make_shared<op::BatchMatMul>(param1, param2);
    ASSERT_EQ(bc->get_element_type(), element::f32);
    ASSERT_EQ(bc->get_shape(), (Shape{5, 4, 3}));
}

TEST(type_prop, batchmatmul_deduce_left_rank_wrong)
{
    // Type deduction fails because the left argument is rank 4, not rank 3.
    // (Comment/diagnostic previously mislabeled this as an element type mismatch.)
    auto param1 = make_shared<op::Parameter>(element::f32, Shape{2, 5, 4, 2});
    auto param2 = make_shared<op::Parameter>(element::f32, Shape{5, 2, 5});
    try
    {
        auto bc = make_shared<op::BatchMatMul>(param1, param2);
        // Should have thrown, so fail if it didn't
        FAIL() << "Left argument rank mismatch not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(), std::string("shape must have rank 3"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, batchmatmul_deduce_right_rank_wrong)
{
    // Type deduction fails because the right argument is rank 4, not rank 3.
    // (Comment/diagnostic previously mislabeled this as an element type mismatch.)
    auto param1 = make_shared<op::Parameter>(element::f32, Shape{5, 4, 2});
    auto param2 = make_shared<op::Parameter>(element::f32, Shape{2, 5, 2, 5});
    try
    {
        auto bc = make_shared<op::BatchMatMul>(param1, param2);
        // Should have thrown, so fail if it didn't
        FAIL() << "Right argument rank mismatch not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(), std::string("shape must have rank 3"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, batchmatmul_deduce_element_type_mismatch)
{
    // Type deduction fails due to element type mismatch (f32 vs. i32).
    auto param1 = make_shared<op::Parameter>(element::f32, Shape{5, 4, 2});
    auto param2 = make_shared<op::Parameter>(element::i32, Shape{5, 2, 5});
    try
    {
        auto bc = make_shared<op::BatchMatMul>(param1, param2);
        // Should have thrown, so fail if it didn't
        FAIL() << "Element type mismatch not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(), std::string("compatible element type"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, batchmatmul_deduce_reduction_axes_size_mismatch)
{
    // Type deduction fails due to reduction axes size mismatch (inner dims 2 vs. 3).
    auto param1 = make_shared<op::Parameter>(element::f32, Shape{6, 4, 2});
    auto param2 = make_shared<op::Parameter>(element::f32, Shape{6, 3, 5});
    try
    {
        auto bc = make_shared<op::BatchMatMul>(param1, param2);
        // Should have thrown, so fail if it didn't
        FAIL() << "BatchMatMul reduction axes size mismatch not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(), std::string("Product dimensions are not equal"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, batchmatmul_partial_both_rank_dynamic_implicit)
{
    // Two rank-dynamic inputs: only the output rank (3) can be deduced.
    auto param0 = make_shared<op::Parameter>(element::f32, PartialShape::dynamic());
    auto param1 = make_shared<op::Parameter>(element::f32, PartialShape::dynamic());
    auto d = make_shared<op::BatchMatMul>(param0, param1);

    ASSERT_TRUE(d->get_output_partial_shape(0).rank().same_scheme(3));
}

TEST(type_prop, batchmatmul_partial_left_rank_dynamic_right_rank_static_dynamic)
{
    auto param0 = make_shared<op::Parameter>(element::f32, PartialShape::dynamic());
    auto param1 =
        make_shared<op::Parameter>(element::f32, PartialShape{Dimension::dynamic(), 2, 3});
    auto d = make_shared<op::BatchMatMul>(param0, param1);

    ASSERT_TRUE(d->get_output_partial_shape(0).rank().same_scheme(3));
}

TEST(type_prop, batchmatmul_partial_left_rank_static_dynamic_right_rank_dynamic)
{
    auto param0 =
        make_shared<op::Parameter>(element::f32, PartialShape{Dimension::dynamic(), 2, 3});
    auto param1 = make_shared<op::Parameter>(element::f32, PartialShape::dynamic());
    auto d = make_shared<op::BatchMatMul>(param0, param1);

    ASSERT_TRUE(d->get_output_partial_shape(0).rank().same_scheme(3));
}

TEST(type_prop, batchmatmul_partial_left_rank_static_dynamic_right_rank_static)
{
    // The static batch dim (3) from the right side fills in the left's dynamic dim.
    auto param0 =
        make_shared<op::Parameter>(element::f32, PartialShape{Dimension::dynamic(), 2, 4});
    auto param1 = make_shared<op::Parameter>(element::f32, PartialShape{3, 4, 5});
    auto d = make_shared<op::BatchMatMul>(param0, param1);

    ASSERT_TRUE(d->get_output_partial_shape(0).same_scheme(PartialShape{3, 2, 5}));
}

TEST(type_prop, batchmatmul_partial_left_et_dynamic)
{
    // A dynamic element type on one side resolves to the other side's type.
    auto param0 = make_shared<op::Parameter>(element::dynamic, PartialShape::dynamic());
    auto param1 = make_shared<op::Parameter>(element::f32, PartialShape::dynamic());
    auto d = make_shared<op::BatchMatMul>(param0, param1);

    ASSERT_EQ(d->get_output_element_type(0), element::f32);
}

TEST(type_prop, batchmatmul_partial_right_et_dynamic)
{
    auto param0 = make_shared<op::Parameter>(element::i32, PartialShape::dynamic());
    auto param1 = make_shared<op::Parameter>(element::dynamic, PartialShape::dynamic());
    auto d = make_shared<op::BatchMatMul>(param0, param1);

    ASSERT_EQ(d->get_output_element_type(0), element::i32);
}

TEST(type_prop, batchmatmul_partial_both_et_dynamic)
{
    // Both element types dynamic: the output element type stays dynamic.
    auto param0 = make_shared<op::Parameter>(element::dynamic, PartialShape::dynamic());
    auto param1 = make_shared<op::Parameter>(element::dynamic, PartialShape::dynamic());
    auto d = make_shared<op::BatchMatMul>(param0, param1);

    ASSERT_EQ(d->get_output_element_type(0), element::dynamic);
}
This diff is collapsed.
This diff is collapsed.
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "util/type_prop.hpp"
using namespace std;
using namespace ngraph;
TEST(type_prop, broadcast_deduce)
{
    // Broadcasting a [2,4] argument along new axis 1 yields [2,3,4].
    const auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 4});
    const Shape out_shape{2, 3, 4};
    const auto broadcast = make_shared<op::Broadcast>(arg, out_shape, AxisSet{1});
    ASSERT_EQ(broadcast->get_element_type(), element::f32);
    ASSERT_EQ(broadcast->get_shape(), out_shape);
}

TEST(type_prop, broadcast_axes_oob)
{
    // Axis 3 exceeds the rank of the requested output shape; must be rejected.
    const auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 4});
    const auto out_shape = Shape{2, 3, 4};
    try
    {
        const auto broadcast = make_shared<op::Broadcast>(arg, out_shape, AxisSet{1, 3});
        FAIL() << "Broadcast axis out of bounds not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(),
                             "Broadcast axis index (3) exceeds specified output shape rank");
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, broadcast_shape_mismatch_wrong_rank)
{
    // Output rank 4 cannot be formed from a rank-2 argument plus one broadcast axis.
    const auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 4});
    const auto out_shape = Shape{2, 3, 4, 5};
    try
    {
        const auto broadcast = make_shared<op::Broadcast>(arg, out_shape, AxisSet{1});
        FAIL() << "Output shape mismatch (wrong rank) not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(
            error.what(),
            "Broadcast argument shape, specified output shape, and axes are incompatible");
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, broadcast_shape_mismatch_wrong_size)
{
    // Non-broadcast output dims must match the argument (5 != 4 here).
    const auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 4});
    const auto out_shape = Shape{2, 3, 5};
    try
    {
        const auto broadcast = make_shared<op::Broadcast>(arg, out_shape, AxisSet{1});
        FAIL() << "Output shape mismatch (wrong size) not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(
            error.what(),
            "Broadcast argument shape, specified output shape, and axes are incompatible");
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, broadcast_partial_rank_dynamic_ok)
{
    // A rank-dynamic argument is compatible with any requested output shape.
    const auto arg = make_shared<op::Parameter>(element::f32, PartialShape::dynamic());
    const Shape out_shape{2, 3, 4};
    const auto broadcast = make_shared<op::Broadcast>(arg, out_shape, AxisSet{1});
    ASSERT_EQ(broadcast->get_element_type(), element::f32);
    ASSERT_EQ(broadcast->get_shape(), out_shape);
}

TEST(type_prop, broadcast_partial_rank_dynamic_axes_oob)
{
    // The axis bound is checked against the output shape, so it fails even
    // when the argument rank is dynamic.
    const auto arg = make_shared<op::Parameter>(element::f32, PartialShape::dynamic());
    const auto out_shape = Shape{2, 3, 4};
    try
    {
        const auto broadcast = make_shared<op::Broadcast>(arg, out_shape, AxisSet{1, 3});
        FAIL() << "Broadcast axis out of bounds not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(),
                             "Broadcast axis index (3) exceeds specified output shape rank");
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, broadcast_partial_rank_static_dynamic_ok)
{
    // The dynamic argument dim is allowed to line up with output dim 2.
    const auto arg = make_shared<op::Parameter>(element::f32, PartialShape{Dimension::dynamic(), 4});
    const Shape out_shape{2, 3, 4};
    const auto broadcast = make_shared<op::Broadcast>(arg, out_shape, AxisSet{1});
    ASSERT_EQ(broadcast->get_element_type(), element::f32);
    ASSERT_EQ(broadcast->get_shape(), out_shape);
}

TEST(type_prop, broadcast_partial_rank_static_dynamic_axes_oob)
{
    const auto arg = make_shared<op::Parameter>(element::f32, PartialShape{Dimension::dynamic(), 4});
    const auto out_shape = Shape{2, 3, 4};
    try
    {
        const auto broadcast = make_shared<op::Broadcast>(arg, out_shape, AxisSet{1, 3});
        FAIL() << "Broadcast axis out of bounds not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(),
                             "Broadcast axis index (3) exceeds specified output shape rank");
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, broadcast_partial_rank_static_dynamic_shape_mismatch_wrong_rank)
{
    const auto arg = make_shared<op::Parameter>(element::f32, PartialShape{Dimension::dynamic(), 4});
    const auto out_shape = Shape{2, 3, 4, 5};
    try
    {
        const auto broadcast = make_shared<op::Broadcast>(arg, out_shape, AxisSet{1});
        FAIL() << "Output shape mismatch (wrong rank) not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(
            error.what(),
            "Broadcast argument shape, specified output shape, and axes are incompatible");
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, broadcast_partial_rank_static_dynamic_shape_mismatch_wrong_size)
{
    const auto arg = make_shared<op::Parameter>(element::f32, PartialShape{Dimension::dynamic(), 4});
    const auto out_shape = Shape{2, 3, 5};
    try
    {
        const auto broadcast = make_shared<op::Broadcast>(arg, out_shape, AxisSet{1});
        FAIL() << "Output shape mismatch (wrong size) not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(
            error.what(),
            "Broadcast argument shape, specified output shape, and axes are incompatible");
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "util/type_prop.hpp"
using namespace std;
using namespace ngraph;
TEST(type_prop, fused_clamp)
{
    const auto data = make_shared<op::Parameter>(element::f64, Shape{2, 2});

    // min > max must be rejected at construction time.
    try
    {
        const auto bad_clamp = make_shared<op::Clamp>(data, 2.0, 1.0);
        EXPECT_FALSE(bad_clamp.get())
            << "Clamp validation did not work. Op node was created with incorrect params.";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(
            error.what(), std::string("The 'min' parameter needs to be less than 'max' for Clamp"));
    }

    // Valid bounds: element type and shape pass straight through.
    const auto good_clamp = make_shared<op::Clamp>(data, 1.0, 2.0);
    EXPECT_EQ(good_clamp->get_element_type(), element::f64);
    EXPECT_EQ(good_clamp->get_shape(), (Shape{2, 2}));
}
This diff is collapsed.
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "util/type_prop.hpp"
using namespace std;
using namespace ngraph;
TEST(type_prop, scalar_constant_deduce_float32)
{
    const auto scalar = op::Constant::create(element::f32, Shape{}, {208});
    ASSERT_EQ(scalar->get_element_type(), element::f32);
    ASSERT_EQ(scalar->get_shape(), (Shape{}));
}

TEST(type_prop, scalar_constant_deduce_bool)
{
    const auto scalar = op::Constant::create(element::boolean, Shape{}, {1});
    ASSERT_EQ(scalar->get_element_type(), element::boolean);
    ASSERT_EQ(scalar->get_shape(), (Shape{}));
}

TEST(type_prop, tensor_constant_deduce_float32)
{
    const auto tensor = op::Constant::create(element::f32, Shape{2, 2}, {208, 208, 208, 208});
    ASSERT_EQ(tensor->get_element_type(), element::f32);
    ASSERT_EQ(tensor->get_shape(), (Shape{2, 2}));
}

TEST(type_prop, tensor_constant_deduce_bool)
{
    const auto tensor = op::Constant::create(element::boolean, Shape{2, 2}, {1, 1, 1, 1});
    ASSERT_EQ(tensor->get_element_type(), element::boolean);
    ASSERT_EQ(tensor->get_shape(), (Shape{2, 2}));
}

TEST(type_prop, tensor_constant_bad_count)
{
    // Three literals for a four-element shape: neither a full fill nor a
    // single broadcast value, so construction must fail.
    try
    {
        const auto tensor = op::Constant::create(element::boolean, Shape{2, 2}, {1, 1, 1});
        // Should have thrown, so fail if it didn't
        FAIL() << "Incorrect number of literals not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(),
                             std::string("Did not get the expected number of literals for a "
                                         "constant of shape Shape{2, 2} (got 3, expected 1 or 4)"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, constant_zero_elements_one_string)
{
    // A zero-element shape accepts a single fill string.
    const auto zero_elem =
        make_shared<op::Constant>(element::i64, Shape{2, 0, 2, 2}, std::vector<std::string>{"42"});
    ASSERT_EQ(zero_elem->get_element_type(), element::i64);
    ASSERT_EQ(zero_elem->get_shape(), (Shape{2, 0, 2, 2}));
}
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "util/type_prop.hpp"
using namespace std;
using namespace ngraph;
TEST(type_prop, convert_deduce)
{
    // Convert changes only the element type; the shape passes through unchanged.
    const auto input = make_shared<op::Parameter>(element::f32, Shape{2, 3, 4});
    const auto converted = make_shared<op::Convert>(input, element::i32);
    ASSERT_EQ(converted->get_element_type(), element::i32);
    ASSERT_EQ(converted->get_shape(), (Shape{2, 3, 4}));
}
This diff is collapsed.
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "util/type_prop.hpp"
using namespace std;
using namespace ngraph;
TEST(type_prop, conv_bias_2d_deduce)
{
    // A 2D ConvolutionBias constructed with only data/filters/bias should
    // deduce element type and output shape, and default all strides and
    // dilations to 1 and padding to 0.
    auto data = make_shared<op::Parameter>(element::f32, Shape{64, 3, 100, 150});
    auto filters = make_shared<op::Parameter>(element::f32, Shape{128, 3, 10, 20});
    auto bias = make_shared<op::Parameter>(element::f32, Shape{128});
    auto conv = make_shared<op::ConvolutionBias>(data, filters, bias);
    EXPECT_EQ(conv->get_element_type(), element::f32);
    EXPECT_EQ(conv->get_shape(), (Shape{64, 128, 91, 131}));
    // Defaulted attributes: unit strides/dilations, zero padding.
    EXPECT_EQ(conv->get_window_movement_strides(), (Strides{1, 1}));
    EXPECT_EQ(conv->get_window_dilation_strides(), (Strides{1, 1}));
    EXPECT_EQ(conv->get_data_dilation_strides(), (Strides{1, 1}));
    EXPECT_EQ(conv->get_padding_below(), (CoordinateDiff{0, 0}));
    EXPECT_EQ(conv->get_padding_above(), (CoordinateDiff{0, 0}));
}
TEST(type_prop, conv_bias_add_2d_deduce)
{
    // ConvolutionBiasAdd with explicit unit strides/dilations and zero
    // padding should deduce the same output type/shape as the add input.
    auto data = make_shared<op::Parameter>(element::f32, Shape{64, 3, 100, 150});
    auto filters = make_shared<op::Parameter>(element::f32, Shape{128, 3, 10, 20});
    auto bias = make_shared<op::Parameter>(element::f32, Shape{128});
    auto add_input = make_shared<op::Parameter>(element::f32, Shape{64, 128, 91, 131});
    Strides unit_strides{1, 1};
    CoordinateDiff zero_padding{0, 0};
    auto conv = make_shared<op::ConvolutionBiasAdd>(data,
                                                    filters,
                                                    bias,
                                                    add_input,
                                                    unit_strides,
                                                    unit_strides,
                                                    zero_padding,
                                                    zero_padding,
                                                    unit_strides);
    EXPECT_EQ(conv->get_element_type(), element::f32);
    EXPECT_EQ(conv->get_shape(), (Shape{64, 128, 91, 131}));
}
TEST(type_prop, conv_bias_bprop_2d_deduce)
{
    // The filters/bias backprop op should produce two outputs whose shapes
    // match the requested filters and bias shapes, both with the data's
    // element type.
    auto data = make_shared<op::Parameter>(element::f32, Shape{64, 3, 100, 150});
    auto filters = make_shared<op::Parameter>(element::f32, Shape{128, 3, 10, 20});
    auto bias = make_shared<op::Parameter>(element::f32, Shape{128});
    auto delta = make_shared<op::Parameter>(element::f32, Shape{64, 128, 91, 131});
    Shape filters_shape = filters->get_shape();
    Shape bias_shape = bias->get_shape();
    Strides unit_strides{1, 1};
    CoordinateDiff zero_padding{0, 0};
    auto conv = make_shared<op::ConvolutionBiasBackpropFiltersBias>(data,
                                                                    filters_shape,
                                                                    bias_shape,
                                                                    delta,
                                                                    unit_strides,
                                                                    unit_strides,
                                                                    zero_padding,
                                                                    zero_padding,
                                                                    unit_strides);
    EXPECT_EQ(conv->get_output_element_type(0), element::f32);
    EXPECT_EQ(conv->get_output_element_type(1), element::f32);
    EXPECT_EQ(conv->get_output_shape(0), filters_shape);
    EXPECT_EQ(conv->get_output_shape(1), bias_shape);
}
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "util/type_prop.hpp"
using namespace std;
using namespace ngraph;
TEST(type_prop, depth_to_space)
{
    // DepthToSpace with block size 8 moves 8*8 = 64 channel groups into the
    // spatial dims: {1, 128, 8, 8} -> {1, 128/64, 8*8, 8*8} = {1, 2, 64, 64}.
    auto A = make_shared<op::Parameter>(element::f32, Shape{1, 128, 8, 8});
    // Renamed from the misleading "space_to_depth": the op under test is
    // DepthToSpace, not SpaceToDepth.
    auto depth_to_space = make_shared<op::DepthToSpace>(A, 8);
    ASSERT_EQ(depth_to_space->get_element_type(), element::f32);
    ASSERT_EQ(depth_to_space->get_shape(), (Shape{1, 2, 64, 64}));
}
This diff is collapsed.
This diff is collapsed.
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "util/type_prop.hpp"
using namespace std;
using namespace ngraph;
TEST(type_prop, dyn_broadcast_shape_wrong_rank)
{
    // The shape input to DynBroadcast must be a rank-1 tensor; a 2-D shape
    // input is expected to trigger a validation failure at construction.
    auto data = make_shared<op::Parameter>(element::f32, Shape{2, 4});
    auto shape_input = make_shared<op::Parameter>(element::i64, Shape{1, 1});
    auto axes_input = make_shared<op::Parameter>(element::i64, Shape{1});
    try
    {
        auto broadcast = make_shared<op::DynBroadcast>(data, shape_input, axes_input);
        FAIL() << "DynBroadcast: wrong shape rank not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(), "DynBroadcast shape rank must be 1");
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
TEST(type_prop, dyn_broadcast_axes_wrong_rank)
{
    // The axes input to DynBroadcast must be a rank-1 tensor; a 2-D axes
    // input is expected to trigger a validation failure at construction.
    auto arg = make_shared<op::Parameter>(element::f32, Shape{2, 4});
    auto bc_shape = make_shared<op::Parameter>(element::i64, Shape{1});
    auto bc_axes = make_shared<op::Parameter>(element::i64, Shape{2, 2});
    try
    {
        auto bc = make_shared<op::DynBroadcast>(arg, bc_shape, bc_axes);
        // Fixed the failure message ("axes shape rank not detected") to match
        // the "wrong <input> rank not detected" pattern of the sibling test.
        FAIL() << "DynBroadcast: wrong axes rank not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(), "DynBroadcast axes rank must be 1");
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
TEST(type_prop, dyn_broadcast_output_partial_shape_dynamic)
{
    // With well-formed shape/axes inputs whose values are unknown at graph
    // construction time, the output partial shape must be dynamic.
    auto data = make_shared<op::Parameter>(element::f32, Shape{2, 4});
    auto shape_input = make_shared<op::Parameter>(element::i64, Shape{1});
    auto axes_input = make_shared<op::Parameter>(element::i64, Shape{2});
    auto broadcast = make_shared<op::DynBroadcast>(data, shape_input, axes_input);
    ASSERT_TRUE(broadcast->get_output_partial_shape(0).is_dynamic());
}
TEST(type_prop, dyn_broadcast_broadcast_shape_et_wrong)
{
    // The shape input must have element type i64; passing a boolean tensor
    // is expected to trigger a validation failure at construction.
    auto data = make_shared<op::Parameter>(element::f32, Shape{2, 4});
    // Deliberately wrong element type for the shape input.
    auto shape_input = make_shared<op::Parameter>(element::boolean, Shape{1});
    auto axes_input = make_shared<op::Parameter>(element::i64, Shape{2});
    try
    {
        auto broadcast = make_shared<op::DynBroadcast>(data, shape_input, axes_input);
        FAIL() << "DynBroadcast: did not detect shape element type not i64";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(),
                             std::string("DynBroadcast shape must have element type i64"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
TEST(type_prop, dyn_broadcast_axes_et_wrong)
{
    // The axes input must have element type i64; passing an f32 tensor is
    // expected to trigger a validation failure at construction.
    auto data = make_shared<op::Parameter>(element::f32, Shape{2, 4});
    auto shape_input = make_shared<op::Parameter>(element::i64, Shape{1});
    // Deliberately wrong element type for the axes input.
    auto axes_input = make_shared<op::Parameter>(element::f32, Shape{2});
    try
    {
        auto broadcast = make_shared<op::DynBroadcast>(data, shape_input, axes_input);
        FAIL() << "DynBroadcast: did not detect axes element type not i64";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(),
                             std::string("DynBroadcast axes must have element type i64"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment