Unverified commit 27d26fa2 authored by Scott Cyphers, committed by GitHub

Test cleanups (#3942)

parent 8fb151f4
......@@ -11,6 +11,18 @@
using namespace std;
using namespace ngraph;
// Temporary until opset PR merges
namespace ngraph
{
namespace op
{
namespace v1
{
using GreaterEqual = GreaterEq;
}
}
}
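Note (illustration, not part of the commit): the temporary namespace block above lets the tests below already use the spelling op::v1::GreaterEqual while the current headers still only declare GreaterEq. A minimal sketch of what the alias guarantees, assuming both names are in scope:

#include <type_traits>
// Illustration only: the alias makes the two spellings name one and the same type,
// so tests written against GreaterEqual keep compiling until the opset PR merges.
static_assert(std::is_same<ngraph::op::v1::GreaterEqual, ngraph::op::v1::GreaterEq>::value,
              "GreaterEqual is an alias for GreaterEq");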
//------------------------------------------------------------------------------
//
// Helper Functions
......@@ -38,7 +50,7 @@ void test_type_prop_opset0_downgrade_pass(const element::Type& output_type,
auto node = v0_result->input_value(0).get_node_shared_ptr();
auto v0_node = as_type_ptr<OpV0>(node);
EXPECT_TRUE(v0_node);
ASSERT_TRUE(v0_node);
EXPECT_EQ(v0_node->get_autob(), np_auto_b);
EXPECT_EQ(v0_node->output(0).get_element_type(), output_type);
EXPECT_EQ(v0_node->output(0).get_shape(), (Shape{1, 3, 2}));
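Note (illustration, not from the commit): the EXPECT_TRUE to ASSERT_TRUE changes matter because the casted pointer is dereferenced on the very next lines; EXPECT_TRUE records the failure and keeps executing, which would turn a failed cast into a crash. A minimal GoogleTest sketch of the guard pattern, using a hypothetical node type:

#include <gtest/gtest.h>
#include <memory>

// Hypothetical stand-in for an ngraph op, used only to show the guard pattern.
struct FakePad { int pads_begin() const { return 1; } };

TEST(example, guard_the_cast_before_dereferencing)
{
    // Imagine this is the result of a checked downcast of the pass output.
    std::shared_ptr<FakePad> pad_v1 = std::make_shared<FakePad>();
    // If the cast had returned null, EXPECT_TRUE would log the failure and fall
    // through to the dereference below, aborting the whole test binary.
    // ASSERT_TRUE stops just this test function and lets the other tests run.
    ASSERT_TRUE(pad_v1) << "downcast failed; pass produced an unexpected node type";
    EXPECT_EQ(pad_v1->pads_begin(), 1); // only reached when the cast succeeded
}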
......@@ -75,10 +87,8 @@ void test_type_prop_opset1_upgrade_pass(const element::Type& output_type,
auto v1_result = f->get_results().at(0);
auto node = v1_result->input(0).get_source_output().get_node_shared_ptr();
auto v1_node = static_pointer_cast<OpV1>(node);
EXPECT_EQ(v1_node->description(), (node_name.empty() ? v0_node->description() : node_name));
EXPECT_EQ(v1_node->get_version(), 1);
auto v1_node = as_type_ptr<OpV1>(node);
ASSERT_TRUE(v1_node);
EXPECT_EQ(v1_node->get_autob(), none_auto_b);
EXPECT_EQ(v1_node->output(0).get_element_type(), output_type);
EXPECT_EQ(v1_node->output(0).get_shape(), (Shape{1, 3, 2}));
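Note (illustration, not from the commit): the removed description()/get_version() assertions only compared a name string after an unchecked static_pointer_cast, which can silently mis-type the node when the pass produced something else. as_type_ptr and is_type perform a checked downcast and return null/false on mismatch, so a single ASSERT_TRUE covers the type check. A self-contained sketch of the difference, using plain C++ casts as a stand-in for ngraph's helpers:

#include <cassert>
#include <memory>

struct Node { virtual ~Node() = default; };
struct DivideV0 : Node {};
struct DivideV1 : Node { bool broadcast = true; };

int main()
{
    std::shared_ptr<Node> produced = std::make_shared<DivideV0>(); // pass emitted v0, not v1
    // Unchecked cast: "succeeds" even though the object is a DivideV0;
    // reading bogus->broadcast would be undefined behaviour.
    auto bogus = std::static_pointer_cast<DivideV1>(produced);
    (void)bogus;
    // Checked cast (dynamic_pointer_cast here, analogous to ngraph's as_type_ptr):
    // empty on mismatch, so one ASSERT_TRUE in a test catches the wrong type.
    auto checked = std::dynamic_pointer_cast<DivideV1>(produced);
    assert(!checked);
    return 0;
}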
......@@ -130,10 +140,8 @@ TEST(opset_transform, opset0_divide_downgrade_pass)
auto divide_v0_result = f->get_results().at(0);
auto node = divide_v0_result->input(0).get_source_output().get_node_shared_ptr();
auto divide_v0_node = static_pointer_cast<op::v0::Divide>(node);
EXPECT_EQ(divide_v0_node->description(), "Divide");
EXPECT_EQ(divide_v0_node->get_version(), 0);
auto divide_v0_node = as_type_ptr<op::v0::Divide>(node);
ASSERT_TRUE(divide_v0_node);
EXPECT_EQ(divide_v0_node->is_pythondiv(), pydiv);
EXPECT_EQ(divide_v0_node->get_autob(), np_auto_b);
EXPECT_EQ(divide_v0_node->output(0).get_element_type(), element::f32);
......@@ -157,10 +165,8 @@ TEST(opset_transform, opset1_divide_upgrade_pass)
auto divide_v1_result = f->get_results().at(0);
auto node = divide_v1_result->input(0).get_source_output().get_node_shared_ptr();
auto divide_v1_node = static_pointer_cast<op::v1::Divide>(node);
EXPECT_EQ(divide_v1_node->description(), "Divide");
EXPECT_EQ(divide_v1_node->get_version(), 1);
auto divide_v1_node = as_type_ptr<op::v1::Divide>(node);
ASSERT_TRUE(divide_v1_node);
EXPECT_EQ(divide_v1_node->is_pythondiv(), pydiv);
EXPECT_EQ(divide_v1_node->get_autob(), none_auto_b);
EXPECT_EQ(divide_v1_node->output(0).get_element_type(), element::f32);
......@@ -189,12 +195,12 @@ TEST(opset_transform, opset1_greater_upgrade_pass)
TEST(opset_transform, opset0_greater_eq_downgrade_pass)
{
test_opset0_comparison_downgrade_pass<op::v0::GreaterEq, op::v1::GreaterEq>();
test_opset0_comparison_downgrade_pass<op::v0::GreaterEq, op::v1::GreaterEqual>();
}
TEST(opset_transform, opset1_greater_eq_upgrade_pass)
{
test_opset1_comparison_upgrade_pass<op::v0::GreaterEq, op::v1::GreaterEq>();
test_opset1_comparison_upgrade_pass<op::v0::GreaterEq, op::v1::GreaterEqual>();
}
TEST(opset_transform, opset0_less_downgrade_pass)
......
......@@ -25,14 +25,14 @@ TEST(opset_transform, opset1_broadcast_upgrade_pass)
auto bcast_v1 = as_type_ptr<op::v1::Broadcast>(
f->get_results().at(0)->input_value(0).get_node_shared_ptr());
EXPECT_TRUE(bcast_v1);
ASSERT_TRUE(bcast_v1);
EXPECT_EQ(bcast_v1->get_broadcast_spec(), op::AutoBroadcastSpec());
EXPECT_EQ(bcast_v1->get_broadcast_axes(), (std::make_pair<bool, AxisSet>(true, AxisSet{0, 2})));
EXPECT_TRUE(bcast_v1->input_value(1).get_node()->is_constant());
EXPECT_TRUE(bcast_v1->input_value(2).get_node()->is_constant());
EXPECT_EQ(static_pointer_cast<op::Constant>(bcast_v1->input_value(1).get_node_shared_ptr())
->get_shape_val(),
(Shape{3, 5, 4, 6}));
ASSERT_TRUE(bcast_v1->input_value(1).get_node()->is_constant());
ASSERT_TRUE(bcast_v1->input_value(2).get_node()->is_constant());
EXPECT_EQ(
as_type_ptr<op::Constant>(bcast_v1->input_value(1).get_node_shared_ptr())->get_shape_val(),
(Shape{3, 5, 4, 6}));
EXPECT_EQ(as_type_ptr<op::Constant>(bcast_v1->input_value(2).get_node_shared_ptr())
->get_axis_set_val(),
(AxisSet{1, 3}));
......@@ -54,7 +54,7 @@ TEST(opset_transform, opset1_broadcast_downgrade_pass)
auto bcast_v0 = as_type_ptr<op::v0::Broadcast>(
f->get_results().at(0)->input_value(0).get_node_shared_ptr());
EXPECT_TRUE(bcast_v0);
ASSERT_TRUE(bcast_v0);
EXPECT_EQ(bcast_v0->get_broadcast_shape(), (Shape{3, 1, 4, 2, 3}));
EXPECT_EQ(bcast_v0->get_broadcast_axes(), (AxisSet{0, 2}));
}
......@@ -35,7 +35,7 @@ TEST(opset_transform, opset1_convolution_upgrade_pass)
auto node = convolution_s1_result->input(0).get_source_output().get_node_shared_ptr();
auto convolution_v1_node = as_type_ptr<op::v1::Convolution>(node);
EXPECT_TRUE(convolution_v1_node);
ASSERT_TRUE(convolution_v1_node);
EXPECT_EQ(convolution_v1_node->get_pads_begin(), pads_begin);
EXPECT_EQ(convolution_v1_node->get_pads_end(), pads_end);
......@@ -67,7 +67,7 @@ TEST(opset_transform, opset1_convolution_downgrade_pass)
auto node = conv_s0_result->input(0).get_source_output().get_node_shared_ptr();
auto conv_v0_node = as_type_ptr<op::v0::Convolution>(node);
EXPECT_TRUE(conv_v0_node);
ASSERT_TRUE(conv_v0_node);
EXPECT_EQ(conv_v0_node->get_window_movement_strides(), strides);
EXPECT_EQ(conv_v0_node->get_window_dilation_strides(), dilations);
EXPECT_EQ(conv_v0_node->get_padding_below(), pads_begin);
......@@ -99,7 +99,7 @@ TEST(opset_transform, opset1_convolution_backprop_data_downgrade_pass)
auto node = conv_s0_result->input(0).get_source_output().get_node_shared_ptr();
auto conv_v0_node = as_type_ptr<op::v0::ConvolutionBackpropData>(node);
EXPECT_TRUE(conv_v0_node);
ASSERT_TRUE(conv_v0_node);
EXPECT_EQ(conv_v0_node->get_data_batch_shape(), (Shape{64, 3, 100}));
EXPECT_EQ(conv_v0_node->get_window_movement_strides_forward(), strides);
EXPECT_EQ(conv_v0_node->get_window_dilation_strides_forward(), dilations);
......@@ -130,7 +130,7 @@ TEST(opset_transform, opset1_convolution_backprop_filters_downgrade_pass)
auto node = conv_s0_result->input(0).get_source_output().get_node_shared_ptr();
auto conv_v0_node = as_type_ptr<op::v0::ConvolutionBackpropFilters>(node);
EXPECT_TRUE(conv_v0_node);
ASSERT_TRUE(conv_v0_node);
EXPECT_EQ(conv_v0_node->get_filters_shape(), (Shape{128, 3, 10}));
EXPECT_EQ(conv_v0_node->get_window_movement_strides_forward(), strides);
EXPECT_EQ(conv_v0_node->get_window_dilation_strides_forward(), dilations);
......
......@@ -58,6 +58,6 @@ TEST(opset_transform, opset1_reshape_downgrade_pass)
const auto pass_replacement_node = f->get_result()->input_value(0).get_node_shared_ptr();
const auto reshape_v1 = as_type_ptr<op::v0::DynReshape>(pass_replacement_node);
EXPECT_TRUE(reshape_v1);
ASSERT_TRUE(reshape_v1);
EXPECT_EQ(reshape_v1->get_zero_flag(), true);
}
......@@ -42,6 +42,6 @@ TEST(opset_transform, opset1_gather_upgrade_pass)
auto gather_s1_result = f->get_results().at(0);
auto gather_v1_node = as_type_ptr<op::v1::Gather>(
gather_s1_result->input(0).get_source_output().get_node_shared_ptr());
EXPECT_TRUE(gather_v1_node);
ASSERT_TRUE(gather_v1_node);
EXPECT_EQ(gather_v1_node->get_axis(), axis);
}
......@@ -28,6 +28,6 @@ TEST(opset_transform, opset1_generate_mask_downgrade_pass)
auto generate_mask_v0 = as_type_ptr<op::v0::GenerateMask>(
f->get_results().at(0)->input_value(0).get_node_shared_ptr());
EXPECT_TRUE(generate_mask_v0);
ASSERT_TRUE(generate_mask_v0);
EXPECT_EQ(generate_mask_v0->get_mask_shape(), (Shape{1, 128}));
}
......@@ -41,7 +41,7 @@ TEST(opset_transform, opset1_logical_and_upgrade_pass)
const auto pass_replacement_node =
f->get_result()->input(0).get_source_output().get_node_shared_ptr();
const auto and_v1 = as_type_ptr<op::v1::LogicalAnd>(pass_replacement_node);
EXPECT_TRUE(and_v1);
ASSERT_TRUE(and_v1);
const auto values_out_element_type = and_v1->output(0).get_element_type();
EXPECT_EQ(values_out_element_type, element::boolean);
......@@ -62,7 +62,7 @@ TEST(opset_transform, opset1_logical_and_downgrade_pass)
const auto pass_replacement_node =
f->get_result()->input(0).get_source_output().get_node_shared_ptr();
const auto and_v0 = as_type_ptr<op::v0::And>(pass_replacement_node);
EXPECT_TRUE(and_v0);
ASSERT_TRUE(and_v0);
const auto values_out_element_type = and_v0->output(0).get_element_type();
EXPECT_EQ(values_out_element_type, element::boolean);
......
......@@ -40,7 +40,7 @@ TEST(opset_transform, opset1_logical_not_upgrade_pass)
const auto pass_replacement_node =
f->get_result()->input(0).get_source_output().get_node_shared_ptr();
const auto not_v1 = as_type_ptr<op::v1::LogicalNot>(pass_replacement_node);
EXPECT_TRUE(not_v1);
ASSERT_TRUE(not_v1);
const auto values_out_element_type = not_v1->output(0).get_element_type();
EXPECT_EQ(values_out_element_type, element::boolean);
......@@ -60,7 +60,7 @@ TEST(opset_transform, opset1_logical_not_downgrade_pass)
const auto pass_replacement_node =
f->get_result()->input(0).get_source_output().get_node_shared_ptr();
const auto not_v0 = as_type_ptr<op::v0::Not>(pass_replacement_node);
EXPECT_TRUE(not_v0);
ASSERT_TRUE(not_v0);
const auto values_out_element_type = not_v0->output(0).get_element_type();
EXPECT_EQ(values_out_element_type, element::boolean);
......
......@@ -41,7 +41,7 @@ TEST(opset_transform, opset1_logical_or_upgrade_pass)
const auto pass_replacement_node =
f->get_result()->input(0).get_source_output().get_node_shared_ptr();
const auto or_v1 = as_type_ptr<op::v1::LogicalOr>(pass_replacement_node);
EXPECT_TRUE(or_v1);
ASSERT_TRUE(or_v1);
const auto values_out_element_type = or_v1->output(0).get_element_type();
EXPECT_EQ(values_out_element_type, element::boolean);
......@@ -62,7 +62,7 @@ TEST(opset_transform, opset1_logical_or_downgrade_pass)
const auto pass_replacement_node =
f->get_result()->input(0).get_source_output().get_node_shared_ptr();
const auto or_v0 = as_type_ptr<op::v0::Or>(pass_replacement_node);
EXPECT_TRUE(or_v0);
ASSERT_TRUE(or_v0);
const auto values_out_element_type = or_v0->output(0).get_element_type();
EXPECT_EQ(values_out_element_type, element::boolean);
......
......@@ -41,7 +41,7 @@ TEST(opset_transform, opset1_logical_xor_upgrade_pass)
const auto pass_replacement_node =
f->get_result()->input(0).get_source_output().get_node_shared_ptr();
const auto xor_v1 = as_type_ptr<op::v1::LogicalXor>(pass_replacement_node);
EXPECT_TRUE(xor_v1);
ASSERT_TRUE(xor_v1);
const auto values_out_element_type = xor_v1->output(0).get_element_type();
EXPECT_EQ(values_out_element_type, element::boolean);
......@@ -62,7 +62,7 @@ TEST(opset_transform, opset1_logical_xor_downgrade_pass)
const auto pass_replacement_node =
f->get_result()->input(0).get_source_output().get_node_shared_ptr();
const auto xor_v0 = as_type_ptr<op::v0::Xor>(pass_replacement_node);
EXPECT_TRUE(xor_v0);
ASSERT_TRUE(xor_v0);
const auto values_out_element_type = xor_v0->output(0).get_element_type();
EXPECT_EQ(values_out_element_type, element::boolean);
......
......@@ -27,10 +27,8 @@ TEST(opset_transform, opset1_one_hot_upgrade_pass)
const auto pass_replacement_node =
f->get_result()->input(0).get_source_output().get_node_shared_ptr();
const auto one_hot_v1 = static_pointer_cast<op::v1::OneHot>(pass_replacement_node);
EXPECT_EQ(one_hot_v1->description(), "OneHot");
EXPECT_EQ(one_hot_v1->get_version(), 1);
const auto one_hot_v1 = as_type_ptr<op::v1::OneHot>(pass_replacement_node);
ASSERT_TRUE(one_hot_v1);
EXPECT_EQ(one_hot_v1->get_axis(), one_hot_axis);
auto one_hot_v1_depth =
......@@ -62,11 +60,10 @@ TEST(opset_transform, opset1_one_hot_downgrade_pass)
pass_manager.register_pass<pass::Opset0Downgrade>();
pass_manager.run_passes(f);
const auto pass_replacement_node =
f->get_result()->input(0).get_source_output().get_node_shared_ptr();
const auto one_hot_v0 = static_pointer_cast<op::v0::OneHot>(pass_replacement_node);
const auto pass_replacement_node = f->get_result()->input_value(0).get_node_shared_ptr();
ASSERT_FALSE(is_type<op::v1::OneHot>(pass_replacement_node));
EXPECT_EQ(one_hot_v0->get_shape(), (Shape{1, 3, 2, 4, 3}));
EXPECT_EQ(pass_replacement_node->get_shape(), (Shape{1, 3, 2, 4, 3}));
}
TEST(opset_transform, opset1_one_hot_downgrade_pass_depth_not_constant)
......
......@@ -30,7 +30,7 @@ TEST(opset_transform, opset1_pad_upgrade_pass)
auto pad_s1_result = f->get_results().at(0);
auto node = pad_s1_result->input(0).get_source_output().get_node_shared_ptr();
auto pad_v1_node = as_type_ptr<op::v1::Pad>(node);
EXPECT_TRUE(pad_v1_node);
ASSERT_TRUE(pad_v1_node);
EXPECT_EQ(pad_v1_node->get_pad_mode(), pad_mode);
EXPECT_EQ(pad_v1_node->get_pads_begin(), padding_below);
......@@ -57,7 +57,7 @@ TEST(opset_transform, opset1_pad_downgrade_pass)
auto pad_s0_result = f->get_results().at(0);
auto node = pad_s0_result->input(0).get_source_output().get_node_shared_ptr();
auto pad_v0_node = as_type_ptr<op::v0::Pad>(node);
EXPECT_TRUE(pad_v0_node);
ASSERT_TRUE(pad_v0_node);
EXPECT_EQ(pad_v0_node->get_pad_mode(), pad_mode);
EXPECT_EQ(pad_v0_node->get_padding_below(), CoordinateDiff({1, 2}));
......
......@@ -34,7 +34,7 @@ TEST(opset_transform, opset1_avgpool_upgrade_pass)
auto avgpool_s1_result = f->get_results().at(0);
auto node = avgpool_s1_result->input(0).get_source_output().get_node_shared_ptr();
auto avg_pool_v1_node = as_type_ptr<op::v1::AvgPool>(node);
EXPECT_TRUE(avg_pool_v1_node);
ASSERT_TRUE(avg_pool_v1_node);
EXPECT_EQ(avg_pool_v1_node->get_pads_begin(), pads_begin);
EXPECT_EQ(avg_pool_v1_node->get_pads_end(), pads_end);
......@@ -67,7 +67,7 @@ TEST(opset_transform, opset1_maxpool_upgrade_pass)
auto maxpool_s1_result = f->get_results().at(0);
auto node = maxpool_s1_result->input(0).get_source_output().get_node_shared_ptr();
auto max_pool_v1_node = as_type_ptr<op::v1::MaxPool>(node);
EXPECT_TRUE(max_pool_v1_node);
ASSERT_TRUE(max_pool_v1_node);
EXPECT_EQ(max_pool_v1_node->get_pads_begin(), pads_begin);
EXPECT_EQ(max_pool_v1_node->get_pads_end(), pads_end);
......@@ -106,7 +106,7 @@ TEST(opset_transform, opset1_avgpool_downgrade_pass)
auto avgpool_s0_result = f->get_results().at(0);
auto node = avgpool_s0_result->input(0).get_source_output().get_node_shared_ptr();
auto avg_pool_v0_node = as_type_ptr<op::v0::AvgPool>(node);
EXPECT_TRUE(avg_pool_v0_node);
ASSERT_TRUE(avg_pool_v0_node);
EXPECT_EQ(avg_pool_v0_node->get_padding_below(), padding_below);
EXPECT_EQ(avg_pool_v0_node->get_padding_above(), padding_above);
......@@ -144,7 +144,7 @@ TEST(opset_transform, opset1_maxpool_downgrade_pass)
auto maxpool_s0_result = f->get_results().at(0);
auto node = maxpool_s0_result->input(0).get_source_output().get_node_shared_ptr();
auto max_pool_v0_node = as_type_ptr<op::v0::MaxPool>(node);
EXPECT_TRUE(max_pool_v0_node);
ASSERT_TRUE(max_pool_v0_node);
EXPECT_EQ(max_pool_v0_node->get_padding_below(), padding_below);
EXPECT_EQ(max_pool_v0_node->get_padding_above(), padding_above);
......@@ -182,7 +182,7 @@ TEST(opset_transform, opset1_avgpool_backprop_downgrade_pass)
auto avgpool_backprop_s0_result = f->get_results().at(0);
auto node = avgpool_backprop_s0_result->input(0).get_source_output().get_node_shared_ptr();
auto avg_pool_backprop_v0_node = as_type_ptr<op::v0::AvgPoolBackprop>(node);
EXPECT_TRUE(avg_pool_backprop_v0_node);
ASSERT_TRUE(avg_pool_backprop_v0_node);
EXPECT_EQ(avg_pool_backprop_v0_node->get_padding_below(), padding_below);
EXPECT_EQ(avg_pool_backprop_v0_node->get_padding_above(), padding_above);
......@@ -220,7 +220,7 @@ TEST(opset_transform, opset1_maxpool_backprop_downgrade_pass)
auto max_pool_backprop_s0_result = f->get_results().at(0);
auto node = max_pool_backprop_s0_result->input(0).get_source_output().get_node_shared_ptr();
auto max_pool_backprop_v0_node = as_type_ptr<op::v0::MaxPoolBackprop>(node);
EXPECT_TRUE(max_pool_backprop_v0_node);
ASSERT_TRUE(max_pool_backprop_v0_node);
EXPECT_EQ(max_pool_backprop_v0_node->get_padding_below(), padding_below);
EXPECT_EQ(max_pool_backprop_v0_node->get_padding_above(), padding_above);
EXPECT_EQ(max_pool_backprop_v0_node->get_window_movement_strides(), window_movement_strides);
......
......@@ -42,7 +42,7 @@ TEST(opset_transform, opset1_product_upgrade_pass)
const auto pass_replacement_node =
f->get_result()->input(0).get_source_output().get_node_shared_ptr();
const auto reduce_prod_v1 = as_type_ptr<op::v1::ReduceProd>(pass_replacement_node);
EXPECT_TRUE(reduce_prod_v1);
ASSERT_TRUE(reduce_prod_v1);
EXPECT_EQ(reduce_prod_v1->get_keep_dims(), false);
}
......@@ -62,11 +62,11 @@ TEST(opset_transform, opset0_reduce_prod_downgrade_pass)
const auto reshape_replacement_node =
f->get_result()->input(0).get_source_output().get_node_shared_ptr();
const auto reshape = as_type_ptr<op::Reshape>(reshape_replacement_node);
EXPECT_TRUE(reshape);
ASSERT_TRUE(reshape);
const auto product_replace_node =
reshape_replacement_node->input(0).get_source_output().get_node_shared_ptr();
const auto product_v0 = as_type_ptr<op::v0::Product>(product_replace_node);
EXPECT_TRUE(product_v0);
ASSERT_TRUE(product_v0);
}
TEST(opset_transform, opset0_reduce_prod_downgrade_pass_axes_not_constant)
......
......@@ -42,7 +42,7 @@ TEST(opset_transform, opset1_reverse_upgrade_pass)
const auto pass_replacement_node =
f->get_result()->input(0).get_source_output().get_node_shared_ptr();
const auto reverse_v1 = as_type_ptr<op::v1::Reverse>(pass_replacement_node);
EXPECT_TRUE(reverse_v1);
ASSERT_TRUE(reverse_v1);
EXPECT_EQ(reverse_v1->get_mode(), op::v1::Reverse::Mode::INDEX);
const auto& rev_axes_input_shape = reverse_v1->get_input_shape(1);
......@@ -68,7 +68,7 @@ TEST(opset_transform, opset0_reverse_downgrade_pass_index_mode)
const auto pass_replacement_node =
f->get_result()->input(0).get_source_output().get_node_shared_ptr();
const auto reverse_v0 = as_type_ptr<op::v0::Reverse>(pass_replacement_node);
EXPECT_TRUE(reverse_v0);
ASSERT_TRUE(reverse_v0);
EXPECT_EQ(reverse_v0->get_reversed_axes(), AxisSet({1, 2}));
}
......@@ -90,7 +90,7 @@ TEST(opset_transform, opset0_reverse_downgrade_pass_mask_mode)
const auto pass_replacement_node =
f->get_result()->input(0).get_source_output().get_node_shared_ptr();
const auto reverse_v0 = as_type_ptr<op::v0::Reverse>(pass_replacement_node);
EXPECT_TRUE(reverse_v0);
ASSERT_TRUE(reverse_v0);
EXPECT_EQ(reverse_v0->get_reversed_axes(), AxisSet({0, 2}));
}
......
......@@ -45,16 +45,16 @@ TEST(opset_transform, opset1_dyn_slice_upgrade_pass)
const auto pass_replacement_node =
f->get_result()->input(0).get_source_output().get_node_shared_ptr();
const auto strided_slice_v1 = as_type_ptr<op::v1::StridedSlice>(pass_replacement_node);
EXPECT_TRUE(strided_slice_v1);
ASSERT_TRUE(strided_slice_v1);
auto begin_const =
as_type_ptr<op::Constant>(strided_slice_v1->input_value(1).get_node_shared_ptr());
EXPECT_TRUE(begin_const);
ASSERT_TRUE(begin_const);
auto end_const =
as_type_ptr<op::Constant>(strided_slice_v1->input_value(2).get_node_shared_ptr());
EXPECT_TRUE(end_const);
ASSERT_TRUE(end_const);
auto strides_const =
as_type_ptr<op::Constant>(strided_slice_v1->input_value(3).get_node_shared_ptr());
EXPECT_TRUE(strides_const);
ASSERT_TRUE(strides_const);
EXPECT_EQ(strided_slice_v1->get_begin_mask(), vector<int64_t>(4, 0));
EXPECT_EQ(strided_slice_v1->get_end_mask(), vector<int64_t>(4, 0));
......@@ -85,7 +85,7 @@ TEST(opset_transform, opset1_strided_slice_downgrade_pass)
const auto pass_replacement_node =
f->get_result()->input(0).get_source_output().get_node_shared_ptr();
const auto slice_v0 = as_type_ptr<op::v0::Slice>(pass_replacement_node);
EXPECT_TRUE(slice_v0);
ASSERT_TRUE(slice_v0);
EXPECT_EQ(slice_v0->get_lower_bounds(), Coordinate({1, 2, 0, 2}));
EXPECT_EQ(slice_v0->get_upper_bounds(), Coordinate({5, 4, 5, 6}));
EXPECT_EQ(slice_v0->get_strides(), Strides({1, 1, 1, 1}));
......
......@@ -41,7 +41,7 @@ TEST(opset_transform, opset1_softmax_upgrade_pass_axis)
auto softmax_s1_result = f->get_results().at(0);
auto node = softmax_s1_result->input(0).get_source_output().get_node_shared_ptr();
auto softmax_s1_node = as_type_ptr<op::v1::Softmax>(node);
EXPECT_TRUE(softmax_s1_node);
ASSERT_TRUE(softmax_s1_node);
EXPECT_EQ(softmax_s1_node->get_axis(), axis);
}
......
......@@ -42,7 +42,7 @@ TEST(opset_transform, opset1_reduce_sum_upgrade_pass)
const auto pass_replacement_node =
f->get_result()->input(0).get_source_output().get_node_shared_ptr();
const auto reduce_sum_v1 = as_type_ptr<op::v1::ReduceSum>(pass_replacement_node);
EXPECT_TRUE(reduce_sum_v1);
ASSERT_TRUE(reduce_sum_v1);
EXPECT_EQ(reduce_sum_v1->get_keep_dims(), false);
}
......@@ -62,11 +62,11 @@ TEST(opset_transform, opset0_reduce_sum_downgrade_pass)
const auto reshape_replacement_node =
f->get_result()->input(0).get_source_output().get_node_shared_ptr();
const auto reshape = as_type_ptr<op::Reshape>(reshape_replacement_node);
EXPECT_TRUE(reshape);
ASSERT_TRUE(reshape);
const auto sum_replace_node =
reshape_replacement_node->input(0).get_source_output().get_node_shared_ptr();
const auto sum_v0 = as_type_ptr<op::v0::Sum>(sum_replace_node);
EXPECT_TRUE(sum_v0);
ASSERT_TRUE(sum_v0);
}
TEST(opset_transform, opset0_reduce_sum_downgrade_pass_not_constant_axes)
......
......@@ -42,7 +42,7 @@ TEST(opset_transform, opset1_topk_upgrade_pass)
const auto pass_replacement_node =
f->get_result()->input(0).get_source_output().get_node_shared_ptr();
const auto topk_v1 = as_type_ptr<op::v1::TopK>(pass_replacement_node);
EXPECT_TRUE(topk_v1);
ASSERT_TRUE(topk_v1);
EXPECT_EQ(topk_v1->get_axis(), axis);
EXPECT_EQ(topk_v1->get_mode(), op::v1::TopK::Mode::MAX);
EXPECT_EQ(topk_v1->get_sort_type(), op::v1::TopK::SortType::SORT_VALUES);
......@@ -72,7 +72,7 @@ TEST(opset_transform, opset1_topk_downgrade_pass)
const auto pass_replacement_node =
f->get_result()->input(0).get_source_output().get_node_shared_ptr();
const auto topk_v0 = as_type_ptr<op::v0::TopK>(pass_replacement_node);
EXPECT_TRUE(topk_v0);
ASSERT_TRUE(topk_v0);
EXPECT_EQ(topk_v0->get_k(), k);
EXPECT_EQ(topk_v0->get_top_k_axis(), axis);
EXPECT_EQ(topk_v0->get_compute_max(), true);
......
......@@ -144,8 +144,8 @@ public:
size_t const_node_index =
m.get_match_root()->get_arguments().at(0) == pattern_map[pattern];
auto const_node = static_pointer_cast<op::Constant>(
m.get_match_root()->get_arguments().at(const_node_index));
auto const_node =
as_type_ptr<op::Constant>(m.get_match_root()->get_arguments().at(const_node_index));
auto second_node = m.get_match_root()->get_arguments().at(const_node_index);
NGRAPH_DEBUG << "second_node = " << second_node->get_name()
<< " , pattern = " << pattern_map[pattern]->get_name();
......
......@@ -288,25 +288,25 @@ TEST(serialize, constant_infinity_nan)
{
if (node->get_friendly_name() == "A")
{
a = static_pointer_cast<op::Constant>(node);
a = as_type_ptr<op::Constant>(node);
}
else if (node->get_friendly_name() == "B")
{
b = static_pointer_cast<op::Constant>(node);
b = as_type_ptr<op::Constant>(node);
}
else if (node->get_friendly_name() == "C")
{
c = static_pointer_cast<op::Constant>(node);
c = as_type_ptr<op::Constant>(node);
}
else if (node->get_friendly_name() == "D")
{
d = static_pointer_cast<op::Constant>(node);
d = as_type_ptr<op::Constant>(node);
}
}
ASSERT_NE(a, nullptr);
ASSERT_NE(b, nullptr);
ASSERT_NE(c, nullptr);
ASSERT_NE(d, nullptr);
ASSERT_TRUE(a);
ASSERT_TRUE(b);
ASSERT_TRUE(c);
ASSERT_TRUE(d);
EXPECT_TRUE(test::all_close_f(a->get_vector<float>(), a_data));
EXPECT_TRUE(test::all_close_f(b->get_vector<float>(), b_data));
EXPECT_TRUE(test::all_close_f(c->get_vector<float>(), c_data));
......@@ -335,10 +335,10 @@ TEST(serialize, non_zero_node_output)
string s = serialize(f);
shared_ptr<Function> g = deserialize(s);
auto g_result = g->get_results().at(0);
auto g_abs = g_result->input(0).get_source_output().get_node_shared_ptr();
auto topk_out = g_abs->input(0).get_source_output();
auto g_abs = g_result->input_value(0).get_node_shared_ptr();
auto topk_out = g_abs->input_value(0);
EXPECT_EQ(topk_out.get_index(), 1);
EXPECT_EQ(topk_out.get_node()->description(), "TopK");
ASSERT_TRUE(is_type<op::TopK>(topk_out.get_node()));
}
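Note (illustration, not from the commit): input_value(i), used in the test above, is the shorter spelling of input(i).get_source_output(); both return the Output<Node> that feeds input i, so the producing-node and output-index checks are unchanged. A hedged sketch of the equivalence, assuming n is a std::shared_ptr<ngraph::Node> with at least one input and the ngraph headers are available:

auto via_input  = n->input(0).get_source_output(); // older, more verbose spelling
auto via_helper = n->input_value(0);               // equivalent shorthand
// Both are ngraph::Output<ngraph::Node>: same producing node, same output index.
EXPECT_EQ(via_input.get_node(), via_helper.get_node());
EXPECT_EQ(via_input.get_index(), via_helper.get_index());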
TEST(serialize, opset1_softmax)
......@@ -352,9 +352,7 @@ TEST(serialize, opset1_softmax)
shared_ptr<Function> g = deserialize(s);
const auto g_result = g->get_results().at(0);
const auto g_softmax = g_result->input(0).get_source_output().get_node_shared_ptr();
EXPECT_EQ(g_softmax->description(), "Softmax");
EXPECT_EQ(g_softmax->get_version(), 1);
EXPECT_TRUE(is_type<op::v1::Softmax>(g_softmax));
}
TEST(serialize, opset1_gather)
......@@ -371,9 +369,7 @@ TEST(serialize, opset1_gather)
shared_ptr<Function> g = deserialize(s);
auto g_result = g->get_results().at(0);
auto g_gather = g_result->input(0).get_source_output().get_node_shared_ptr();
EXPECT_EQ(g_gather->description(), "Gather");
EXPECT_EQ(g_gather->get_version(), 1);
EXPECT_TRUE(is_type<op::v1::Gather>(g_gather));
}
TEST(serialize, opset1_product)
......@@ -389,12 +385,10 @@ TEST(serialize, opset1_product)
shared_ptr<Function> g = deserialize(s);
auto g_result = g->get_results().at(0);
auto g_red_prod = g_result->input(0).get_source_output().get_node_shared_ptr();
EXPECT_EQ(g_red_prod->description(), "Product");
EXPECT_EQ(g_red_prod->get_version(), 1);
EXPECT_EQ(dynamic_cast<const op::v1::ReduceProd*>(g_red_prod.get())->get_keep_dims(), 1);
EXPECT_EQ(dynamic_cast<const op::v1::ReduceProd*>(g_red_prod.get())->get_reduction_axes(),
AxisSet({1, 2}));
auto node = as_type_ptr<op::v1::ReduceProd>(g_red_prod);
EXPECT_TRUE(node);
EXPECT_EQ(node->get_keep_dims(), 1);
EXPECT_EQ(node->get_reduction_axes(), AxisSet({1, 2}));
}
TEST(serialize, opset1_sum)
......@@ -410,12 +404,10 @@ TEST(serialize, opset1_sum)
shared_ptr<Function> g = deserialize(s);
auto g_result = g->get_results().at(0);
auto g_red_sum = g_result->input(0).get_source_output().get_node_shared_ptr();
EXPECT_EQ(g_red_sum->description(), "Sum");
EXPECT_EQ(g_red_sum->get_version(), 1);
EXPECT_EQ(dynamic_cast<const op::v1::ReduceSum*>(g_red_sum.get())->get_keep_dims(), 1);
EXPECT_EQ(dynamic_cast<const op::v1::ReduceSum*>(g_red_sum.get())->get_reduction_axes(),
AxisSet({1, 2}));
auto node = as_type_ptr<op::v1::ReduceSum>(g_red_sum);
EXPECT_TRUE(node);
EXPECT_EQ(node->get_keep_dims(), 1);
EXPECT_EQ(node->get_reduction_axes(), AxisSet({1, 2}));
}
TEST(serialize, opset1_pad)
......@@ -434,11 +426,9 @@ TEST(serialize, opset1_pad)
shared_ptr<Function> g = deserialize(s);
auto g_result = g->get_results().at(0);
auto g_pad = g_result->input(0).get_source_output().get_node_shared_ptr();
EXPECT_EQ(g_pad->description(), "Pad");
EXPECT_EQ(g_pad->get_version(), 1);
EXPECT_EQ(dynamic_cast<const op::v1::Pad*>(g_pad.get())->get_pad_mode(), pad_mode);
auto g_pad = as_type_ptr<op::v1::Pad>(g_result->input_value(0).get_node_shared_ptr());
ASSERT_TRUE(g_pad);
EXPECT_EQ(g_pad->get_pad_mode(), pad_mode);
}
TEST(serialize, tensor_iterator_raw)
......@@ -732,8 +722,7 @@ TEST(serialize, opset1_strided_slice)
auto g_strided_slice_v1 = g_result->input(0).get_source_output().get_node_shared_ptr();
auto strided_slice_out = as_type_ptr<op::v1::StridedSlice>(g_strided_slice_v1);
EXPECT_EQ(strided_slice_out->description(), "Slice");
EXPECT_EQ(strided_slice_out->get_version(), 1);
ASSERT_TRUE(strided_slice_out);
EXPECT_EQ(strided_slice_out->get_begin_mask(), begin_mask);
EXPECT_EQ(strided_slice_out->get_end_mask(), end_mask);
EXPECT_EQ(strided_slice_out->get_new_axis_mask(), new_axis_mask);
......@@ -764,9 +753,7 @@ TEST(serialize, opset1_binary_convolution)
auto g_result = g->get_results().at(0);
auto g_binary_conv = g_result->input(0).get_source_output().get_node_shared_ptr();
auto binary_conv_out = as_type_ptr<op::v1::BinaryConvolution>(g_binary_conv);
EXPECT_EQ(binary_conv_out->description(), "BinaryConvolution");
EXPECT_EQ(binary_conv_out->get_version(), 1);
ASSERT_TRUE(binary_conv_out);
EXPECT_EQ(binary_conv_out->get_strides(), strides);
EXPECT_EQ(binary_conv_out->get_pads_begin(), pads_begin);
......@@ -793,9 +780,7 @@ TEST(serialize, depth_to_space)
auto g_result = g->get_results().at(0);
auto g_depth_to_space = g_result->input(0).get_source_output().get_node_shared_ptr();
auto depth_to_space_out = as_type_ptr<op::DepthToSpace>(g_depth_to_space);
EXPECT_EQ(depth_to_space_out->description(), "DepthToSpace");
EXPECT_EQ(depth_to_space_out->get_version(), 0);
ASSERT_TRUE(depth_to_space_out);
EXPECT_EQ(depth_to_space_out->get_block_size(), block_size);
EXPECT_EQ(depth_to_space_out->get_mode(), mode);
}
......@@ -815,9 +800,7 @@ TEST(serialize, space_to_depth)
auto g_result = g->get_results().at(0);
auto g_space_to_depth = g_result->input(0).get_source_output().get_node_shared_ptr();
auto depth_to_space_out = as_type_ptr<op::SpaceToDepth>(g_space_to_depth);
EXPECT_EQ(depth_to_space_out->description(), "SpaceToDepth");
EXPECT_EQ(depth_to_space_out->get_version(), 0);
ASSERT_TRUE(depth_to_space_out);
EXPECT_EQ(depth_to_space_out->get_block_size(), block_size);
EXPECT_EQ(depth_to_space_out->get_mode(), mode);
}
......@@ -24,8 +24,7 @@ class TestMatcher : public ngraph::pattern::Matcher
{
if (ngraph::as_type_ptr<::ngraph::op::Parameter>(pattern_node))
{
bool result =
pattern_node.get() == dynamic_cast<::ngraph::op::Parameter*>(graph_node.get());
bool result = pattern_node == ngraph::as_type_ptr<::ngraph::op::Parameter>(graph_node);
if (result)
{
m_matched_list.push_back(graph_node);
......
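Note (illustration, not from the commit): in the TestMatcher change above, operator== on shared_ptr compares the stored raw pointers, so pattern_node == as_type_ptr<Parameter>(graph_node) is both a type check (null on cast failure) and an identity check, replacing the raw-pointer dynamic_cast comparison. A small self-contained sketch of that behaviour:

#include <cassert>
#include <memory>

struct Base { virtual ~Base() = default; };
struct Parameter : Base {};
struct Add : Base {};

int main()
{
    std::shared_ptr<Base> pattern   = std::make_shared<Parameter>();
    std::shared_ptr<Base> same_node = pattern;              // the very same node
    std::shared_ptr<Base> other     = std::make_shared<Add>(); // different node, wrong type
    // Equal only when the cast succeeds AND it is the identical object.
    assert(pattern == std::dynamic_pointer_cast<Parameter>(same_node));
    assert(!(pattern == std::dynamic_pointer_cast<Parameter>(other))); // cast yields null
    return 0;
}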