Commit 256a8b6d authored by Adam Procter's avatar Adam Procter Committed by Scott Cyphers

Partial Shapes and Types, Part 4k: AvgPool/MaxPool and backprops (#1871)

* Add merge_rank function

* Update infer_windowed_reduction_output_shape to use PartialShape

* Minor simplification

* Some unit tests and (whaddaya know) fixes for infer_windowed_reduction_output_shape

* Update infer_batched_pooling_forward to use PartialShape

* Update pooling fprop ops for partial shapes

* Update pooling bprop ops for partial shapes

* Add test-failing reminders to implement unit tests for partial shape/type prop for pooling ops

* Add unit tests for partial shape propagation for pooling ops

* Nuke C-style casts for Dimensions/Ranks in validation_util.cpp
parent 7f6f07ee
......@@ -52,6 +52,11 @@ Dimension Dimension::operator+(const Dimension& dim) const
return (is_static() && dim.is_static() ? m_dimension + size_t(dim) : Dimension::dynamic());
}
Dimension Dimension::operator-(const Dimension& dim) const
{
return (is_static() && dim.is_static() ? m_dimension - size_t(dim) : Dimension::dynamic());
}
Dimension Dimension::operator*(const Dimension& dim) const
{
return ((is_static() && dim.is_static())
......
......@@ -56,6 +56,16 @@ namespace ngraph
}
return m_dimension;
}
/// \brief Convert this dimension to `ptrdiff_t`. This dimension must be static.
/// \throws std::invalid_argument If this dimension is dynamic.
explicit operator ptrdiff_t() const
{
if (is_dynamic())
{
throw std::invalid_argument("Cannot convert dynamic dimension to ptrdiff_t");
}
return static_cast<ptrdiff_t>(m_dimension);
}
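// A minimal usage sketch (illustrative, not part of this change):
//
//     ptrdiff_t n = ptrdiff_t(Dimension(4));          // n == 4
//     ptrdiff_t m = ptrdiff_t(Dimension::dynamic());  // throws std::invalid_argument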
/// \brief Check whether this dimension represents the same scheme as the argument (both
/// dynamic, or equal).
......@@ -122,6 +132,12 @@ namespace ngraph
/// dimension with value `size_t(*this)+size_t(dim)`.
Dimension operator+(const Dimension& dim) const;
/// \brief Subtraction operator for Dimension.
/// \param dim Right operand for subtraction.
/// \return Dimension::dynamic() if either of `*this` or `dim` is dynamic; else, a static
/// dimension with value `size_t(*this)-size_t(dim)`.
Dimension operator-(const Dimension& dim) const;
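// For example (illustrative): Dimension(5) - Dimension(2) is the static
// dimension 3, while Dimension(5) - Dimension::dynamic() is
// Dimension::dynamic(). The subtraction itself is unchecked size_t arithmetic.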
/// \brief Multiplication operator for Dimension.
/// \param dim Right operand for multiplication.
/// \return 0 if either of `*this` or `dim` is static and 0; else, Dimension::dynamic() if
......
......@@ -40,31 +40,22 @@ op::AvgPool::AvgPool(const shared_ptr<Node>& arg,
void op::AvgPool::validate_and_infer_types()
{
if (validate_punt_if_dynamic())
if (0 == m_window_movement_strides.size())
{
return;
m_window_movement_strides = Strides(m_window_shape.size(), 1);
}
auto& arg_shape = get_input_shape(0);
NODE_VALIDATION_ASSERT(this, arg_shape.size() >= 3)
<< "Data input shape does not have rank of at least 3 (data input shape: " << arg_shape
<< ").";
if (0 == m_window_movement_strides.size() && arg_shape.size() > 2)
if (0 == m_padding_below.size())
{
m_window_movement_strides = Strides(arg_shape.size() - 2, 1);
m_padding_below = Shape(m_window_shape.size(), 0);
}
if (0 == m_padding_below.size() && arg_shape.size() > 2)
if (0 == m_padding_above.size())
{
m_padding_below = Shape(arg_shape.size() - 2, 0);
m_padding_above = Shape(m_window_shape.size(), 0);
}
if (0 == m_padding_above.size() && arg_shape.size() > 2)
{
m_padding_above = Shape(arg_shape.size() - 2, 0);
}
const PartialShape& arg_shape = get_input_partial_shape(0);
// infer_batched_forward_pooling wants CoordinateDiffs for these, while the pooling ops for
// now still take Shape (no negative padding).
......@@ -125,19 +116,12 @@ op::AvgPoolBackprop::AvgPoolBackprop(const Shape& forward_arg_shape,
void op::AvgPoolBackprop::validate_and_infer_types()
{
if (validate_punt_if_dynamic())
{
return;
}
auto& delta_shape = get_input_shape(0);
// infer_batched_forward_pooling wants CoordinateDiffs for these, while the pooling ops for
// now still take Shape (no negative padding).
CoordinateDiff padding_below(m_padding_below.begin(), m_padding_below.end());
CoordinateDiff padding_above(m_padding_above.begin(), m_padding_above.end());
Shape forward_result_shape =
PartialShape forward_result_shape =
infer_batched_pooling_forward(this,
m_forward_arg_shape,
padding_below,
......@@ -146,10 +130,15 @@ void op::AvgPoolBackprop::validate_and_infer_types()
m_window_movement_strides,
m_include_padding_in_avg_computation);
NODE_VALIDATION_ASSERT(this, forward_result_shape == delta_shape)
const PartialShape& delta_shape = get_input_partial_shape(0);
NODE_VALIDATION_ASSERT(this, forward_result_shape.compatible(delta_shape))
<< "Inferred forward output shape does not match delta shape (inferred forward output "
<< "shape: " << forward_result_shape << ", delta shape: " << delta_shape << ").";
// TODO(amprocte): Once m_forward_arg_shape is allowed to be dynamic, we may technically be
// able to infer some extra information from forward_result_shape that was not present in the
// forward arg shape---namely batch size and channel count. Merge that info in.
set_output_type(0, get_input_element_type(0), m_forward_arg_shape);
}
......
......@@ -42,31 +42,22 @@ op::MaxPool::MaxPool(const shared_ptr<Node>& arg,
void op::MaxPool::validate_and_infer_types()
{
if (validate_punt_if_dynamic())
if (0 == m_window_movement_strides.size())
{
return;
m_window_movement_strides = Strides(m_window_shape.size(), 1);
}
auto& arg_shape = get_input_shape(0);
NODE_VALIDATION_ASSERT(this, arg_shape.size() >= 3)
<< "Data input shape does not have rank of at least 3 (data input shape: " << arg_shape
<< ").";
if (0 == m_window_movement_strides.size() && arg_shape.size() > 2)
if (0 == m_padding_below.size())
{
m_window_movement_strides = Strides(arg_shape.size() - 2, 1);
m_padding_below = Shape(m_window_shape.size(), 0);
}
if (0 == m_padding_below.size() && arg_shape.size() > 2)
if (0 == m_padding_above.size())
{
m_padding_below = Shape(arg_shape.size() - 2, 0);
m_padding_above = Shape(m_window_shape.size(), 0);
}
if (0 == m_padding_above.size() && arg_shape.size() > 2)
{
m_padding_above = Shape(arg_shape.size() - 2, 0);
}
const PartialShape& arg_shape = get_input_partial_shape(0);
// infer_batched_forward_pooling wants CoordinateDiffs for these, while the pooling ops for
// now still take Shape (no negative padding).
......@@ -125,17 +116,12 @@ op::MaxPoolBackprop::MaxPoolBackprop(const shared_ptr<Node>& arg_forward,
void op::MaxPoolBackprop::validate_and_infer_types()
{
if (validate_punt_if_dynamic())
{
return;
}
element::Type forward_arg_et = get_input_element_type(0);
element::Type delta_et = get_input_element_type(1);
auto forward_arg_et = get_input_element_type(0);
auto& forward_arg_shape = get_input_shape(0);
auto delta_et = get_input_element_type(1);
auto& delta_shape = get_input_shape(1);
element::Type result_et;
NODE_VALIDATION_ASSERT(this, forward_arg_et == delta_et)
NODE_VALIDATION_ASSERT(this, element::Type::merge(result_et, forward_arg_et, delta_et))
<< "Element types for forward argument (" << forward_arg_et << ") and delta (" << delta_et
<< ") do not match.";
......@@ -144,7 +130,9 @@ void op::MaxPoolBackprop::validate_and_infer_types()
CoordinateDiff padding_below(m_padding_below.begin(), m_padding_below.end());
CoordinateDiff padding_above(m_padding_above.begin(), m_padding_above.end());
Shape forward_result_shape = infer_batched_pooling_forward(this,
const PartialShape& forward_arg_shape = get_input_partial_shape(0);
PartialShape forward_result_shape = infer_batched_pooling_forward(this,
forward_arg_shape,
padding_below,
padding_above,
......@@ -152,10 +140,15 @@ void op::MaxPoolBackprop::validate_and_infer_types()
m_window_movement_strides,
true);
NODE_VALIDATION_ASSERT(this, forward_result_shape == delta_shape)
const PartialShape& delta_shape = get_input_partial_shape(1);
NODE_VALIDATION_ASSERT(this, forward_result_shape.compatible(delta_shape))
<< "Inferred forward output shape does not match delta shape (inferred forward output "
<< "shape: " << forward_result_shape << ", delta shape: " << delta_shape << ").";
// TODO(amprocte): We may technically be able to infer some extra information from
// forward_result_shape that was not present in the forward arg shape---namely batch size and
// channel count. Merge that info in.
set_output_type(0, get_input_element_type(0), forward_arg_shape);
}
......
......@@ -79,6 +79,12 @@ std::ostream& ngraph::operator<<(std::ostream& str, const PartialShape& shape)
}
}
PartialShape PartialShape::dynamic(Rank r)
{
return PartialShape(
r.is_static(), std::vector<Dimension>(r.is_static() ? size_t(r) : 0, Dimension::dynamic()));
}
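// Illustrative behavior: PartialShape::dynamic(3) yields {?,?,?}, while
// PartialShape::dynamic() (the default) yields a shape of dynamic rank.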
bool PartialShape::compatible(const PartialShape& s) const
{
// If we don't know *this's rank, or we don't know s's rank, they are compatible.
......@@ -182,6 +188,24 @@ bool PartialShape::refines(const PartialShape& s) const
}
}
bool PartialShape::merge_rank(Rank r)
{
if (r.is_dynamic())
{
return true;
}
else if (!m_rank_is_static)
{
m_rank_is_static = true;
m_dimensions = std::vector<Dimension>(size_t(r), Dimension::dynamic());
return true;
}
else
{
return (m_dimensions.size() == size_t(r));
}
}
Shape PartialShape::to_shape() const
{
if (is_dynamic())
......
......@@ -88,9 +88,9 @@ namespace ngraph
/// \return The rank of the shape. This will be Rank::dynamic() if the rank of
/// the shape is dynamic.
Rank rank() const { return m_rank_is_static ? Rank(m_dimensions.size()) : Rank::dynamic(); }
/// \brief Construct a PartialShape with dynamic rank.
/// \return A PartialShape with dynamic rank.
static PartialShape dynamic() { return PartialShape(false, {}); }
/// \brief Construct a PartialShape with the given rank and all dimensions (if any) dynamic.
/// \return A PartialShape with the given rank, and all dimensions (if any) dynamic.
static PartialShape dynamic(Rank r = Rank::dynamic());
/// \brief Check whether this shape is compatible with the argument, i.e., whether it is
/// possible to merge them.
/// \param s The shape to be checked for compatibility with this shape.
......@@ -152,6 +152,12 @@ namespace ngraph
/// either `s2[i]` is dynamic, or `s1[i]` == `s2[i]`.
bool refines(const PartialShape& s) const;
/// \brief Checks that this shape's rank is compatible with `r`, and, if this shape's
/// rank is dynamic and `r` is static, updates this shape to have a rank of `r`
/// with dimensions all dynamic.
/// \return `true` if this shape's rank is compatible with `r`, else `false`.
bool merge_rank(Rank r);
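// For example (illustrative): merging a static rank into a rank-dynamic
// shape makes it rank-static with all-dynamic dimensions:
//
//     PartialShape s{PartialShape::dynamic()};
//     s.merge_rank(3);   // returns true; s is now {?,?,?}
//     s.merge_rank(4);   // returns false; s is unchanged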
/// \brief Convert a static PartialShape to a Shape.
/// \return A new Shape `s` where `s[i] = size_t((*this)[i])`.
/// \throws std::invalid_argument If this PartialShape is dynamic.
......@@ -199,11 +205,10 @@ namespace ngraph
static bool merge_into(PartialShape& dst, const PartialShape& src);
private:
// Private constructor so PartialShape::dynamic() can construct a shape with
// m_rank_is_static set to false.
PartialShape(bool rank_is_static, std::initializer_list<Dimension> init)
// Private constructor for PartialShape::dynamic().
PartialShape(bool rank_is_static, std::vector<Dimension> dimensions)
: m_rank_is_static(rank_is_static)
, m_dimensions(init)
, m_dimensions(dimensions)
{
}
......
......@@ -24,81 +24,99 @@ using namespace ngraph;
// Infers the output shape of a windowed reduction operation, where the data may be dilated and/or
// padded, and the reduction window may be strided and/or dilated.
//
Shape ngraph::infer_windowed_reduction_output_shape(const Node* node,
const Shape& data_shape,
// TODO(amprocte): The messages here would be a bit friendlier if we didn't say "after
// padding/after dilation" for cases where there is actually no padding/dilation.
//
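// For a single axis where both the data dimension d and the window dimension w
// are static, the computation below amounts to (informal summary):
//
//   d_padded_dilated = data_dilation * (d - 1) + 1 + padding_below + padding_above
//   w_dilated        = window_dilation * (w - 1) + 1
//   output_dim       = ceil((d_padded_dilated - w_dilated + 1) / window_stride)
//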
PartialShape ngraph::infer_windowed_reduction_output_shape(const Node* node,
const PartialShape& data_shape,
const Strides& data_dilation,
const CoordinateDiff& data_padding_below,
const CoordinateDiff& data_padding_above,
const Shape& window_shape,
const PartialShape& window_shape,
const Strides& window_strides,
const Strides& window_dilation,
bool is_window_all_in_padding_allowed)
{
NODE_VALIDATION_ASSERT(node, data_shape.size() == data_dilation.size())
<< "Data shape (" << data_shape << ") does not have same rank as "
<< "the data dilation (" << data_dilation << ").";
NODE_VALIDATION_ASSERT(node, data_shape.size() == data_padding_below.size())
<< "Data shape (" << data_shape << ") does not have same rank as "
<< "the data padding below (" << data_padding_below << ").";
NODE_VALIDATION_ASSERT(node, data_shape.size() == data_padding_above.size())
<< "Data shape (" << data_shape << ") does not have same rank as "
<< "the data padding above (" << data_padding_above << ").";
NODE_VALIDATION_ASSERT(node, data_shape.size() == window_shape.size())
<< "Data shape (" << data_shape << ") does not have same rank as "
<< "the window shape (" << window_shape << ").";
NODE_VALIDATION_ASSERT(node, data_shape.size() == window_strides.size())
<< "Data shape (" << data_shape << ") does not have same rank as "
<< "the window strides (" << window_strides << ").";
NODE_VALIDATION_ASSERT(node, data_shape.size() == window_dilation.size())
<< "Data shape (" << data_shape << ") does not have same rank as "
<< "the window dilation (" << window_dilation << ").";
PartialShape data_shape_merged{PartialShape::dynamic()};
Shape output_shape(data_shape.size());
for (size_t i = 0; i < data_shape.size(); i++)
NODE_VALIDATION_ASSERT(node,
data_shape_merged.merge_rank(data_shape.rank()) &&
data_shape_merged.merge_rank(data_dilation.size()) &&
data_shape_merged.merge_rank(data_padding_below.size()) &&
data_shape_merged.merge_rank(data_padding_above.size()) &&
data_shape_merged.merge_rank(window_shape.rank()) &&
data_shape_merged.merge_rank(window_strides.size()) &&
data_shape_merged.merge_rank(window_dilation.size()))
<< "Ranks for data shape (" << data_shape << "), data dilation (" << data_dilation
<< "), padding below (" << data_padding_below << "), padding above (" << data_padding_above
<< "), window shape (" << window_shape << "), window strides (" << window_strides
<< "), and window dilation (" << window_dilation << ") do not match.";
PartialShape output_shape = PartialShape::dynamic(data_shape_merged.rank());
if (output_shape.rank().is_static())
{
for (size_t i = 0; i < static_cast<size_t>(output_shape.rank()); i++)
{
NODE_VALIDATION_ASSERT(node, data_dilation[i] > 0)
<< "Data dilation (" << data_dilation << ") has zero dimension at axis " << i << ".";
<< "Data dilation (" << data_dilation << ") has zero dimension at axis " << i
<< ".";
NODE_VALIDATION_ASSERT(node, window_strides[i] > 0)
<< "Window strides (" << window_strides << ") has zero dimension at axis " << i << ".";
<< "Window strides (" << window_strides << ") has zero dimension at axis " << i
<< ".";
NODE_VALIDATION_ASSERT(node, window_dilation[i] > 0)
<< "Window dilation (" << window_dilation << ") has zero dimension at axis " << i
<< ".";
ptrdiff_t data_padded_dilated_dim =
(ptrdiff_t(data_dilation[i]) * (ptrdiff_t(data_shape[i]) - 1)) + 1 +
data_padding_below[i] + data_padding_above[i];
ptrdiff_t window_dilated_dim =
ptrdiff_t(window_dilation[i]) * (ptrdiff_t(window_shape[i]) - 1) + 1;
bool data_dim_static = data_shape.rank().is_static() && data_shape[i].is_static();
bool window_dim_static = window_shape.rank().is_static() && window_shape[i].is_static();
ptrdiff_t data_padded_dilated_dim = -1;
if (data_dim_static)
{
data_padded_dilated_dim = (static_cast<ptrdiff_t>(data_dilation[i]) *
(static_cast<ptrdiff_t>(data_shape[i]) - 1)) +
1 + data_padding_below[i] + data_padding_above[i];
NODE_VALIDATION_ASSERT(node, data_padded_dilated_dim > 0)
<< "Data shape after padding and dilation has dimension less than 1 (dim: "
<< data_padded_dilated_dim << ") at axis " << i << ".";
}
ptrdiff_t window_dilated_dim = -1;
if (window_dim_static)
{
window_dilated_dim = static_cast<ptrdiff_t>(window_dilation[i]) *
(static_cast<ptrdiff_t>(window_shape[i]) - 1) +
1;
NODE_VALIDATION_ASSERT(node, window_dilated_dim > 0)
<< "Window after dilation has dimension less than 1 (dim: " << window_dilated_dim
<< ") at axis " << i << ".";
NODE_VALIDATION_ASSERT(node, window_dilated_dim <= data_padded_dilated_dim)
<< "Window after dilation has dimension (dim: " << window_dilated_dim
<< ") larger than the data shape after padding (dim: " << data_padded_dilated_dim
<< ") at axis " << i << ".";
<< "Window after dilation has dimension less than 1 (dim: "
<< window_dilated_dim << ") at axis " << i << ".";
NODE_VALIDATION_ASSERT(node,
is_window_all_in_padding_allowed ||
(window_dilated_dim >= data_padding_below[i] &&
window_dilated_dim >= data_padding_above[i]))
<< "Window after dilation is sometimes entirely in the padding area for axis " << i
<< "(dilated window dimension: " << window_dilated_dim
(window_dilated_dim > data_padding_below[i] &&
window_dilated_dim > data_padding_above[i]))
<< "Window after dilation is sometimes entirely in the padding area for axis "
<< i << " (dilated window dimension: " << window_dilated_dim
<< ", padding below dimension: " << data_padding_below[i]
<< ", padding above dimension: " << data_padding_above[i] << ") and this is not "
<< ", padding above dimension: " << data_padding_above[i]
<< ") and this is not "
<< "allowed.";
}
if (data_dim_static && window_dim_static)
{
NODE_VALIDATION_ASSERT(node, window_dilated_dim <= data_padded_dilated_dim)
<< "Window after dilation has dimension (dim: " << window_dilated_dim
<< ") larger than the data shape after padding (dim: "
<< data_padded_dilated_dim << ") at axis " << i << ".";
size_t output_dim = ceil_div(
size_t(data_padded_dilated_dim) - size_t(window_dilated_dim) + 1, window_strides[i]);
output_shape[i] = output_dim;
output_shape[i] = ceil_div(static_cast<size_t>(data_padded_dilated_dim) -
static_cast<size_t>(window_dilated_dim) + 1,
window_strides[i]);
}
}
}
return output_shape;
......@@ -160,7 +178,8 @@ std::tuple<element::Type, Shape>
filter_spatial_shape,
filter_strides,
filter_dilation,
true);
true)
.to_shape();
Shape batch_output_shape(data_batch_shape.size());
batch_output_shape[0] = batch_size;
......@@ -173,50 +192,60 @@ std::tuple<element::Type, Shape>
//
// Infers the output batch shape and element type for batched pooling fprop.
//
Shape ngraph::infer_batched_pooling_forward(const Node* node,
const Shape& data_batch_shape,
PartialShape ngraph::infer_batched_pooling_forward(const Node* node,
const PartialShape& data_batch_shape,
const CoordinateDiff& data_padding_below,
const CoordinateDiff& data_padding_above,
const Shape& window_shape,
const PartialShape& window_shape,
const Strides& window_strides,
bool is_window_all_in_padding_allowed)
{
NODE_VALIDATION_ASSERT(node, data_batch_shape.size() >= 3)
NODE_VALIDATION_ASSERT(node,
data_batch_shape.rank().is_dynamic() ||
static_cast<size_t>(data_batch_shape.rank()) >= 3)
<< "Data batch must have rank of at least 3 (one batch axis, "
<< "one input-channel axis, and at least one spatial dimension) "
<< "(data batch shape: " << data_batch_shape << ").";
size_t spatial_dimension_count = data_batch_shape.size() - 2;
PartialShape data_spatial_shape{PartialShape::dynamic()};
NODE_VALIDATION_ASSERT(node, data_padding_below.size() == spatial_dimension_count)
<< "Data padding below (" << data_padding_below << ") does not have required rank ("
<< spatial_dimension_count << ").";
NODE_VALIDATION_ASSERT(node, data_padding_above.size() == spatial_dimension_count)
<< "Data padding above (" << data_padding_above << ") does not have required rank ("
<< spatial_dimension_count << ").";
NODE_VALIDATION_ASSERT(node, window_shape.size() == spatial_dimension_count)
<< "Window shape (" << window_shape << ") does not have required rank ("
<< spatial_dimension_count << ").";
NODE_VALIDATION_ASSERT(node, window_strides.size() == spatial_dimension_count)
<< "Window shape (" << window_strides << ") does not have required rank ("
<< spatial_dimension_count << ").";
NODE_VALIDATION_ASSERT(node,
data_spatial_shape.merge_rank(data_batch_shape.rank() - 2) &&
data_spatial_shape.merge_rank(data_padding_below.size()) &&
data_spatial_shape.merge_rank(data_padding_above.size()) &&
data_spatial_shape.merge_rank(window_shape.rank()) &&
data_spatial_shape.merge_rank(window_strides.size()))
<< "Ranks for data item shape (data batch has shape " << data_batch_shape
<< ", so data item rank is " << (data_batch_shape.rank() - 2) << "), padding below ("
<< data_padding_below << "), padding above (" << data_padding_above << "), window shape ("
<< window_shape << "), and window strides (" << window_strides << ") do not match.";
Dimension batch_size{Dimension::dynamic()};
Dimension channel_count{Dimension::dynamic()};
PartialShape data_output_spatial_shape{PartialShape::dynamic(data_spatial_shape.rank())};
if (data_batch_shape.rank().is_static())
{
batch_size = data_batch_shape[0];
channel_count = data_batch_shape[1];
size_t batch_size = data_batch_shape[0];
size_t channel_count = data_batch_shape[1];
Shape data_spatial_shape(data_batch_shape.begin() + 2, data_batch_shape.end());
for (size_t i = 0; i < static_cast<size_t>(data_spatial_shape.rank()); i++)
{
data_spatial_shape[i] = data_batch_shape[i + 2];
}
NODE_VALIDATION_ASSERT(node, batch_size > 0) << "Batch size is zero.";
NODE_VALIDATION_ASSERT(node, batch_size.is_dynamic() || static_cast<size_t>(batch_size) > 0)
<< "Batch size is zero.";
NODE_VALIDATION_ASSERT(node, channel_count > 0) << "Channel count is zero.";
NODE_VALIDATION_ASSERT(node,
channel_count.is_dynamic() || static_cast<size_t>(channel_count) > 0)
<< "Channel count is zero.";
// For pooling ops we don't need dilation, so we fill in the identity value (all 1).
Strides data_dilation(spatial_dimension_count, 1);
Strides window_dilation(spatial_dimension_count, 1);
Strides data_dilation(static_cast<size_t>(data_spatial_shape.rank()), 1);
Strides window_dilation(static_cast<size_t>(data_spatial_shape.rank()), 1);
Shape data_output_shape =
data_output_spatial_shape =
infer_windowed_reduction_output_shape(node,
data_spatial_shape,
data_dilation,
......@@ -226,11 +255,17 @@ Shape ngraph::infer_batched_pooling_forward(const Node* node,
window_strides,
window_dilation,
is_window_all_in_padding_allowed);
}
Shape batch_output_shape(data_batch_shape.size());
batch_output_shape[0] = batch_size;
batch_output_shape[1] = channel_count;
std::copy(data_output_shape.begin(), data_output_shape.end(), batch_output_shape.begin() + 2);
PartialShape data_batch_output_shape{
PartialShape::dynamic(data_output_spatial_shape.rank() + 2)};
data_batch_output_shape[0] = batch_size;
data_batch_output_shape[1] = channel_count;
for (size_t i = 0; i < static_cast<size_t>(data_spatial_shape.rank()); i++)
{
data_batch_output_shape[i + 2] = data_output_spatial_shape[i];
}
return batch_output_shape;
return data_batch_output_shape;
}
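// Illustrative call (values assumed for the sketch): for a {1,2,10,10} data
// batch pooled with a 3x3 window, unit strides, and zero padding,
//
//   infer_batched_pooling_forward(node, PartialShape{1, 2, 10, 10},
//                                 CoordinateDiff{0, 0}, CoordinateDiff{0, 0},
//                                 PartialShape{3, 3}, Strides{1, 1}, true)
//
// returns {1,2,8,8}, since each spatial dim is 10 - 3 + 1 = 8.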
......@@ -23,12 +23,12 @@
namespace ngraph
{
Shape infer_windowed_reduction_output_shape(const Node* node,
const Shape& data_shape,
PartialShape infer_windowed_reduction_output_shape(const Node* node,
const PartialShape& data_shape,
const Strides& data_dilation,
const CoordinateDiff& data_padding_below,
const CoordinateDiff& data_padding_above,
const Shape& window_shape,
const PartialShape& window_shape,
const Strides& window_strides,
const Strides& window_dilation,
bool is_window_all_in_padding_allowed);
......@@ -45,11 +45,11 @@ namespace ngraph
const Strides& filter_strides,
const Strides& filter_dilation);
Shape infer_batched_pooling_forward(const Node* node,
const Shape& data_batch_shape,
PartialShape infer_batched_pooling_forward(const Node* node,
const PartialShape& data_batch_shape,
const CoordinateDiff& data_padding_below,
const CoordinateDiff& data_padding_above,
const Shape& window_shape,
const PartialShape& window_shape,
const Strides& window_strides,
bool is_window_all_in_padding_allowed);
}
......@@ -17,6 +17,7 @@
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "ngraph/validation_util.hpp"
#include "util/test_tools.hpp"
using namespace ngraph;
......@@ -790,3 +791,565 @@ TEST(partial_shape, partial_shape_inject_pairs_rank_static)
ASSERT_TRUE(s2.same_scheme(
PartialShape{Dimension::dynamic(), 1, 909, Dimension::dynamic(), Dimension::dynamic()}));
}
TEST(partial_shape, merge_rank_dyn_dyn)
{
PartialShape s{PartialShape::dynamic()};
ASSERT_TRUE(s.merge_rank(Rank::dynamic()));
ASSERT_TRUE(s.rank().is_dynamic());
}
TEST(partial_shape, merge_rank_dyn_static)
{
PartialShape s{PartialShape::dynamic()};
ASSERT_TRUE(s.merge_rank(4));
ASSERT_TRUE(s.same_scheme(PartialShape{
Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic()}));
}
TEST(partial_shape, merge_rank_static_dyn)
{
PartialShape s{2, 3, Dimension::dynamic(), 5};
ASSERT_TRUE(s.merge_rank(Rank::dynamic()));
ASSERT_TRUE(s.same_scheme(PartialShape{2, 3, Dimension::dynamic(), 5}));
}
TEST(partial_shape, merge_rank_static_static_ok)
{
PartialShape s{2, 3, Dimension::dynamic(), 5};
ASSERT_TRUE(s.merge_rank(4));
ASSERT_TRUE(s.same_scheme(PartialShape{2, 3, Dimension::dynamic(), 5}));
}
TEST(partial_shape, merge_rank_static_static_fail)
{
PartialShape s{2, 3, Dimension::dynamic(), 5};
ASSERT_FALSE(s.merge_rank(5));
ASSERT_TRUE(s.same_scheme(PartialShape{2, 3, Dimension::dynamic(), 5}));
}
TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_dynamic_ok)
{
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
PartialShape data_shape{PartialShape::dynamic()};
Strides data_dilation{1, 1, 1, 1};
CoordinateDiff data_padding_below{0, 0, 0, 0};
CoordinateDiff data_padding_above{0, 0, 0, 0};
PartialShape window_shape{PartialShape::dynamic()};
Strides window_strides{1, 1, 1, 1};
Strides window_dilation{1, 1, 1, 1};
bool is_window_all_in_padding_allowed = true;
PartialShape result_shape =
infer_windowed_reduction_output_shape(node.get(),
data_shape,
data_dilation,
data_padding_below,
data_padding_above,
window_shape,
window_strides,
window_dilation,
is_window_all_in_padding_allowed);
ASSERT_TRUE(result_shape.same_scheme(PartialShape{
Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic(), Dimension::dynamic()}));
}
TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_dynamic_zero_data_dilation)
{
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
PartialShape data_shape{PartialShape::dynamic()};
Strides data_dilation{1, 1, 0, 1};
CoordinateDiff data_padding_below{0, 0, 0, 0};
CoordinateDiff data_padding_above{0, 0, 0, 0};
PartialShape window_shape{PartialShape::dynamic()};
Strides window_strides{1, 1, 1, 1};
Strides window_dilation{1, 1, 1, 1};
bool is_window_all_in_padding_allowed = true;
ASSERT_THROW(
{
PartialShape result_shape =
infer_windowed_reduction_output_shape(node.get(),
data_shape,
data_dilation,
data_padding_below,
data_padding_above,
window_shape,
window_strides,
window_dilation,
is_window_all_in_padding_allowed);
},
NodeValidationError);
}
TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_dynamic_zero_window_dilation)
{
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
PartialShape data_shape{PartialShape::dynamic()};
Strides data_dilation{1, 1, 1, 1};
CoordinateDiff data_padding_below{0, 0, 0, 0};
CoordinateDiff data_padding_above{0, 0, 0, 0};
PartialShape window_shape{PartialShape::dynamic()};
Strides window_strides{1, 1, 1, 1};
Strides window_dilation{1, 0, 1, 1};
bool is_window_all_in_padding_allowed = true;
ASSERT_THROW(
{
PartialShape result_shape =
infer_windowed_reduction_output_shape(node.get(),
data_shape,
data_dilation,
data_padding_below,
data_padding_above,
window_shape,
window_strides,
window_dilation,
is_window_all_in_padding_allowed);
},
NodeValidationError);
}
TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_dynamic_zero_window_strides)
{
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
PartialShape data_shape{PartialShape::dynamic()};
Strides data_dilation{1, 1, 1, 1};
CoordinateDiff data_padding_below{0, 0, 0, 0};
CoordinateDiff data_padding_above{0, 0, 0, 0};
PartialShape window_shape{PartialShape::dynamic()};
Strides window_strides{1, 1, 1, 0};
Strides window_dilation{1, 1, 1, 1};
bool is_window_all_in_padding_allowed = true;
ASSERT_THROW(
{
PartialShape result_shape =
infer_windowed_reduction_output_shape(node.get(),
data_shape,
data_dilation,
data_padding_below,
data_padding_above,
window_shape,
window_strides,
window_dilation,
is_window_all_in_padding_allowed);
},
NodeValidationError);
}
TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_dynamic_ok)
{
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
PartialShape data_shape{Dimension::dynamic(), 2, 3, Dimension::dynamic()};
Strides data_dilation{1, 1, 1, 1};
CoordinateDiff data_padding_below{0, 0, 0, 0};
CoordinateDiff data_padding_above{0, 0, 0, 0};
PartialShape window_shape{PartialShape::dynamic()};
Strides window_strides{1, 1, 1, 1};
Strides window_dilation{1, 1, 1, 1};
bool is_window_all_in_padding_allowed = true;
PartialShape result_shape =
infer_windowed_reduction_output_shape(node.get(),
data_shape,
data_dilation,
data_padding_below,
data_padding_above,
window_shape,
window_strides,
window_dilation,
is_window_all_in_padding_allowed);
ASSERT_TRUE(result_shape.same_scheme(PartialShape::dynamic(4)));
}
TEST(partial_shape,
infer_windowed_reduction_rank_static_dynamic_rank_dynamic_zero_data_post_padding)
{
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
PartialShape data_shape{Dimension::dynamic(), 2, 3, Dimension::dynamic()};
Strides data_dilation{1, 1, 1, 1};
CoordinateDiff data_padding_below{0, -1, 0, 0};
CoordinateDiff data_padding_above{0, -1, 0, 0};
PartialShape window_shape{PartialShape::dynamic()};
Strides window_strides{1, 1, 1, 1};
Strides window_dilation{1, 1, 1, 1};
bool is_window_all_in_padding_allowed = true;
ASSERT_THROW(
{
PartialShape result_shape =
infer_windowed_reduction_output_shape(node.get(),
data_shape,
data_dilation,
data_padding_below,
data_padding_above,
window_shape,
window_strides,
window_dilation,
is_window_all_in_padding_allowed);
},
NodeValidationError);
}
TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_dynamic_neg_padding_ok)
{
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
PartialShape data_shape{Dimension::dynamic(), 4, 3, Dimension::dynamic()};
Strides data_dilation{1, 1, 1, 1};
CoordinateDiff data_padding_below{0, -1, 0, 0};
CoordinateDiff data_padding_above{0, -2, 0, 0};
PartialShape window_shape{PartialShape::dynamic()};
Strides window_strides{1, 1, 1, 1};
Strides window_dilation{1, 1, 1, 1};
bool is_window_all_in_padding_allowed = true;
PartialShape result_shape =
infer_windowed_reduction_output_shape(node.get(),
data_shape,
data_dilation,
data_padding_below,
data_padding_above,
window_shape,
window_strides,
window_dilation,
is_window_all_in_padding_allowed);
ASSERT_TRUE(result_shape.same_scheme(PartialShape::dynamic(4)));
}
TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_static_dynamic_ok)
{
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
PartialShape data_shape{PartialShape::dynamic()};
Strides data_dilation{1, 1, 1, 1};
CoordinateDiff data_padding_below{0, 0, 0, 0};
CoordinateDiff data_padding_above{0, 0, 0, 0};
PartialShape window_shape{Dimension::dynamic(), 2, 3, Dimension::dynamic()};
Strides window_strides{1, 1, 1, 1};
Strides window_dilation{1, 1, 1, 1};
bool is_window_all_in_padding_allowed = true;
PartialShape result_shape =
infer_windowed_reduction_output_shape(node.get(),
data_shape,
data_dilation,
data_padding_below,
data_padding_above,
window_shape,
window_strides,
window_dilation,
is_window_all_in_padding_allowed);
ASSERT_TRUE(result_shape.same_scheme(PartialShape::dynamic(4)));
}
TEST(partial_shape, infer_windowed_reduction_rank_dynamic_rank_static_dynamic_window_dim_zero)
{
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
PartialShape data_shape{PartialShape::dynamic()};
Strides data_dilation{1, 1, 1, 1};
CoordinateDiff data_padding_below{0, 0, 0, 0};
CoordinateDiff data_padding_above{0, 0, 0, 0};
PartialShape window_shape{Dimension::dynamic(), 2, 0, Dimension::dynamic()};
Strides window_strides{1, 1, 1, 1};
Strides window_dilation{1, 1, 1, 1};
bool is_window_all_in_padding_allowed = true;
ASSERT_THROW(
{
PartialShape result_shape =
infer_windowed_reduction_output_shape(node.get(),
data_shape,
data_dilation,
data_padding_below,
data_padding_above,
window_shape,
window_strides,
window_dilation,
is_window_all_in_padding_allowed);
},
NodeValidationError);
}
TEST(partial_shape,
infer_windowed_reduction_rank_dynamic_rank_static_dynamic_window_dilated_dim_zero)
{
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
PartialShape data_shape{PartialShape::dynamic()};
Strides data_dilation{1, 1, 1, 1};
CoordinateDiff data_padding_below{0, 0, 0, 0};
CoordinateDiff data_padding_above{0, 0, 0, 0};
PartialShape window_shape{Dimension::dynamic(), 2, 0, Dimension::dynamic()};
Strides window_strides{1, 1, 1, 1};
Strides window_dilation{1, 1, 3, 1};
bool is_window_all_in_padding_allowed = true;
ASSERT_THROW(
{
PartialShape result_shape =
infer_windowed_reduction_output_shape(node.get(),
data_shape,
data_dilation,
data_padding_below,
data_padding_above,
window_shape,
window_strides,
window_dilation,
is_window_all_in_padding_allowed);
},
NodeValidationError);
}
TEST(partial_shape,
infer_windowed_reduction_rank_dynamic_rank_static_dynamic_window_all_in_padding_ok)
{
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
PartialShape data_shape{PartialShape::dynamic()};
Strides data_dilation{1, 1, 1, 1};
CoordinateDiff data_padding_below{0, 0, 3, 0};
CoordinateDiff data_padding_above{0, 0, 0, 0};
PartialShape window_shape{Dimension::dynamic(), 2, 3, Dimension::dynamic()};
Strides window_strides{1, 1, 1, 1};
Strides window_dilation{1, 1, 1, 1};
bool is_window_all_in_padding_allowed = true;
PartialShape result_shape =
infer_windowed_reduction_output_shape(node.get(),
data_shape,
data_dilation,
data_padding_below,
data_padding_above,
window_shape,
window_strides,
window_dilation,
is_window_all_in_padding_allowed);
ASSERT_TRUE(result_shape.same_scheme(PartialShape::dynamic(4)));
}
TEST(partial_shape,
infer_windowed_reduction_rank_dynamic_rank_static_dynamic_window_all_in_padding_not_ok)
{
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
PartialShape data_shape{PartialShape::dynamic()};
Strides data_dilation{1, 1, 1, 1};
CoordinateDiff data_padding_below{0, 0, 3, 0};
CoordinateDiff data_padding_above{0, 0, 0, 0};
PartialShape window_shape{Dimension::dynamic(), 2, 3, Dimension::dynamic()};
Strides window_strides{1, 1, 1, 1};
Strides window_dilation{1, 1, 1, 1};
bool is_window_all_in_padding_allowed = false;
ASSERT_THROW(
{
PartialShape result_shape =
infer_windowed_reduction_output_shape(node.get(),
data_shape,
data_dilation,
data_padding_below,
data_padding_above,
window_shape,
window_strides,
window_dilation,
is_window_all_in_padding_allowed);
},
NodeValidationError);
}
TEST(partial_shape,
infer_windowed_reduction_rank_dynamic_rank_static_dynamic_dilated_window_not_all_in_padding)
{
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
PartialShape data_shape{PartialShape::dynamic()};
Strides data_dilation{1, 1, 1, 1};
CoordinateDiff data_padding_below{0, 0, 3, 0};
CoordinateDiff data_padding_above{0, 0, 0, 0};
PartialShape window_shape{Dimension::dynamic(), 2, 3, Dimension::dynamic()};
Strides window_strides{1, 1, 1, 1};
Strides window_dilation{1, 1, 2, 1};
bool is_window_all_in_padding_allowed = false;
PartialShape result_shape =
infer_windowed_reduction_output_shape(node.get(),
data_shape,
data_dilation,
data_padding_below,
data_padding_above,
window_shape,
window_strides,
window_dilation,
is_window_all_in_padding_allowed);
ASSERT_TRUE(result_shape.same_scheme(PartialShape::dynamic(4)));
}
TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_static_dynamic_ok)
{
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
PartialShape data_shape{Dimension::dynamic(), Dimension::dynamic(), 6, 4};
Strides data_dilation{1, 1, 1, 1};
CoordinateDiff data_padding_below{0, 0, 0, 0};
CoordinateDiff data_padding_above{0, 0, 0, 0};
PartialShape window_shape{Dimension::dynamic(), 2, 3, Dimension::dynamic()};
Strides window_strides{1, 1, 1, 1};
Strides window_dilation{1, 1, 1, 1};
bool is_window_all_in_padding_allowed = true;
PartialShape result_shape =
infer_windowed_reduction_output_shape(node.get(),
data_shape,
data_dilation,
data_padding_below,
data_padding_above,
window_shape,
window_strides,
window_dilation,
is_window_all_in_padding_allowed);
ASSERT_TRUE(result_shape.same_scheme(
PartialShape{Dimension::dynamic(), Dimension::dynamic(), 4, Dimension::dynamic()}));
}
TEST(partial_shape,
infer_windowed_reduction_rank_static_dynamic_rank_static_dynamic_with_padding_ok)
{
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
PartialShape data_shape{Dimension::dynamic(), Dimension::dynamic(), 6, 4};
Strides data_dilation{1, 1, 1, 1};
CoordinateDiff data_padding_below{0, 0, 2, 0};
CoordinateDiff data_padding_above{0, 0, -1, 0};
PartialShape window_shape{Dimension::dynamic(), 2, 3, Dimension::dynamic()};
Strides window_strides{1, 1, 1, 1};
Strides window_dilation{1, 1, 1, 1};
bool is_window_all_in_padding_allowed = true;
PartialShape result_shape =
infer_windowed_reduction_output_shape(node.get(),
data_shape,
data_dilation,
data_padding_below,
data_padding_above,
window_shape,
window_strides,
window_dilation,
is_window_all_in_padding_allowed);
ASSERT_TRUE(result_shape.same_scheme(
PartialShape{Dimension::dynamic(), Dimension::dynamic(), 5, Dimension::dynamic()}));
}
TEST(partial_shape,
infer_windowed_reduction_rank_static_dynamic_rank_static_dynamic_with_padding_and_stride_ok)
{
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
PartialShape data_shape{Dimension::dynamic(), Dimension::dynamic(), 6, 4};
Strides data_dilation{1, 1, 1, 1};
CoordinateDiff data_padding_below{0, 0, 2, 0};
CoordinateDiff data_padding_above{0, 0, -1, 0};
PartialShape window_shape{Dimension::dynamic(), 2, 3, Dimension::dynamic()};
Strides window_strides{1, 1, 2, 1};
Strides window_dilation{1, 1, 1, 1};
bool is_window_all_in_padding_allowed = true;
PartialShape result_shape =
infer_windowed_reduction_output_shape(node.get(),
data_shape,
data_dilation,
data_padding_below,
data_padding_above,
window_shape,
window_strides,
window_dilation,
is_window_all_in_padding_allowed);
ASSERT_TRUE(result_shape.same_scheme(
PartialShape{Dimension::dynamic(), Dimension::dynamic(), 3, Dimension::dynamic()}));
}
TEST(partial_shape, infer_windowed_reduction_rank_static_dynamic_rank_static_dynamic_window_too_big)
{
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
PartialShape data_shape{Dimension::dynamic(), Dimension::dynamic(), 6, 4};
Strides data_dilation{1, 1, 1, 1};
CoordinateDiff data_padding_below{0, 0, 0, 0};
CoordinateDiff data_padding_above{0, 0, 0, 0};
PartialShape window_shape{Dimension::dynamic(), 2, 7, Dimension::dynamic()};
Strides window_strides{1, 1, 1, 1};
Strides window_dilation{1, 1, 1, 1};
bool is_window_all_in_padding_allowed = true;
ASSERT_THROW(
{
PartialShape result_shape =
infer_windowed_reduction_output_shape(node.get(),
data_shape,
data_dilation,
data_padding_below,
data_padding_above,
window_shape,
window_strides,
window_dilation,
is_window_all_in_padding_allowed);
},
NodeValidationError);
}
TEST(partial_shape,
infer_windowed_reduction_rank_static_dynamic_rank_static_dynamic_window_not_too_big_padding)
{
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
PartialShape data_shape{Dimension::dynamic(), Dimension::dynamic(), 6, 4};
Strides data_dilation{1, 1, 1, 1};
CoordinateDiff data_padding_below{0, 0, 5, 0};
CoordinateDiff data_padding_above{0, 0, -3, 0};
PartialShape window_shape{Dimension::dynamic(), 2, 7, Dimension::dynamic()};
Strides window_strides{1, 1, 1, 1};
Strides window_dilation{1, 1, 1, 1};
bool is_window_all_in_padding_allowed = true;
PartialShape result_shape =
infer_windowed_reduction_output_shape(node.get(),
data_shape,
data_dilation,
data_padding_below,
data_padding_above,
window_shape,
window_strides,
window_dilation,
is_window_all_in_padding_allowed);
ASSERT_TRUE(result_shape.same_scheme(
PartialShape{Dimension::dynamic(), Dimension::dynamic(), 2, Dimension::dynamic()}));
}
TEST(partial_shape,
infer_windowed_reduction_rank_static_dynamic_rank_static_dynamic_window_dilated_too_big)
{
auto node = std::make_shared<op::Parameter>(element::f32, Shape{});
PartialShape data_shape{Dimension::dynamic(), Dimension::dynamic(), 6, 4};
Strides data_dilation{1, 1, 1, 1};
CoordinateDiff data_padding_below{0, 0, 5, 0};
CoordinateDiff data_padding_above{0, 0, -3, 0};
PartialShape window_shape{Dimension::dynamic(), 2, 7, Dimension::dynamic()};
Strides window_strides{1, 1, 1, 1};
Strides window_dilation{1, 1, 2, 1};
bool is_window_all_in_padding_allowed = true;
ASSERT_THROW(
{
PartialShape result_shape =
infer_windowed_reduction_output_shape(node.get(),
data_shape,
data_dilation,
data_padding_below,
data_padding_above,
window_shape,
window_strides,
window_dilation,
is_window_all_in_padding_allowed);
},
NodeValidationError);
}
......@@ -4411,9 +4411,12 @@ TEST(type_prop, conv_invalid_wrong_number_of_filter_dimensions_too_many)
}
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(error.what(),
std::string("Data shape (Shape{10, 10}) does not have same rank as "
"the window shape (Shape{3, 3, 3})"));
EXPECT_HAS_SUBSTRING(
error.what(),
std::string("Ranks for data shape ({10,10}), data dilation (Strides{1, 1}), padding "
"below (CoordinateDiff{0, 0}), padding above (CoordinateDiff{0, 0}), "
"window shape ({3,3,3}), window strides (Strides{1, 1}), and window "
"dilation (Strides{1, 1}) do not match"));
}
catch (...)
{
......@@ -4435,9 +4438,12 @@ TEST(type_prop, conv_invalid_wrong_number_of_filter_dimensions_too_few)
}
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(error.what(),
std::string("Data shape (Shape{10, 10}) does not have "
"same rank as the window shape (Shape{3})"));
EXPECT_HAS_SUBSTRING(
error.what(),
std::string("Ranks for data shape ({10,10}), data dilation (Strides{1, 1}), padding "
"below (CoordinateDiff{0, 0}), padding above (CoordinateDiff{0, 0}), "
"window shape ({3}), window strides (Strides{1, 1}), and window dilation "
"(Strides{1, 1}) do not match"));
}
catch (...)
{
......@@ -4506,9 +4512,12 @@ TEST(type_prop, conv_invalid_movement_stride_rank)
}
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(error.what(),
std::string("Data shape (Shape{10, 10}) does not have same rank as "
"the window strides (Strides{2, 3, 8})"));
EXPECT_HAS_SUBSTRING(
error.what(),
std::string("Ranks for data shape ({10,10}), data dilation (Strides{1, 1}), padding "
"below (CoordinateDiff{0, 0}), padding above (CoordinateDiff{0, 0}), "
"window shape ({3,3}), window strides (Strides{2, 3, 8}), and window "
"dilation (Strides{1, 1}) do not match"));
}
catch (...)
{
......@@ -4530,9 +4539,12 @@ TEST(type_prop, conv_invalid_window_dilation_stride_rank)
}
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(error.what(),
std::string("Data shape (Shape{10, 10}) does not have same rank as "
"the window dilation (Strides{2, 3, 8})"));
EXPECT_HAS_SUBSTRING(
error.what(),
std::string("Ranks for data shape ({10,10}), data dilation (Strides{1, 1}), padding "
"below (CoordinateDiff{0, 0}), padding above (CoordinateDiff{0, 0}), "
"window shape ({3,3}), window strides (Strides{2, 3}), and window dilation "
"(Strides{2, 3, 8}) do not match"));
}
catch (...)
{
......@@ -4560,9 +4572,12 @@ TEST(type_prop, conv_invalid_data_dilation_stride_rank)
}
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(error.what(),
std::string("Data shape (Shape{10, 10}) does not have same rank as "
"the data dilation (Strides{2, 3, 8})"));
EXPECT_HAS_SUBSTRING(
error.what(),
std::string("Ranks for data shape ({10,10}), data dilation (Strides{2, 3, 8}), padding "
"below (CoordinateDiff{0, 0}), padding above (CoordinateDiff{0, 0}), "
"window shape ({3,3}), window strides (Strides{2, 3}), and window dilation "
"(Strides{2, 3}) do not match"));
}
catch (...)
{
......@@ -4589,9 +4604,12 @@ TEST(type_prop, conv_invalid_padding_below_rank)
}
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(error.what(),
std::string("Data shape (Shape{10, 10}) does not have same rank as "
"the data padding below (CoordinateDiff{0, 0, 0})"));
EXPECT_HAS_SUBSTRING(
error.what(),
std::string("Ranks for data shape ({10,10}), data dilation (Strides{1, 1}), padding "
"below (CoordinateDiff{0, 0, 0}), padding above (CoordinateDiff{0, 0}), "
"window shape ({3,3}), window strides (Strides{2, 3}), and window dilation "
"(Strides{1, 1}) do not match"));
}
catch (...)
{
......@@ -4618,9 +4636,12 @@ TEST(type_prop, conv_invalid_padding_above_rank)
}
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(error.what(),
std::string("Data shape (Shape{10, 10}) does not have same rank as "
"the data padding above (CoordinateDiff{0, 0, 0})"));
EXPECT_HAS_SUBSTRING(
error.what(),
std::string("Ranks for data shape ({10,10}), data dilation (Strides{1, 1}), padding "
"below (CoordinateDiff{0, 0}), padding above (CoordinateDiff{0, 0, 0}), "
"window shape ({3,3}), window strides (Strides{2, 3}), and window dilation "
"(Strides{2, 3}) do not match"));
}
catch (...)
{
......@@ -4953,8 +4974,7 @@ TEST(type_prop, max_pool_invalid_0d_input)
}
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(error.what(),
std::string("Data input shape does not have rank of at least 3"));
EXPECT_HAS_SUBSTRING(error.what(), std::string("Data batch must have rank of at least 3"));
}
catch (...)
{
......@@ -4976,8 +4996,7 @@ TEST(type_prop, max_pool_invalid_1d_input)
}
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(error.what(),
std::string("Data input shape does not have rank of at least 3"));
EXPECT_HAS_SUBSTRING(error.what(), std::string("Data batch must have rank of at least 3"));
}
catch (...)
{
......@@ -4999,8 +5018,7 @@ TEST(type_prop, max_pool_invalid_2d_input)
}
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(error.what(),
std::string("Data input shape does not have rank of at least 3"));
EXPECT_HAS_SUBSTRING(error.what(), std::string("Data batch must have rank of at least 3"));
}
catch (...)
{
......@@ -5068,7 +5086,10 @@ TEST(type_prop, max_pool_invalid_wrong_number_of_window_dimensions_too_many)
{
EXPECT_HAS_SUBSTRING(
error.what(),
std::string("Window shape (Shape{3, 3, 3}) does not have required rank (2)"));
std::string("Ranks for data item shape (data batch has shape {6,2,10,10}, so data item "
"rank is 2), padding below (CoordinateDiff{0, 0, 0}), padding above "
"(CoordinateDiff{0, 0, 0}), window shape ({3,3,3}), and window strides "
"(Strides{1, 1, 1}) do not match"));
}
catch (...)
{
......@@ -5091,7 +5112,11 @@ TEST(type_prop, max_pool_invalid_wrong_number_of_window_dimensions_too_few)
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(
error.what(), std::string("Window shape (Shape{3}) does not have required rank (2)"));
error.what(),
std::string("Ranks for data item shape (data batch has shape {6,2,10,10}, so data item "
"rank is 2), padding below (CoordinateDiff{0}), padding above "
"(CoordinateDiff{0}), window shape ({3}), and window strides (Strides{1}) "
"do not match"));
}
catch (...)
{
......@@ -5116,7 +5141,10 @@ TEST(type_prop, max_pool_invalid_movement_stride_rank)
{
EXPECT_HAS_SUBSTRING(
error.what(),
std::string("Window shape (Strides{2, 3, 8}) does not have required rank (2)"));
std::string("Ranks for data item shape (data batch has shape {6,2,10,10}, so data item "
"rank is 2), padding below (CoordinateDiff{0, 0}), padding above "
"(CoordinateDiff{0, 0}), window shape ({3,3}), and window strides "
"(Strides{2, 3, 8}) do not match"));
}
catch (...)
{
......@@ -5221,6 +5249,162 @@ TEST(type_prop, max_pool_invalid_movement_stride_0)
}
}
TEST(type_prop, max_pool_partial_rank_dynamic_ok)
{
PartialShape arg_shape{PartialShape::dynamic()};
Shape window_shape{2, 3, 4, 5};
Strides window_movement_strides{1, 1, 1, 1};
Shape padding_below{0, 0, 0, 0};
Shape padding_above{0, 0, 0, 0};
auto param = make_shared<op::Parameter>(element::f32, arg_shape);
auto mp = make_shared<op::MaxPool>(
param, window_shape, window_movement_strides, padding_below, padding_above);
ASSERT_EQ(mp->get_output_element_type(0), element::f32);
ASSERT_TRUE(mp->get_output_partial_shape(0).same_scheme(PartialShape::dynamic(6)));
}
TEST(type_prop, max_pool_partial_rank_dynamic_attrib_rank_mismatch)
{
PartialShape arg_shape{PartialShape::dynamic()};
Shape window_shape{2, 3, 4, 5};
Strides window_movement_strides{1, 1, 1, 1, 1};
Shape padding_below{0, 0, 0, 0};
Shape padding_above{0, 0, 0, 0};
auto param = make_shared<op::Parameter>(element::f32, arg_shape);
try
{
auto mp = make_shared<op::MaxPool>(
param, window_shape, window_movement_strides, padding_below, padding_above);
FAIL() << "Mismatch of attribute ranks not detected";
}
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(
error.what(),
std::string("Ranks for data item shape (data batch has shape ?, so data item rank is "
"?), padding below (CoordinateDiff{0, 0, 0, 0}), padding above "
"(CoordinateDiff{0, 0, 0, 0}), window shape ({2,3,4,5}), and window "
"strides (Strides{1, 1, 1, 1, 1}) do not match"));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}
TEST(type_prop, max_pool_partial_rank_static_dynamic_ok)
{
PartialShape arg_shape{PartialShape::dynamic(6)};
Shape window_shape{2, 3, 4, 5};
Strides window_movement_strides{1, 1, 1, 1};
Shape padding_below{0, 0, 0, 0};
Shape padding_above{0, 0, 0, 0};
auto param = make_shared<op::Parameter>(element::f32, arg_shape);
auto mp = make_shared<op::MaxPool>(
param, window_shape, window_movement_strides, padding_below, padding_above);
ASSERT_EQ(mp->get_output_element_type(0), element::f32);
ASSERT_TRUE(mp->get_output_partial_shape(0).same_scheme(PartialShape::dynamic(6)));
}
TEST(type_prop, max_pool_partial_rank_static_dynamic_some_dims_known_ok)
{
PartialShape arg_shape{5, Dimension::dynamic(), 8, Dimension::dynamic(), 4, 7};
Shape window_shape{2, 3, 4, 5};
Strides window_movement_strides{1, 1, 1, 1};
Shape padding_below{0, 0, 0, 0};
Shape padding_above{0, 0, 0, 0};
auto param = make_shared<op::Parameter>(element::f32, arg_shape);
auto mp = make_shared<op::MaxPool>(
param, window_shape, window_movement_strides, padding_below, padding_above);
ASSERT_EQ(mp->get_output_element_type(0), element::f32);
ASSERT_TRUE(mp->get_output_partial_shape(0).same_scheme(
PartialShape{5, Dimension::dynamic(), 7, Dimension::dynamic(), 1, 3}));
}
TEST(type_prop, max_pool_partial_rank_static_dynamic_attrib_rank_mismatch)
{
PartialShape arg_shape{5, Dimension::dynamic(), 8, Dimension::dynamic(), 4, 7};
Shape window_shape{2, 3, 4, 5, 6};
Strides window_movement_strides{1, 1, 1, 1};
Shape padding_below{0, 0, 0, 0};
Shape padding_above{0, 0, 0, 0};
auto param = make_shared<op::Parameter>(element::f32, arg_shape);
try
{
auto mp = make_shared<op::MaxPool>(
param, window_shape, window_movement_strides, padding_below, padding_above);
FAIL() << "Mismatch of attribute ranks not detected";
}
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(
error.what(),
std::string("Ranks for data item shape (data batch has shape {5,?,8,?,4,7}, so data "
"item rank is 4), padding below (CoordinateDiff{0, 0, 0, 0}), padding "
"above (CoordinateDiff{0, 0, 0, 0}), window shape ({2,3,4,5,6}), and "
"window strides (Strides{1, 1, 1, 1}) do not match"));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}
TEST(type_prop, max_pool_partial_rank_static_dynamic_window_not_too_big)
{
PartialShape arg_shape{5, Dimension::dynamic(), 8, Dimension::dynamic(), 4, 7};
Shape window_shape{9, 3, 4, 5};
Strides window_movement_strides{1, 1, 1, 1};
Shape padding_below{0, 0, 0, 0};
Shape padding_above{0, 0, 0, 0};
auto param = make_shared<op::Parameter>(element::f32, arg_shape);
try
{
auto mp = make_shared<op::MaxPool>(
param, window_shape, window_movement_strides, padding_below, padding_above);
FAIL() << "Oversized window not detected";
}
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(error.what(),
std::string("Window after dilation has dimension (dim: 9) larger than "
"the data shape after padding (dim: 8) at axis 0"));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}
TEST(type_prop, max_pool_partial_rank_static_dynamic_padded_window_not_too_big)
{
PartialShape arg_shape{5, Dimension::dynamic(), 8, Dimension::dynamic(), 4, 7};
Shape window_shape{9, 3, 4, 5};
Strides window_movement_strides{1, 1, 1, 1};
Shape padding_below{0, 0, 0, 0};
Shape padding_above{1, 0, 0, 0};
auto param = make_shared<op::Parameter>(element::f32, arg_shape);
auto mp = make_shared<op::MaxPool>(
param, window_shape, window_movement_strides, padding_below, padding_above);
ASSERT_EQ(mp->get_output_element_type(0), element::f32);
ASSERT_TRUE(mp->get_output_partial_shape(0).same_scheme(
PartialShape{5, Dimension::dynamic(), 1, Dimension::dynamic(), 1, 3}));
}
TEST(type_prop, reverse_0d_deduce)
{
// Deduce type
......@@ -7477,7 +7661,9 @@ TEST(type_prop, avg_pool_invalid_0d_input)
}
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(error.what(), "Data input shape does not have rank of at least 3");
EXPECT_HAS_SUBSTRING(error.what(),
"Data batch must have rank of at least 3 (one batch axis, one "
"input-channel axis, and at least one spatial dimension)");
}
catch (...)
{
......@@ -7499,7 +7685,9 @@ TEST(type_prop, avg_pool_invalid_1d_input)
}
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(error.what(), "Data input shape does not have rank of at least 3");
EXPECT_HAS_SUBSTRING(error.what(),
"Data batch must have rank of at least 3 (one batch axis, one "
"input-channel axis, and at least one spatial dimension)");
}
catch (...)
{
......@@ -7521,7 +7709,9 @@ TEST(type_prop, avg_pool_invalid_2d_input)
}
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(error.what(), "Data input shape does not have rank of at least 3");
EXPECT_HAS_SUBSTRING(error.what(),
"Data batch must have rank of at least 3 (one batch axis, one "
"input-channel axis, and at least one spatial dimension)");
}
catch (...)
{
......@@ -7588,7 +7778,10 @@ TEST(type_prop, avg_pool_invalid_wrong_number_of_window_dimensions_too_many)
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(error.what(),
"Window shape (Shape{3, 3, 3}) does not have required rank (2)");
"Ranks for data item shape (data batch has shape {6,2,10,10}, so data "
"item rank is 2), padding below (CoordinateDiff{0, 0, 0}), padding "
"above (CoordinateDiff{0, 0, 0}), window shape ({3,3,3}), and window "
"strides (Strides{1, 1, 1}) do not match");
}
catch (...)
{
......@@ -7611,7 +7804,10 @@ TEST(type_prop, avg_pool_invalid_wrong_number_of_window_dimensions_too_few)
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(error.what(),
"Window shape (Shape{3}) does not have required rank (2)");
"Ranks for data item shape (data batch has shape {6,2,10,10}, so data "
"item rank is 2), padding below (CoordinateDiff{0}), padding above "
"(CoordinateDiff{0}), window shape ({3}), and window strides "
"(Strides{1}) do not match");
}
catch (...)
{
......@@ -7635,7 +7831,10 @@ TEST(type_prop, avg_pool_invalid_movement_stride_rank)
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(error.what(),
"Window shape (Strides{2, 3, 8}) does not have required rank (2)");
"Ranks for data item shape (data batch has shape {6,2,10,10}, so data "
"item rank is 2), padding below (CoordinateDiff{0, 0}), padding above "
"(CoordinateDiff{0, 0}), window shape ({3,3}), and window strides "
"(Strides{2, 3, 8}) do not match");
}
catch (...)
{
......@@ -7661,9 +7860,11 @@ TEST(type_prop, avg_pool_invalid_padding_below_rank)
}
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(
error.what(),
"Data padding below (CoordinateDiff{1, 2, 3}) does not have required rank (2)");
EXPECT_HAS_SUBSTRING(error.what(),
"Ranks for data item shape (data batch has shape {6,2,10,10}, so data "
"item rank is 2), padding below (CoordinateDiff{1, 2, 3}), padding "
"above (CoordinateDiff{1, 2}), window shape ({3,3}), and window "
"strides (Strides{2, 3}) do not match");
}
catch (...)
{
......@@ -7689,9 +7890,11 @@ TEST(type_prop, avg_pool_invalid_padding_above_rank)
}
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(
error.what(),
"Data padding above (CoordinateDiff{1, 2, 3}) does not have required rank (2");
EXPECT_HAS_SUBSTRING(error.what(),
"Ranks for data item shape (data batch has shape {6,2,10,10}, so data "
"item rank is 2), padding below (CoordinateDiff{1, 2}), padding above "
"(CoordinateDiff{1, 2, 3}), window shape ({3,3}), and window strides "
"(Strides{2, 3}) do not match");
}
catch (...)
{
......@@ -7808,6 +8011,230 @@ TEST(type_prop, avg_pool_invalid_movement_stride_0)
}
}
TEST(type_prop, avg_pool_partial_rank_dynamic_ok)
{
PartialShape arg_shape{PartialShape::dynamic()};
Shape window_shape{2, 3, 4, 5};
Strides window_movement_strides{1, 1, 1, 1};
Shape padding_below{0, 0, 0, 0};
Shape padding_above{0, 0, 0, 0};
bool include_padding_in_average = false;
auto param = make_shared<op::Parameter>(element::f32, arg_shape);
auto ap = make_shared<op::AvgPool>(param,
window_shape,
window_movement_strides,
padding_below,
padding_above,
include_padding_in_average);
ASSERT_EQ(ap->get_output_element_type(0), element::f32);
ASSERT_TRUE(ap->get_output_partial_shape(0).same_scheme(PartialShape::dynamic(6)));
}
TEST(type_prop, avg_pool_partial_rank_dynamic_attrib_rank_mismatch)
{
PartialShape arg_shape{PartialShape::dynamic()};
Shape window_shape{2, 3, 4, 5};
Strides window_movement_strides{1, 1, 1, 1, 1};
Shape padding_below{0, 0, 0, 0};
Shape padding_above{0, 0, 0, 0};
bool include_padding_in_average = false;
auto param = make_shared<op::Parameter>(element::f32, arg_shape);
try
{
auto ap = make_shared<op::AvgPool>(param,
window_shape,
window_movement_strides,
padding_below,
padding_above,
include_padding_in_average);
FAIL() << "Mismatch of attribute ranks not detected";
}
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(
error.what(),
std::string("Ranks for data item shape (data batch has shape ?, so data item rank is "
"?), padding below (CoordinateDiff{0, 0, 0, 0}), padding above "
"(CoordinateDiff{0, 0, 0, 0}), window shape ({2,3,4,5}), and window "
"strides (Strides{1, 1, 1, 1, 1}) do not match"));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}
TEST(type_prop, avg_pool_partial_rank_static_dynamic_ok)
{
PartialShape arg_shape{PartialShape::dynamic(6)};
Shape window_shape{2, 3, 4, 5};
Strides window_movement_strides{1, 1, 1, 1};
Shape padding_below{0, 0, 0, 0};
Shape padding_above{0, 0, 0, 0};
bool include_padding_in_average = false;
auto param = make_shared<op::Parameter>(element::f32, arg_shape);
auto ap = make_shared<op::AvgPool>(param,
window_shape,
window_movement_strides,
padding_below,
padding_above,
include_padding_in_average);
ASSERT_EQ(ap->get_output_element_type(0), element::f32);
ASSERT_TRUE(ap->get_output_partial_shape(0).same_scheme(PartialShape::dynamic(6)));
}
TEST(type_prop, avg_pool_partial_rank_static_dynamic_some_dims_known_ok)
{
PartialShape arg_shape{5, Dimension::dynamic(), 8, Dimension::dynamic(), 4, 7};
Shape window_shape{2, 3, 4, 5};
Strides window_movement_strides{1, 1, 1, 1};
Shape padding_below{0, 0, 0, 0};
Shape padding_above{0, 0, 0, 0};
bool include_padding_in_average = false;
auto param = make_shared<op::Parameter>(element::f32, arg_shape);
auto ap = make_shared<op::AvgPool>(param,
window_shape,
window_movement_strides,
padding_below,
padding_above,
include_padding_in_average);
ASSERT_EQ(ap->get_output_element_type(0), element::f32);
ASSERT_TRUE(ap->get_output_partial_shape(0).same_scheme(
PartialShape{5, Dimension::dynamic(), 7, Dimension::dynamic(), 1, 3}));
}
TEST(type_prop, avg_pool_partial_rank_static_dynamic_attrib_rank_mismatch)
{
PartialShape arg_shape{5, Dimension::dynamic(), 8, Dimension::dynamic(), 4, 7};
Shape window_shape{2, 3, 4, 5, 6};
Strides window_movement_strides{1, 1, 1, 1};
Shape padding_below{0, 0, 0, 0};
Shape padding_above{0, 0, 0, 0};
bool include_padding_in_average = false;
auto param = make_shared<op::Parameter>(element::f32, arg_shape);
try
{
auto ap = make_shared<op::AvgPool>(param,
window_shape,
window_movement_strides,
padding_below,
padding_above,
include_padding_in_average);
FAIL() << "Mismatch of attribute ranks not detected";
}
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(
error.what(),
std::string("Ranks for data item shape (data batch has shape {5,?,8,?,4,7}, so data "
"item rank is 4), padding below (CoordinateDiff{0, 0, 0, 0}), padding "
"above (CoordinateDiff{0, 0, 0, 0}), window shape ({2,3,4,5,6}), and "
"window strides (Strides{1, 1, 1, 1}) do not match"));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}
TEST(type_prop, avg_pool_partial_rank_static_dynamic_window_not_too_big)
{
PartialShape arg_shape{5, Dimension::dynamic(), 8, Dimension::dynamic(), 4, 7};
Shape window_shape{9, 3, 4, 5};
Strides window_movement_strides{1, 1, 1, 1};
Shape padding_below{0, 0, 0, 0};
Shape padding_above{0, 0, 0, 0};
bool include_padding_in_average = false;
auto param = make_shared<op::Parameter>(element::f32, arg_shape);
try
{
auto ap = make_shared<op::AvgPool>(param,
window_shape,
window_movement_strides,
padding_below,
padding_above,
include_padding_in_average);
FAIL() << "Oversized window not detected";
}
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(error.what(),
std::string("Window after dilation has dimension (dim: 9) larger than "
"the data shape after padding (dim: 8) at axis 0"));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}
TEST(type_prop, avg_pool_partial_rank_static_dynamic_padded_window_not_too_big)
{
PartialShape arg_shape{5, Dimension::dynamic(), 8, Dimension::dynamic(), 4, 7};
Shape window_shape{9, 3, 4, 5};
Strides window_movement_strides{1, 1, 1, 1};
Shape padding_below{0, 0, 0, 0};
Shape padding_above{1, 0, 0, 0};
bool include_padding_in_average = false;
auto param = make_shared<op::Parameter>(element::f32, arg_shape);
auto ap = make_shared<op::AvgPool>(param,
window_shape,
window_movement_strides,
padding_below,
padding_above,
include_padding_in_average);
ASSERT_EQ(ap->get_output_element_type(0), element::f32);
ASSERT_TRUE(ap->get_output_partial_shape(0).same_scheme(
PartialShape{5, Dimension::dynamic(), 1, Dimension::dynamic(), 1, 3}));
}
TEST(type_prop, avg_pool_partial_rank_static_dynamic_window_in_padding)
{
PartialShape arg_shape{5, Dimension::dynamic(), 8, Dimension::dynamic(), 4, 7};
Shape window_shape{9, 3, 4, 3};
Strides window_movement_strides{1, 1, 1, 1};
Shape padding_below{0, 0, 0, 4};
Shape padding_above{0, 0, 0, 0};
bool include_padding_in_average = false;
auto param = make_shared<op::Parameter>(element::f32, arg_shape);
try
{
auto ap = make_shared<op::AvgPool>(param,
window_shape,
window_movement_strides,
padding_below,
padding_above,
include_padding_in_average);
FAIL() << "Window in padding not detected";
}
catch (const NodeValidationError& error)
{
EXPECT_HAS_SUBSTRING(error.what(),
std::string("Window after dilation has dimension (dim: 9) larger than "
"the data shape after padding (dim: 8) at axis 0"));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}
TEST(type_prop, pad_deduce_1d_exterior)
{
// Deduce type
......