Commit 15a0bf19 authored by Adam Rogowiec's avatar Adam Rogowiec Committed by Scott Cyphers

[NGCORE-270] UT for Softplus ONNX operator testing edge cases. (#2254)

* UT for Softplus ONNX operator testing edge cases.

* Rename UT model name.

* Handle overflows.

* Add UT for infinite values and check them correctly.

* Update values in comment
parent 757621be
......@@ -16,12 +16,14 @@
#include <memory>
#include "ngraph/builder/make_constant.hpp"
#include "ngraph/node.hpp"
#include "ngraph/op/add.hpp"
#include "ngraph/op/constant.hpp"
#include "ngraph/op/exp.hpp"
#include "ngraph/op/greater.hpp"
#include "ngraph/op/log.hpp"
#include "utils/broadcasting.hpp"
#include "ngraph/op/negative.hpp"
#include "ngraph/op/select.hpp"
#include "softplus.hpp"
......@@ -37,12 +39,31 @@ namespace ngraph
{
// Fetch the single input tensor of the ONNX Softplus node.
auto data = node.get_ng_inputs().at(0);
// NOTE(review): this span is a scraped diff whose +/- markers were lost.
// The next three lines and the first `return` below appear to be the
// PRE-change code (they redeclare `one_node` and return early); the
// builder::make_constant declarations and the Select-based return are the
// post-change version. Confirm against the repository before editing.
std::shared_ptr<ngraph::Node> one_node = std::make_shared<ngraph::op::Constant>(
data->get_element_type(), Shape{}, std::vector<double>{1});
one_node = make_broadcast_node(one_node, data->get_shape());
// Constants 0 and 1 materialized at the input's shape for elementwise use.
std::shared_ptr<ngraph::Node> zero_node =
builder::make_constant(data->get_element_type(), data->get_shape(), 0.f);
std::shared_ptr<ngraph::Node> one_node =
builder::make_constant(data->get_element_type(), data->get_shape(), 1.f);
// NOTE(review): pre-change naive formulation log(exp(x) + 1); exp(x)
// overflows for large positive x, which the code below avoids.
return {std::make_shared<ngraph::op::Log>(
std::make_shared<ngraph::op::Exp>(data) + one_node)};
// Overflow-safe branch used when x > 0: x + log(exp(-x) + 1).
std::shared_ptr<ngraph::Node> positive_val_node =
data + std::make_shared<ngraph::op::Log>(
std::make_shared<ngraph::op::Exp>(
std::make_shared<ngraph::op::Negative>(data)) +
one_node);
// Branch used elsewhere (x <= 0): log(exp(x) + 1), safe since exp(x) <= 1.
std::shared_ptr<ngraph::Node> negative_val_node =
std::make_shared<ngraph::op::Log>(std::make_shared<ngraph::op::Exp>(data) +
one_node);
// Elementwise predicate selecting between the two formulations.
std::shared_ptr<ngraph::Node> condition_node =
std::make_shared<ngraph::op::Greater>(data, zero_node);
//
// This equation represents:
// x + log(exp(-x) + 1) - for x > 0; to manage exponent overflow,
// log(exp(x) + 1) - elsewhere.
//
return {std::make_shared<ngraph::op::Select>(
condition_node, positive_val_node, negative_val_node)};
}
} // namespace set_1
......
......@@ -14,8 +14,11 @@
// limitations under the License.
//*****************************************************************************
#include <cmath>
#include <cstdint>
#include <fstream>
#include <iterator>
#include <limits>
#include <sstream>
#include <vector>
......@@ -1536,6 +1539,74 @@ TEST(onnx, model_matmul_vec_ten3d)
EXPECT_TRUE(test::all_close_f(expected_output.front(), outputs.front()));
}
// Verifies the imported ONNX Softplus model against an overflow-safe reference
// implementation, covering normal activations, magnitudes where a naive
// exp() would overflow, float extremes, and infinite inputs.
TEST(onnx, model_softplus)
{
    auto function =
        onnx_import::import_onnx_model(file_util::path_join(SERIALIZED_ZOO, "onnx/softplus.onnx"));

    // Same values as FLT_MIN / FLT_MAX, but sourced from <limits>, which this
    // file includes (avoids a latent dependency on <cfloat>).
    const float flt_min = std::numeric_limits<float>::min();
    const float flt_max = std::numeric_limits<float>::max();

    // -1.0f, 0, 1.0f, 10.f                 : normal activation inputs
    // 100.0f, -100.0f, 1000.0f, -1000.0f   : magnitudes where naive exp() overflows
    // flt_min, flt_min / 16, -flt_min / 16 : smallest normal and denormals
    // flt_max, -flt_max                    : float extremes
    Inputs inputs{std::vector<float>{-1.0f,
                                     0,
                                     1.0f,
                                     10.f,
                                     100.0f,
                                     -100.0f,
                                     1000.0f,
                                     -1000.0f,
                                     flt_min,
                                     flt_min / 16,
                                     -flt_min / 16,
                                     flt_max,
                                     -flt_max}};

    // Reference softplus mirroring the overflow-safe formulation used by the
    // importer:
    //   x + log(exp(-x) + 1)  for x > 0 (keeps exp()'s argument non-positive),
    //   log(exp(x) + 1)       otherwise.
    auto softplus_impl = [](float x) -> float {
        return (x > 0) ? x + std::log(std::exp(-x) + 1) : std::log(std::exp(x) + 1);
    };

    std::vector<float> reference;
    reference.reserve(inputs.front().size());
    std::transform(std::begin(inputs.front()),
                   std::end(inputs.front()),
                   std::back_inserter(reference),
                   softplus_impl);

    Outputs expected_output{reference};
    Outputs outputs{execute(function, inputs, "INTERPRETER")};
    EXPECT_TRUE(test::all_close_f(expected_output.front(), outputs.front()));

    // softplus(+inf) == +inf: every infinite input must yield an infinite
    // output. (The original code kept a reference into `inputs` across this
    // clear(), which dangled; it is intentionally not reproduced here.)
    inputs.clear();
    inputs.emplace_back(std::vector<float>(13, std::numeric_limits<float>::infinity()));

    outputs = execute(function, inputs, "INTERPRETER");
    for (float v : outputs.front())
    {
        EXPECT_TRUE(std::isinf(v));
    }
}
TEST(onnx, model_sum_opset8)
{
auto function = onnx_import::import_onnx_model(
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment