Commit 72f4d661 authored by Chris Sullivan, committed by adstraw

Add op::ReluBackprop to GPU transformer (#712)

* Added backprop op for relu and enabled tests.
parent 22d1e52b
@@ -47,6 +47,7 @@ namespace ngraph
     class LessEq;
     class Not;
     class Relu;
+    class ReluBackprop;
     class Max;
     class Min;
     class Negative;
@@ -266,6 +267,13 @@ namespace ngraph
     static constexpr const char* op = "not";
     static constexpr const char* math_kernel = "!x0";
 };
+
+template <>
+struct CudaOpMap<ngraph::op::ReluBackprop>
+{
+    static constexpr const char* op = "relu_backprop";
+    static constexpr const char* math_kernel = "x1 * int(x0 > 0)";
+};
 }
 }
 }
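For reference, math_kernel is the scalar C expression that the GPU emitter splices into a generated pointwise kernel; here x0 is the forward-pass input and x1 is the incoming delta, so the expression zeroes the gradient wherever the input was non-positive. A hand-written CUDA equivalent of what such a kernel computes (illustrative only: the kernel name, float-only signature, and launch logic are assumptions, not the emitted code):

    // Illustrative sketch of the pointwise kernel that
    // math_kernel = "x1 * int(x0 > 0)" describes. Not the emitted code.
    extern "C" __global__ void relu_backprop(const float* x0, // forward input
                                             const float* x1, // incoming delta
                                             float* out,
                                             size_t n)
    {
        size_t i = blockIdx.x * blockDim.x + threadIdx.x;
        if (i < n)
        {
            // Pass the delta through where the forward input was positive,
            // zero it elsewhere (the gradient of ReLU).
            out[i] = x1[i] * int(x0[i] > 0);
        }
    }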
@@ -232,7 +232,7 @@ static const runtime::gpu::OpMap dispatcher{
     {TI(ngraph::op::Max), &runtime::gpu::GPU_Emitter::emit<ngraph::op::Max>},
     {TI(ngraph::op::Min), &runtime::gpu::GPU_Emitter::emit<ngraph::op::Min>},
     {TI(ngraph::op::Relu), &runtime::gpu::GPU_Emitter::EmitElementwise},
-    {TI(ngraph::op::ReluBackprop), &runtime::gpu::GPU_Emitter::emit<ngraph::op::ReluBackprop>},
+    {TI(ngraph::op::ReluBackprop), &runtime::gpu::GPU_Emitter::EmitElementwise},
     {TI(ngraph::op::Softmax), &runtime::gpu::GPU_Emitter::emit<ngraph::op::Softmax>},
 };
@@ -8433,7 +8433,6 @@ TEST(${BACKEND_NAME}, fuse_max_with_constant_zero_input_as_relu)
 TEST(${BACKEND_NAME}, relu_2Dbackprop)
 {
-    SKIP_TEST_FOR("GPU", "${BACKEND_NAME}");
     auto shape_a = Shape{2, 5};
     auto A = make_shared<op::Parameter>(element::f32, shape_a);
     auto delta_val = make_shared<op::Parameter>(element::f32, shape_a);
@@ -8459,7 +8458,6 @@ TEST(${BACKEND_NAME}, relu_2Dbackprop)
 TEST(${BACKEND_NAME}, relu_4Dbackprop)
 {
-    SKIP_TEST_FOR("GPU", "${BACKEND_NAME}");
     auto shape_a = Shape{2, 2, 2, 2};
     auto A = make_shared<op::Parameter>(element::f32, shape_a);
     auto delta_val = make_shared<op::Parameter>(element::f32, shape_a);
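Both test bodies are truncated above; removing the SKIP_TEST_FOR lines is what actually enables them on the GPU backend. Purely for orientation, a sketch of how a test like this typically continues in nGraph of this period (the two-argument ReluBackprop constructor and the Function/ParameterVector calls are assumptions, not the elided test code):

    // Illustrative sketch, not the elided test body.
    // ReluBackprop is assumed to take (forward input, incoming delta).
    auto bprop = make_shared<op::ReluBackprop>(A, delta_val);
    auto f = make_shared<Function>(bprop, op::ParameterVector{A, delta_val});
    // The expected output passes each delta element through where the
    // corresponding forward input is positive, and is zero elsewhere.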