Commit 167844e4 authored by Anna Alberska, committed by Robert Kimball

IntelGPU backend: Power, Sigmoid and ReluBackprop operations (#1286)

* IntelGPU backend: Power, Sigmoid and ReluBackprop operations

* style changed to ReluBackprop

* Update intelgpu_backend.cpp
Parent commit: f534650d
@@ -15,6 +15,7 @@
*******************************************************************************/
#include <CPP/activation.hpp>
#include <CPP/activation_grad.hpp>
#include <CPP/batch_norm.hpp>
#include <CPP/convolution.hpp>
#include <CPP/data.hpp>
@@ -338,6 +339,19 @@ bool runtime::intelgpu::IntelGPUBackend::compile(shared_ptr<Function> func)
{
do_unary_operation(topology, op, activation_relu);
}
else if ("ReluBackprop" == op->description())
{
arguments_check(op, 2, 1);
const string& input = op->get_inputs().at(0).get_tensor().get_name();
const string& input_grad = op->get_inputs().at(1).get_tensor().get_name();
const string& output_name = op->get_outputs().begin()->get_tensor().get_name();
const cldnn_activation_additional_params& param = {0.f, 0.f};
const cldnn::activation_grad cldnn_activ_grad(
output_name, input_grad, input, activation_grad_relu, param);
topology.add(cldnn_activ_grad);
}
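// Note: activation_grad_relu is expected to implement the standard ReLU
// gradient, dx = input_grad where input > 0 and 0 elsewhere; the two
// cldnn_activation_additional_params values appear unused for this mode
// and are passed as zeros.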
else if ("Abs" == op->description())
{
do_unary_operation(topology, op, activation_abs);
@@ -350,10 +364,18 @@ bool runtime::intelgpu::IntelGPUBackend::compile(shared_ptr<Function> func)
{
do_unary_operation(topology, op, activation_hyperbolic_tan);
}
else if ("Sigmoid" == op->description())
{
do_unary_operation(topology, op, activation_logistic);
}
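// Note: activation_logistic computes the logistic sigmoid,
// y = 1 / (1 + exp(-x)), applied elementwise.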
else if ("Subtract" == op->description())
{
do_eltwise_operation(topology, op, cldnn::eltwise_mode::sub);
}
else if ("Power" == op->description())
{
do_eltwise_operation(topology, op, cldnn::eltwise_mode::pow);
}
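// Note: eltwise_mode::pow raises the first input to the power of the
// second, elementwise: out = pow(arg0, arg1).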
else if ("Pad" == op->description())
{
arguments_check(op, 2, 1);
......
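For reference, here is a minimal sketch of the numerics the three new branches are expected to produce, written as plain C++ over float buffers. The function names are illustrative only and are not part of the nGraph or clDNN APIs.

#include <cmath>
#include <cstddef>

// Sigmoid (clDNN activation_logistic): y[i] = 1 / (1 + exp(-x[i]))
void sigmoid_ref(const float* x, float* y, size_t n)
{
    for (size_t i = 0; i < n; ++i)
    {
        y[i] = 1.0f / (1.0f + std::exp(-x[i]));
    }
}

// Power (clDNN eltwise_mode::pow): out[i] = pow(base[i], exponent[i])
void power_ref(const float* base, const float* exponent, float* out, size_t n)
{
    for (size_t i = 0; i < n; ++i)
    {
        out[i] = std::pow(base[i], exponent[i]);
    }
}

// ReluBackprop (clDNN activation_grad_relu):
// dx[i] = input[i] > 0 ? input_grad[i] : 0
void relu_backprop_ref(const float* input, const float* input_grad, float* dx, size_t n)
{
    for (size_t i = 0; i < n; ++i)
    {
        dx[i] = input[i] > 0.0f ? input_grad[i] : 0.0f;
    }
}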