Commit 3609cc74 authored by shssf, committed by Robert Kimball

IntelGPU backend: Reshape operation optimization (#1566)

parent e6267708
@@ -594,27 +594,34 @@ bool runtime::intelgpu::IntelGPUBackend::compile(shared_ptr<Function> func)
     {
         arguments_check(op, 1, 1);
-        const shared_ptr<op::Reshape> op_broadcast = static_pointer_cast<op::Reshape>(op);
-        const AxisVector& broadcast_axes = op_broadcast->get_input_order();
-        vector<uint16_t> permute_order({0, 1, 2, 3}); // No action by default
-        const size_t max_dim = 4;
-        const size_t scale =
-            broadcast_axes.size() < max_dim ? max_dim - broadcast_axes.size() : 0;
-        // Indexes need to be scaled up according to the array rank.
-        // For example, in a 2D array the indexes are 0,1, but in a 4D array they must be 2,3
-        // because cldnn::tensor is always 4D, assuming the cldnn::bfyx model.
-        size_t rindex = max_dim;
-        for (auto i = broadcast_axes.crbegin(); i != broadcast_axes.crend() && rindex > 0;
-             ++i, --rindex)
-        {
-            permute_order.at(rindex - 1) = *i + scale;
-        }
-        const cldnn::permute cldnn_permute(
-            get_output_name(op), get_input_name(op), permute_order);
-        topology.add(cldnn_permute);
+        const shared_ptr<op::Reshape> op_reshape = static_pointer_cast<op::Reshape>(op);
+        if (op_reshape->get_is_transpose())
+        {
+            vector<uint16_t> permute_order({0, 1, 2, 3}); // No action by default
+            const AxisVector& reshape_axes = op_reshape->get_input_order();
+            const size_t max_dim = 4;
+            const size_t scale =
+                reshape_axes.size() < max_dim ? max_dim - reshape_axes.size() : 0;
+            // Indexes need to be scaled up according to the array rank.
+            // For example, in a 2D array the indexes are 0,1, but in a 4D array they must be 2,3
+            // because cldnn::tensor is always 4D, assuming the cldnn::bfyx model.
+            size_t rindex = max_dim;
+            for (auto i = reshape_axes.crbegin(); i != reshape_axes.crend() && rindex > 0;
+                 ++i, --rindex)
+            {
+                permute_order.at(rindex - 1) = *i + scale;
+            }
+            const cldnn::permute cldnn_permute(
+                get_output_name(op), get_input_name(op), permute_order);
+            topology.add(cldnn_permute);
+        }
+        else
+        {
+            do_equal_propagation(topology, get_input_name(op), get_output_name(op));
+        }
     }
     else if ("Negative" == op->description())
     {
......
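For readability, here is the permute-order mapping from the hunk above worked through by hand. A cldnn::tensor is always 4D (bfyx), so a rank-n input_order has to be shifted into the last n of the four positions: every axis index is increased by 4 - n, while the identity order {0, 1, 2, 3} is kept for the leading dimensions. The snippet below is a minimal standalone sketch of that loop; to_cldnn_permute_order is a hypothetical helper written for illustration, not a function in the backend.

    #include <cstddef>
    #include <cstdint>
    #include <iostream>
    #include <vector>

    // Hypothetical helper mirroring the loop in the diff above: it maps an
    // nGraph input_order of rank <= 4 onto a 4D cldnn::bfyx permute order by
    // shifting every axis index up by (4 - rank).
    std::vector<uint16_t> to_cldnn_permute_order(const std::vector<size_t>& reshape_axes)
    {
        std::vector<uint16_t> permute_order({0, 1, 2, 3}); // No action by default
        const size_t max_dim = 4;
        const size_t scale = reshape_axes.size() < max_dim ? max_dim - reshape_axes.size() : 0;
        size_t rindex = max_dim;
        for (auto i = reshape_axes.crbegin(); i != reshape_axes.crend() && rindex > 0; ++i, --rindex)
        {
            permute_order.at(rindex - 1) = static_cast<uint16_t>(*i + scale);
        }
        return permute_order;
    }

    int main()
    {
        // A 2D transpose: input_order {1, 0} maps to {0, 1, 3, 2}, i.e. the
        // y and x dimensions of the 4D bfyx tensor are swapped.
        for (const uint16_t axis : to_cldnn_permute_order({1, 0}))
        {
            std::cout << axis << ' ';
        }
        std::cout << '\n'; // prints: 0 1 3 2
    }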
@@ -2691,6 +2691,25 @@ NGRAPH_TEST(${BACKEND_NAME}, reshape_s2t)
     EXPECT_EQ((vector<float>{42}), read_vector<float>(result));
 }
 
+NGRAPH_TEST(${BACKEND_NAME}, reshape_s2t1)
+{
+    Shape shape_a{};
+    auto A = make_shared<op::Parameter>(element::boolean, shape_a);
+    Shape shape_r{1};
+    auto r = make_shared<op::Reshape>(A, AxisVector{}, shape_r);
+    auto f = make_shared<Function>(r, op::ParameterVector{A});
+
+    auto backend = runtime::Backend::create("${BACKEND_NAME}");
+
+    // Create some tensors for input/output
+    auto a = backend->create_tensor(element::boolean, shape_a);
+    copy_data(a, vector<char>{42});
+    auto result = backend->create_tensor(element::boolean, shape_r);
+
+    backend->call_with_validate(f, {result}, {a});
+    EXPECT_EQ((vector<char>{42}), read_vector<char>(result));
+}
+
 NGRAPH_TEST(${BACKEND_NAME}, reshape_v2m_col)
 {
     Shape shape_a{3};
......
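The new reshape_s2t1 test exercises the non-transpose branch added above: its AxisVector is empty, so op::Reshape::get_is_transpose() returns false and the IntelGPU backend falls through to do_equal_propagation instead of emitting a cldnn::permute. For contrast, a test along the following lines would drive the transpose branch. It is only a sketch following the conventions of the tests above, with a hypothetical test name; equivalent transpose coverage likely already exists elsewhere in the suite.

    NGRAPH_TEST(${BACKEND_NAME}, reshape_m2m_transpose_sketch)
    {
        Shape shape_a{2, 3};
        auto A = make_shared<op::Parameter>(element::f32, shape_a);
        Shape shape_r{3, 2};
        // input_order {1, 0} is a genuine permutation, so get_is_transpose()
        // is true and the backend lowers this Reshape to a cldnn::permute.
        auto r = make_shared<op::Reshape>(A, AxisVector{1, 0}, shape_r);
        auto f = make_shared<Function>(r, op::ParameterVector{A});

        auto backend = runtime::Backend::create("${BACKEND_NAME}");

        auto a = backend->create_tensor(element::f32, shape_a);
        copy_data(a, vector<float>{1, 2, 3, 4, 5, 6});
        auto result = backend->create_tensor(element::f32, shape_r);

        backend->call_with_validate(f, {result}, {a});
        // The transpose of [[1, 2, 3], [4, 5, 6]], read back in row-major order.
        EXPECT_EQ((vector<float>{1, 4, 2, 5, 3, 6}), read_vector<float>(result));
    }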