Unverified commit ffe3a631, authored by Robert Kimball and committed by GitHub

Cache functions so the backend does not need to recompile (#1209)

* Cache some generated functions in the backwards tests to improve performance

* more caching
parent 9fecc560
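The diff below achieves this with plain memoization: the generated backprop and cloned forward/backward Functions are stored in static maps keyed by the original Function's shared_ptr, so repeated calls for the same function reuse the cached object instead of rebuilding and recompiling it. A minimal sketch of that idiom, with hypothetical build_derivative/cached_derivative names standing in for the real helpers:

#include <memory>
#include <unordered_map>

// Stand-in for ngraph::Function; the real caches are keyed on
// std::shared_ptr<ngraph::Function> in the same way.
struct Function
{
};

// The expensive step we want to run once per distinct forward function
// (in the diff this is building the backprop graph or cloning fprop/bprop).
std::shared_ptr<Function> build_derivative(const std::shared_ptr<Function>& /*f*/)
{
    return std::make_shared<Function>();
}

// Cache keyed on the forward function object itself.
static std::unordered_map<std::shared_ptr<Function>, std::shared_ptr<Function>> s_cache;

std::shared_ptr<Function> cached_derivative(const std::shared_ptr<Function>& f)
{
    if (!s_cache[f]) // empty on the first lookup for a given f
    {
        s_cache[f] = build_derivative(f); // pay the build/compile cost once
    }
    return s_cache[f]; // later calls with the same f reuse the cached result
}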
@@ -294,10 +294,12 @@ NGRAPH_TEST(${BACKEND_NAME}, backwards_avgpool_n2_c2_hw4x4_numeric)
     };
+    auto f = make_graph();
+    auto g = make_graph();
     for (auto i = 0; i < ${TEST_LOOPS}; i++)
     {
         auto x = rng.initialize(backend->create_tensor(element::f32, shape_a));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x}, .01f, .01f));
     }
 }
@@ -316,10 +318,12 @@ NGRAPH_TEST(${BACKEND_NAME}, backwards_avgpool_n2_c2_hw4x4_win_2x2_str_1x1_numer
     };
+    auto f = make_graph();
+    auto g = make_graph();
     for (auto i = 0; i < ${TEST_LOOPS}; i++)
     {
         auto x = rng.initialize(backend->create_tensor(element::f32, shape_a));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x}, .01f, .01f));
     }
 }
@@ -340,10 +344,12 @@ NGRAPH_TEST(${BACKEND_NAME}, backwards_avgpool_n2_c2_hw2x2_win_2x2_str_1x1_paddi
     };
+    auto f = make_graph();
+    auto g = make_graph();
     for (auto i = 0; i < ${TEST_LOOPS}; i++)
     {
         auto x = rng.initialize(backend->create_tensor(element::f32, shape_a));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x}, .01f, .01f));
     }
 }
@@ -363,15 +369,17 @@ NGRAPH_TEST(${BACKEND_NAME}, backwards_abs)
             std::vector<std::shared_ptr<op::Parameter>>{X});
     };
+    auto f = make_graph();
+    auto g = make_graph();
     for (auto i = 0; i < ${TEST_LOOPS}; i++)
     {
         auto x_neg = rng_neg.initialize(backend->create_tensor<float>(shape));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x_neg}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x_neg}, .01f, .01f));
         auto x_pos = rng_pos.initialize(backend->create_tensor<float>(shape));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x_pos}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x_pos}, .01f, .01f));
     }
 }
@@ -573,19 +581,21 @@ NGRAPH_TEST(${BACKEND_NAME}, backwards_ceiling)
             std::vector<std::shared_ptr<op::Parameter>>{X});
     };
+    auto f = make_graph();
+    auto g = make_graph();
     for (auto i = 0; i < ${TEST_LOOPS}; i++)
     {
         auto x_minusone = rng_minusone.initialize(backend->create_tensor<float>(shape));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x_minusone}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x_minusone}, .01f, .01f));
         auto x_plusone = rng_plusone.initialize(backend->create_tensor<float>(shape));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x_plusone}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x_plusone}, .01f, .01f));
         auto x_plustwo = rng_plustwo.initialize(backend->create_tensor<float>(shape));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x_plustwo}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x_plustwo}, .01f, .01f));
     }
 }
@@ -601,11 +611,13 @@ NGRAPH_TEST(${BACKEND_NAME}, backwards_cos)
             std::vector<std::shared_ptr<op::Parameter>>{X});
     };
+    auto f = make_graph();
+    auto g = make_graph();
     for (auto i = 0; i < ${TEST_LOOPS}; i++)
     {
         auto x = rng.initialize(backend->create_tensor<float>(shape));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x}, .01f, .01f));
     }
 }
@@ -621,11 +633,13 @@ NGRAPH_TEST(${BACKEND_NAME}, backwards_cosh)
             std::vector<std::shared_ptr<op::Parameter>>{X});
     };
+    auto f = make_graph();
+    auto g = make_graph();
     for (auto i = 0; i < ${TEST_LOOPS}; i++)
     {
         auto x = rng.initialize(backend->create_tensor<float>(shape));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x}, .01f, .01f));
     }
 }
@@ -816,19 +830,21 @@ NGRAPH_TEST(${BACKEND_NAME}, backwards_floor)
             std::vector<std::shared_ptr<op::Parameter>>{X});
     };
+    auto f = make_graph();
+    auto g = make_graph();
     for (auto i = 0; i < ${TEST_LOOPS}; i++)
     {
         auto x_minusone = rng_minusone.initialize(backend->create_tensor<float>(shape));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x_minusone}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x_minusone}, .01f, .01f));
         auto x_plusone = rng_plusone.initialize(backend->create_tensor<float>(shape));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x_plusone}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x_plusone}, .01f, .01f));
         auto x_plustwo = rng_plustwo.initialize(backend->create_tensor<float>(shape));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x_plustwo}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x_plustwo}, .01f, .01f));
     }
 }
@@ -977,15 +993,17 @@ NGRAPH_TEST(${BACKEND_NAME}, backwards_relu)
             std::vector<std::shared_ptr<op::Parameter>>{X});
     };
+    auto f = make_graph();
+    auto g = make_graph();
     for (auto i = 0; i < ${TEST_LOOPS}; i++)
     {
         auto x_neg = rng_neg.initialize(backend->create_tensor<float>(shape));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x_neg}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x_neg}, .01f, .01f));
         auto x_pos = rng_pos.initialize(backend->create_tensor<float>(shape));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x_pos}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x_pos}, .01f, .01f));
     }
 }
@@ -1004,12 +1022,14 @@ NGRAPH_TEST(${BACKEND_NAME}, backwards_replace_slice)
             std::vector<std::shared_ptr<op::Parameter>>{X, Y});
     };
+    auto f = make_graph();
+    auto g = make_graph();
     for (auto i = 0; i < ${TEST_LOOPS}; i++)
     {
         auto x = rng.initialize(backend->create_tensor<float>(shape_x));
         auto y = rng.initialize(backend->create_tensor<float>(shape_y));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x, y}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x, y}, .01f, .01f));
     }
 }
@@ -1043,6 +1063,8 @@ NGRAPH_TEST(${BACKEND_NAME}, backwards_select)
             std::vector<std::shared_ptr<op::Parameter>>{X0, X1, X2});
     };
+    auto f = make_graph();
+    auto g = make_graph();
     for (auto i = 0; i < ${TEST_LOOPS}; i++)
     {
         auto x0 = backend->create_tensor(element::boolean, shape);
@@ -1051,7 +1073,7 @@ NGRAPH_TEST(${BACKEND_NAME}, backwards_select)
         auto x2 = rng.initialize(backend->create_tensor<float>(shape));
         EXPECT_TRUE(autodiff_numeric_compare_selective<float>(
-            backend, make_graph, {x0, x1, x2}, .01f, .01f, std::vector<bool>{false, true, true}));
+            backend, f, g, {x0, x1, x2}, .01f, .01f, std::vector<bool>{false, true, true}));
     }
 }
@@ -1069,6 +1091,8 @@ NGRAPH_TEST(${BACKEND_NAME}, backwards_select_nested)
             std::vector<std::shared_ptr<op::Parameter>>{X0, X1, X2});
     };
+    auto f = make_graph();
+    auto g = make_graph();
     for (auto i = 0; i < ${TEST_LOOPS}; i++)
     {
         auto x0 = backend->create_tensor(element::boolean, shape);
@@ -1077,7 +1101,7 @@ NGRAPH_TEST(${BACKEND_NAME}, backwards_select_nested)
         auto x2 = rng.initialize(backend->create_tensor<float>(shape));
         EXPECT_TRUE(autodiff_numeric_compare_selective<float>(
-            backend, make_graph, {x0, x1, x2}, .01f, .01f, std::vector<bool>{false, true, true}));
+            backend, f, g, {x0, x1, x2}, .01f, .01f, std::vector<bool>{false, true, true}));
     }
 }
@@ -1097,15 +1121,17 @@ NGRAPH_TEST(${BACKEND_NAME}, backwards_sigmoid)
             std::vector<std::shared_ptr<op::Parameter>>{X});
     };
+    auto f = make_graph();
+    auto g = make_graph();
     for (auto i = 0; i < ${TEST_LOOPS}; i++)
     {
         auto x_neg = rng_neg.initialize(backend->create_tensor<float>(shape));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x_neg}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x_neg}, .01f, .01f));
         auto x_pos = rng_pos.initialize(backend->create_tensor<float>(shape));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x_pos}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x_pos}, .01f, .01f));
     }
 }
@@ -1125,15 +1151,17 @@ NGRAPH_TEST(${BACKEND_NAME}, backwards_sign)
             std::vector<std::shared_ptr<op::Parameter>>{X});
     };
+    auto f = make_graph();
+    auto g = make_graph();
     for (auto i = 0; i < ${TEST_LOOPS}; i++)
     {
         auto x_neg = rng_neg.initialize(backend->create_tensor<float>(shape));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x_neg}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x_neg}, .01f, .01f));
         auto x_pos = rng_pos.initialize(backend->create_tensor<float>(shape));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x_pos}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x_pos}, .01f, .01f));
     }
 }
@@ -1149,11 +1177,13 @@ NGRAPH_TEST(${BACKEND_NAME}, backwards_sin)
             std::vector<std::shared_ptr<op::Parameter>>{X});
     };
+    auto f = make_graph();
+    auto g = make_graph();
     for (auto i = 0; i < ${TEST_LOOPS}; i++)
     {
         auto x = rng.initialize(backend->create_tensor<float>(shape));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x}, .01f, .01f));
     }
 }
@@ -1169,11 +1199,13 @@ NGRAPH_TEST(${BACKEND_NAME}, backwards_sinh)
             std::vector<std::shared_ptr<op::Parameter>>{X});
     };
+    auto f = make_graph();
+    auto g = make_graph();
     for (auto i = 0; i < ${TEST_LOOPS}; i++)
     {
         auto x = rng.initialize(backend->create_tensor<float>(shape));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x}, .01f, .01f));
     }
 }
@@ -1188,11 +1220,13 @@ NGRAPH_TEST(${BACKEND_NAME}, backwards_slice)
             std::vector<std::shared_ptr<op::Parameter>>{X});
     };
+    auto f = make_graph();
+    auto g = make_graph();
     for (auto i = 0; i < ${TEST_LOOPS}; i++)
     {
         auto x = rng.initialize(backend->create_tensor<float>(shape));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x}, .01f, .01f));
     }
 }
@@ -1404,15 +1438,17 @@ NGRAPH_TEST(${BACKEND_NAME}, backwards_tan)
             std::vector<std::shared_ptr<op::Parameter>>{X});
     };
+    auto f = make_graph();
+    auto g = make_graph();
     for (auto i = 0; i < ${TEST_LOOPS}; i++)
    {
         auto x_r = rng_r.initialize(backend->create_tensor<float>(shape));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x_r}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x_r}, .01f, .01f));
         auto x_l = rng_l.initialize(backend->create_tensor<float>(shape));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x_l}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x_l}, .01f, .01f));
     }
 }
@@ -1428,11 +1464,13 @@ NGRAPH_TEST(${BACKEND_NAME}, backwards_tanh)
             std::vector<std::shared_ptr<op::Parameter>>{X});
     };
+    auto f = make_graph();
+    auto g = make_graph();
     for (auto i = 0; i < ${TEST_LOOPS}; i++)
     {
         auto x = rng.initialize(backend->create_tensor<float>(shape));
-        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x}, .01f, .01f));
+        EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x}, .01f, .01f));
     }
 }
@@ -1453,6 +1491,7 @@ NGRAPH_TEST(${BACKEND_NAME}, backwards_abc)
         return make_shared<Function>((X0 + X1) * X2,
             std::vector<std::shared_ptr<op::Parameter>>{X0, X1, X2});
     };
     EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {x0, x1, x2}, .01f, .01f));
 }
@@ -17,6 +17,7 @@
 #pragma once
 #include <memory>
+#include <unordered_map>
 #include "ngraph/autodiff/adjoints.hpp"
 #include "ngraph/graph_util.hpp"
@@ -31,6 +32,10 @@ namespace ngraph
     class Node;
     class Function;
+    static std::unordered_map<std::shared_ptr<Function>, std::shared_ptr<Function>> s_df_map;
+    static std::unordered_map<std::shared_ptr<Function>, std::shared_ptr<Function>> s_clone_fwd_map;
+    static std::unordered_map<std::shared_ptr<Function>, std::shared_ptr<Function>> s_clone_bwd_map;
     namespace runtime
     {
         class Backend;
@@ -145,7 +150,6 @@ namespace ngraph
     for (auto x : indep_params)
     {
         // add df/dx to df/dX*
-        auto x_shape = x->get_shape();
         df_output_params.push_back(adjoints.backprop_node(x));
     }
@@ -154,7 +158,11 @@
     df_input_params.insert(df_input_params.begin(), c_param);
     // df/dX* = f'(c, X)
-    auto df = std::make_shared<Function>(df_output_params, df_input_params);
+    if (!s_df_map[f])
+    {
+        s_df_map[f] = std::make_shared<Function>(df_output_params, df_input_params);
+    }
+    auto df = s_df_map[f];
     // (c, X) arguments
     std::vector<std::shared_ptr<runtime::TensorView>> df_input_args = f_input_args;
@@ -184,11 +192,20 @@
     }
     // compile and run modified (y, cached) = f(x)
-    auto clone_fwd = clone_function(*fprop_cache.fprop);
+    if (!s_clone_fwd_map[f])
+    {
+        s_clone_fwd_map[f] = clone_function(*fprop_cache.fprop);
+    }
+    auto clone_fwd = s_clone_fwd_map[f];
     backend->call(clone_fwd, mod_f_output_args, f_input_args);
     // call modfied f'(c, cached) to get df/dX*
-    auto clone_bwd = clone_function(*fprop_cache.bprop);
+    if (!s_clone_bwd_map[f])
+    {
+        s_clone_bwd_map[f] = clone_function(*fprop_cache.bprop);
+    }
+    auto clone_bwd = s_clone_bwd_map[f];
     auto cache_dfdx = get_autodiff<T>(backend, clone_bwd, mod_df_input_args, indep_params);
     const auto numpy_atol = 1e-5f;
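A detail worth noting about the caches above: the key is the shared_ptr itself, so a lookup hits only when the caller passes the very same Function object again; two graphs built by separate make_graph() calls are distinct keys even if they describe the same computation. That is exactly why the test changes earlier hoist f and g out of the ${TEST_LOOPS} loop, and why the "if (!s_df_map[f])" check works at all, since unordered_map's operator[] default-constructs an empty shared_ptr on the first access. A self-contained illustration (not code from the diff):

#include <cassert>
#include <memory>
#include <unordered_map>

struct Function
{
};

int main()
{
    std::unordered_map<std::shared_ptr<Function>, int> cache;

    auto f1 = std::make_shared<Function>();
    auto f2 = std::make_shared<Function>(); // same "graph", different object

    cache[f1] = 42;
    assert(cache.count(f1) == 1); // hit: identical shared_ptr
    assert(cache.count(f2) == 0); // miss: keys compare by pointer identity
    return 0;
}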
@@ -26,7 +26,8 @@
 // TODO: Always compute the numerical derivatives in double
 template <typename T>
 bool autodiff_numeric_compare(const std::shared_ptr<ngraph::runtime::Backend>& backend,
-                              std::function<std::shared_ptr<ngraph::Function>()> make_graph,
+                              std::shared_ptr<ngraph::Function> f,
+                              std::shared_ptr<ngraph::Function> g,
                               const std::vector<std::shared_ptr<ngraph::runtime::TensorView>>& args,
                               T rtol,
                               T atol)
@@ -35,7 +36,6 @@ bool autodiff_numeric_compare(const std::shared_ptr<ngraph::runtime::Backend>& b
     // Use INTERPRETER to compute numerical derivatives
     auto interpreter_backend = ngraph::runtime::Backend::create("INTERPRETER");
-    auto f = make_graph();
     std::vector<std::shared_ptr<ngraph::runtime::TensorView>> interpreter_args;
     for (auto arg : args)
@@ -58,7 +58,6 @@ bool autodiff_numeric_compare(const std::shared_ptr<ngraph::runtime::Backend>& b
         interpreter_backend, f, interpreter_args, delta, f->get_parameters());
     // Use the backend being tested to compute symbolic derivatives
-    auto g = make_graph();
     auto results_sym =
         ngraph::autodiff::backprop_derivative<T>(backend, g, args, g->get_parameters());
@@ -75,10 +74,21 @@ bool autodiff_numeric_compare(const std::shared_ptr<ngraph::runtime::Backend>& b
     return ngraph::test::all_close(results_num, interpreter_results_sym, rtol, atol);
 }
+template <typename T>
+bool autodiff_numeric_compare(const std::shared_ptr<ngraph::runtime::Backend>& backend,
+                              std::function<std::shared_ptr<ngraph::Function>()> make_graph,
+                              const std::vector<std::shared_ptr<ngraph::runtime::TensorView>>& args,
+                              T rtol,
+                              T atol)
+{
+    return autodiff_numeric_compare(backend, make_graph(), make_graph(), args, rtol, atol);
+}
 template <typename T>
 bool autodiff_numeric_compare_selective(
     const std::shared_ptr<ngraph::runtime::Backend>& backend,
-    std::function<std::shared_ptr<ngraph::Function>()> make_graph,
+    std::shared_ptr<ngraph::Function> f,
+    std::shared_ptr<ngraph::Function> g,
     const std::vector<std::shared_ptr<ngraph::runtime::TensorView>>& args,
     T rtol,
     T atol,
@@ -86,7 +96,6 @@
 {
     // Use INTERPRETER to compute numerical derivatives
     std::vector<std::shared_ptr<ngraph::op::Parameter>> f_indep_params;
-    auto f = make_graph();
     size_t i = 0;
@@ -123,7 +132,6 @@
     // Use the backend being tested to compute symbolic derivatives
     std::vector<std::shared_ptr<ngraph::op::Parameter>> g_indep_params;
-    auto g = make_graph();
     i = 0;
@@ -150,3 +158,16 @@
     return ngraph::test::all_close(results_num, interpreter_results_sym, rtol, atol);
 }
+template <typename T>
+bool autodiff_numeric_compare_selective(
+    const std::shared_ptr<ngraph::runtime::Backend>& backend,
+    std::function<std::shared_ptr<ngraph::Function>()> make_graph,
+    const std::vector<std::shared_ptr<ngraph::runtime::TensorView>>& args,
+    T rtol,
+    T atol,
+    const std::vector<bool>& indep_param_mask)
+{
+    return autodiff_numeric_compare_selective(
+        backend, make_graph(), make_graph(), args, rtol, atol, indep_param_mask);
+}
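Taken together, each comparison helper now has two call forms: the new one takes two prebuilt Functions (so the backend's per-Function caches can hit across loop iterations), while the original make_graph form is kept as a thin wrapper for unconverted call sites such as backwards_abc. A sketch of how a converted test exercises both; backend, rng, shape, make_graph, and the fixed loop count stand in for the fixtures defined in the test file above:

// Build the graphs once, outside the loop, so the backend compiles each only once.
auto f = make_graph();
auto g = make_graph();
for (int i = 0; i < 10; i++)
{
    auto x = rng.initialize(backend->create_tensor<float>(shape));
    EXPECT_TRUE(autodiff_numeric_compare<float>(backend, f, g, {x}, .01f, .01f));
}

// The original form is still available for one-off checks; it builds two fresh
// graphs and forwards to the two-Function overload above.
auto y = rng.initialize(backend->create_tensor<float>(shape));
EXPECT_TRUE(autodiff_numeric_compare<float>(backend, make_graph, {y}, .01f, .01f));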