Commit 9ecc157d authored by Scott Cyphers

Add abc test

parent c07665c8
@@ -61,12 +61,8 @@ TEST(backwards, add)
         return make_shared<Function>(
             X0 + X1, nullptr, std::vector<std::shared_ptr<op::Parameter>>{X0, X1});
     };
-
-    auto results_num = autodiff::numeric_derivative<element::Float32>(
-        manager, backend, make_graph(), {x0, x1}, .001f);
-    auto results_sym =
-        autodiff::backprop_derivative<element::Float32>(manager, backend, make_graph(), {x0, x1});
-    EXPECT_TRUE(test::all_close(results_num, results_sym, .01f, .01f));
+    EXPECT_TRUE(autodiff_numeric_compare<element::Float32>(
+        manager, backend, make_graph, {x0, x1}, .01f, .01f));
 }
 
 TEST(backwards, divide)
@@ -181,12 +177,6 @@ TEST(backwards, parameter)
     };
     EXPECT_TRUE(
         autodiff_numeric_compare<element::Float32>(manager, backend, make_graph, {x0}, .01f, .01f));
-
-    auto X0 = make_shared<op::Parameter>(element::Float32::element_type(), shape);
-    auto Y = X0;
-    auto C = make_shared<op::Parameter>(element::Float32::element_type(), shape);
-    auto DYDX0 = Y->backprop_node(X0, C);
-    ASSERT_EQ(DYDX0, C);
 }
 
 TEST(backwards, subtract)
@@ -205,10 +195,28 @@ TEST(backwards, subtract)
         return make_shared<Function>(
             X0 - X1, nullptr, std::vector<std::shared_ptr<op::Parameter>>{X0, X1});
     };
-
-    auto results_num = autodiff::numeric_derivative<element::Float32>(
-        manager, backend, make_graph(), {x0, x1}, .001f);
-    auto results_sym =
-        autodiff::backprop_derivative<element::Float32>(manager, backend, make_graph(), {x0, x1});
-    EXPECT_TRUE(test::all_close(results_num, results_sym, .01f, .01f));
+    EXPECT_TRUE(autodiff_numeric_compare<element::Float32>(
+        manager, backend, make_graph, {x0, x1}, .01f, .01f));
 }
+
+TEST(backwards, abc)
+{
+    auto manager = runtime::Manager::get("NGVM");
+    auto backend = manager->allocate_backend();
+
+    test::Uniform<element::Float32> rng(-1.0f, 1.0f);
+    auto shape = Shape{2, 3};
+    auto x0 = rng.initialize(backend->make_parameterized_tensor_view<element::Float32>(shape));
+    auto x1 = rng.initialize(backend->make_parameterized_tensor_view<element::Float32>(shape));
+    auto x2 = rng.initialize(backend->make_parameterized_tensor_view<element::Float32>(shape));
+
+    auto make_graph = [shape]() {
+        auto X0 = make_shared<op::Parameter>(element::Float32::element_type(), shape);
+        auto X1 = make_shared<op::Parameter>(element::Float32::element_type(), shape);
+        auto X2 = make_shared<op::Parameter>(element::Float32::element_type(), shape);
+        return make_shared<Function>(
+            (X0 + X1) * X2, nullptr, std::vector<std::shared_ptr<op::Parameter>>{X0, X1, X2});
+    };
+    EXPECT_TRUE(autodiff_numeric_compare<element::Float32>(
+        manager, backend, make_graph, {x0, x1, x2}, .01f, .01f));
+}
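The change replaces the hand-written comparison in each test (the removed lines above) with a single call to autodiff_numeric_compare. That helper's definition is not part of this diff; as a rough sketch, assuming it simply wraps the removed three-step pattern and that the argument types match these call sites, it would look something like the following. The body is copied from the removed lines (including the .001f finite-difference delta); the signature itself is an assumption, not the actual declaration.

// Hypothetical sketch of autodiff_numeric_compare, reconstructed from the
// removed test code. Parameter types are guessed from the call sites in this
// diff and may not match the real helper.
template <typename ET>
bool autodiff_numeric_compare(
    const std::shared_ptr<runtime::Manager>& manager,
    const std::shared_ptr<runtime::Backend>& backend,
    std::function<std::shared_ptr<Function>()> make_graph,
    const std::vector<std::shared_ptr<runtime::ParameterizedTensorView<ET>>>& args,
    float rtol,
    float atol)
{
    // Finite-difference estimate of the derivatives at the given argument values.
    auto results_num =
        autodiff::numeric_derivative<ET>(manager, backend, make_graph(), args, .001f);
    // Derivatives computed symbolically by reverse-mode autodiff (backprop).
    auto results_sym =
        autodiff::backprop_derivative<ET>(manager, backend, make_graph(), args);
    // The two must agree element-wise within the given tolerances.
    return test::all_close(results_num, results_sym, rtol, atol);
}

For the new abc test the expectation is easy to check by hand: with Y = (X0 + X1) * X2, the element-wise derivatives are dY/dX0 = dY/dX1 = X2 and dY/dX2 = X0 + X1, so the backprop results should agree with the finite-difference estimates on the random 2x3 inputs within the .01f relative and absolute tolerances.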