submodule / ngraph · Commits · 47626835

Commit 47626835 (unverified)
Authored Jul 24, 2019 by Robert Kimball; committed by GitHub, Jul 24, 2019

    Merge branch 'master' into bob/nbench_db

Parents: d6633ec4, a509de7b

Showing 22 changed files with 171 additions and 41 deletions.
Changed files:

  doc/examples/abc/abc.cpp (+1 -1)
  doc/examples/abc_operator/abc_operator.cpp (+1 -1)
  doc/examples/mnist_mlp/dist_mnist_mlp.cpp (+4 -4)
  doc/examples/mnist_mlp/mnist_mlp.cpp (+4 -4)
  doc/sphinx/source/core/constructing-graphs/execute.rst (+2 -2)
  python/pyngraph/function.cpp (+39 -0)
  src/ngraph/autodiff/adjoints.cpp (+0 -5)
  src/ngraph/autodiff/adjoints.hpp (+0 -2)
  src/ngraph/function.cpp (+29 -0)
  src/ngraph/function.hpp (+6 -0)
  src/ngraph/op/pad.hpp (+1 -1)
  src/ngraph/runtime/cpu/cpu_emitter.cpp (+3 -1)
  src/ngraph/runtime/gpu/unit_test.manifest (+1 -0)
  src/ngraph/runtime/intelgpu/unit_test.manifest (+1 -0)
  src/ngraph/runtime/plaidml/unit_test.manifest (+1 -0)
  src/ngraph/runtime/reference/pad.hpp (+25 -2)
  test/backend/batch_norm.in.cpp (+2 -2)
  test/backend/binary_elementwise.in.cpp (+3 -3)
  test/backend/pad.in.cpp (+33 -0)
  test/cpu_fusion.cpp (+11 -9)
  test/util/autodiff/backprop_derivative.hpp (+1 -1)
  test/util/autodiff/backprop_function.cpp (+3 -3)
doc/examples/abc/abc.cpp

@@ -32,7 +32,7 @@ int main()
     auto t1 = std::make_shared<op::Multiply>(t0, c);

     // Make the function
-    auto f = std::make_shared<Function>(NodeVector{t1},
+    auto f = std::make_shared<Function>(OutputVector{t1},
                                         ParameterVector{a, b, c});

     // Create the backend
doc/examples/abc_operator/abc_operator.cpp

@@ -31,7 +31,7 @@ int main()
     auto t1 = (a + b) * c;

     // Make the function
-    auto f = std::make_shared<Function>(NodeVector{t1},
+    auto f = std::make_shared<Function>(OutputVector{t1},
                                         ParameterVector{a, b, c});

     // Get the backend
doc/examples/mnist_mlp/dist_mnist_mlp.cpp

@@ -175,8 +175,8 @@ int main(int argc, char* argv[])
     auto delta = -learning_rate * loss;

     // Updates
-    ngraph::autodiff::Adjoints adjoints(NodeVector{loss}, NodeVector{delta});
+    ngraph::autodiff::Adjoints adjoints(OutputVector{loss}, OutputVector{delta});
     auto grad_W0 = adjoints.backprop_node(W0);
     auto grad_b0 = adjoints.backprop_node(b0);
     auto grad_W1 = adjoints.backprop_node(W1);
@@ -231,7 +231,7 @@ int main(int argc, char* argv[])
     NodeMap train_node_map;
     auto train_function = clone_function(
-        Function(NodeVector{loss, softmax, W0_next, b0_next, W1_next, b1_next},
+        Function(OutputVector{loss, softmax, W0_next, b0_next, W1_next, b1_next},
                  ParameterVector{X, Y, N, learning_rate, W0, b0, W1, b1}),
         train_node_map);
     auto train_exec = backend->compile(train_function);
@@ -240,7 +240,7 @@ int main(int argc, char* argv[])
     // X, W0, b0, W1, b1 -> softmax
     NodeMap inference_node_map;
     auto inference_function = clone_function(
-        Function(NodeVector{softmax}, ParameterVector{X, W0, b0, W1, b1}),
+        Function(OutputVector{softmax}, ParameterVector{X, W0, b0, W1, b1}),
         inference_node_map);
     auto inference_exec = backend->compile(inference_function);
doc/examples/mnist_mlp/mnist_mlp.cpp

@@ -172,8 +172,8 @@ int main(int argc, const char* argv[])
     auto delta = -learning_rate * loss;

     // Updates
-    ngraph::autodiff::Adjoints adjoints(NodeVector{loss}, NodeVector{delta});
+    ngraph::autodiff::Adjoints adjoints(OutputVector{loss}, OutputVector{delta});
     auto W0_next = W0 + adjoints.backprop_node(W0);
     auto b0_next = b0 + adjoints.backprop_node(b0);
     auto W1_next = W1 + adjoints.backprop_node(W1);
@@ -218,7 +218,7 @@ int main(int argc, const char* argv[])
     NodeMap train_node_map;
     auto train_function = clone_function(
-        Function(NodeVector{loss, softmax, W0_next, b0_next, W1_next, b1_next},
+        Function(OutputVector{loss, softmax, W0_next, b0_next, W1_next, b1_next},
                  ParameterVector{X, Y, N, learning_rate, W0, b0, W1, b1}),
         train_node_map);
     auto train_exec = backend->compile(train_function);
@@ -227,7 +227,7 @@ int main(int argc, const char* argv[])
     // X, W0, b0, W1, b1 -> softmax
     NodeMap inference_node_map;
     auto inference_function = clone_function(
-        Function(NodeVector{softmax}, ParameterVector{X, W0, b0, W1, b1}),
+        Function(OutputVector{softmax}, ParameterVector{X, W0, b0, W1, b1}),
         inference_node_map);
     auto inference_exe = backend->compile(inference_function);
doc/sphinx/source/core/constructing-graphs/execute.rst

@@ -99,8 +99,8 @@ Once the graph is built, we need to package it in a ``Function``:
    :lines: 35-36

 The first argument to the constructor specifies the nodes that the function will
-return; in this case, the product. A ``NodeVector`` is a vector of shared
-pointers of ``op::Node``. The second argument specifies the parameters of the
+return; in this case, the product. An ``OutputVector`` is a vector of references to
+outputs of ``op::Node``. The second argument specifies the parameters of the
 function, in the order they are to be passed to the compiled function. A
 ``ParameterVector`` is a vector of shared pointers to ``op::Parameter``.
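For readers updating code alongside this doc change, a minimal sketch of the new construction pattern, mirroring the abc.cpp example above (the usual ngraph headers and a trivial graph are assumed):

    #include <ngraph/ngraph.hpp>
    using namespace ngraph;

    int main()
    {
        Shape shape{2, 3};
        auto a = std::make_shared<op::Parameter>(element::f32, shape);
        auto b = std::make_shared<op::Parameter>(element::f32, shape);
        auto c = std::make_shared<op::Parameter>(element::f32, shape);
        auto t0 = std::make_shared<op::Add>(a, b);
        auto t1 = std::make_shared<op::Multiply>(t0, c);

        // OutputVector replaces NodeVector here; a shared_ptr<Node>
        // converts implicitly to Output<Node>, so brace-initialized
        // call sites need only the type name changed.
        auto f = std::make_shared<Function>(OutputVector{t1},
                                            ParameterVector{a, b, c});
        return 0;
    }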
python/pyngraph/function.cpp

@@ -23,6 +23,8 @@
 namespace py = pybind11;

+static const char* CAPSULE_NAME = "ngraph_function";
+
 void regclass_pyngraph_Function(py::module m)
 {
     py::class_<ngraph::Function, std::shared_ptr<ngraph::Function>> function(m, "Function");
@@ -49,4 +51,41 @@ void regclass_pyngraph_Function(py::module m)
                          py::cast(self.get_output_shape(0)).attr("__str__")().cast<std::string>();
                      return "<" + class_name + ": '" + self.get_friendly_name() + "' (" + shape +
                             ")>";
                  });
+
+    function.def_static("from_capsule", [](py::object* capsule) {
+        // get the underlying PyObject* which is a PyCapsule pointer
+        auto* pybind_capsule_ptr = capsule->ptr();
+        // extract the pointer stored in the PyCapsule under the name CAPSULE_NAME
+        auto* capsule_ptr = PyCapsule_GetPointer(pybind_capsule_ptr, CAPSULE_NAME);
+        auto* ngraph_function = static_cast<std::shared_ptr<ngraph::Function>*>(capsule_ptr);
+        if (ngraph_function)
+        {
+            return *ngraph_function;
+        }
+        else
+        {
+            throw std::runtime_error("The provided capsule does not contain an ngraph::Function");
+        }
+    });
+
+    function.def_static("to_capsule", [](std::shared_ptr<ngraph::Function>& ngraph_function) {
+        // create a shared pointer on the heap before putting it in the capsule
+        // this secures the lifetime of the object transferred by the capsule
+        auto* sp_copy = new std::shared_ptr<ngraph::Function>(ngraph_function);
+        // a destructor callback that will delete the heap allocated shared_ptr
+        // when the capsule is destructed
+        auto sp_deleter = [](PyObject* capsule) {
+            auto* capsule_ptr = PyCapsule_GetPointer(capsule, CAPSULE_NAME);
+            auto* function_sp = static_cast<std::shared_ptr<ngraph::Function>*>(capsule_ptr);
+            if (function_sp)
+            {
+                delete function_sp;
+            }
+        };
+        // put the shared_ptr in a new capsule under the same name as in "from_capsule"
+        auto pybind_capsule = py::capsule(sp_copy, CAPSULE_NAME, sp_deleter);
+        return pybind_capsule;
+    });
 }
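The from_capsule/to_capsule pair added above is an instance of the standard PyCapsule ownership pattern: copy the shared_ptr onto the heap, and give the capsule a destructor that deletes that copy. A stripped-down, self-contained sketch of the same idea (hypothetical Payload type and helper names, plain CPython API rather than pybind11):

    #include <Python.h>
    #include <memory>
    #include <stdexcept>

    // Hypothetical payload standing in for ngraph::Function.
    struct Payload { int value; };

    static const char* CAPSULE_NAME = "payload";

    // Heap-copy the shared_ptr so the capsule owns one reference; the
    // destructor callback releases it when the capsule is destroyed.
    PyObject* to_capsule(const std::shared_ptr<Payload>& p)
    {
        auto* sp_copy = new std::shared_ptr<Payload>(p);
        return PyCapsule_New(sp_copy, CAPSULE_NAME, [](PyObject* cap) {
            delete static_cast<std::shared_ptr<Payload>*>(
                PyCapsule_GetPointer(cap, CAPSULE_NAME));
        });
    }

    // Return a shared_ptr copy; the capsule's own reference stays alive.
    std::shared_ptr<Payload> from_capsule(PyObject* cap)
    {
        auto* sp = static_cast<std::shared_ptr<Payload>*>(
            PyCapsule_GetPointer(cap, CAPSULE_NAME));
        if (!sp)
            throw std::runtime_error("capsule does not hold the expected payload");
        return *sp;
    }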
src/ngraph/autodiff/adjoints.cpp

@@ -51,11 +51,6 @@ OutputVector make_zeros(std::shared_ptr<Node> x)
     return zeros;
 }

-autodiff::Adjoints::Adjoints(const NodeVector& ys, const NodeVector& cs)
-    : Adjoints(OutputVector(ys.begin(), ys.end()), OutputVector(cs.begin(), cs.end()))
-{
-}
-
 autodiff::Adjoints::Adjoints(const OutputVector& ys, const OutputVector& cs)
 {
     if (ys.size() != cs.size())
src/ngraph/autodiff/adjoints.hpp

@@ -46,8 +46,6 @@ namespace ngraph
             /// \param c An expression for where to evaluate the derivatives
             Adjoints(const OutputVector& y, const OutputVector& c);

-            Adjoints(const NodeVector& y, const NodeVector& c);
-
             Adjoints(const Adjoints& adjoints) = default;
             Adjoints& operator=(const Adjoints& adjoints) = default;
             Adjoints() = default;
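Removing the NodeVector overload is a breaking change for callers, since std::vector<std::shared_ptr<Node>> does not convert to std::vector<Output<Node>>; hence the mechanical updates in the tests below. A sketch of the two migration forms (names taken from the diffs in this commit):

    // Brace-initialization: each shared_ptr<Node> converts to Output<Node>.
    ngraph::autodiff::Adjoints adjoints(ngraph::OutputVector{loss},
                                        ngraph::OutputVector{delta});

    // Or range-construction from an existing NodeVector, exactly as the
    // removed delegating constructor itself did:
    ngraph::OutputVector ys(nodes.begin(), nodes.end());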
src/ngraph/function.cpp

@@ -41,6 +41,30 @@ Function::Function(const ResultVector& results,
     init();
 }

+Function::Function(const OutputVector& results,
+                   const ParameterVector& parameters,
+                   const std::string& name)
+    : m_results(results.size())
+    , m_parameters(parameters)
+    , m_temporary_pool_size(0)
+    , m_instance_id(m_next_instance_id.fetch_add(1))
+    , m_name(name)
+    , m_unique_name("Function_" + to_string(m_instance_id))
+{
+    if (std::any_of(results.cbegin(), results.cend(), [](Output<Node> n) {
+            return std::dynamic_pointer_cast<op::Result>(n.get_node_shared_ptr());
+        }))
+    {
+        throw ngraph_error(
+            " Results already contain op::Results. Use a c-tor that takes a ResultVector");
+    }
+
+    std::transform(results.begin(), results.end(), m_results.begin(), [](Output<Node> n) {
+        return std::make_shared<op::Result>(n);
+    });
+    init();
+}
+
 Function::Function(const NodeVector& results,
                    const ParameterVector& parameters,
                    const std::string& name)
@@ -208,6 +232,11 @@ shared_ptr<Node> Function::get_output_op(size_t i) const
     return m_results.at(i);
 }

+Output<Node> Function::output(size_t i) const
+{
+    return m_results.at(i);
+}
+
 shared_ptr<Node> Function::get_result() const
 {
     if (m_results.size() != 1)
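One behavioral note on the new constructor: it wraps each output in an op::Result itself and rejects outputs that are already op::Result nodes. A minimal sketch of both paths (assumed setup; the error text is taken from the code above):

    // Assumes element::f32 and Shape from the usual ngraph headers.
    auto p = std::make_shared<op::Parameter>(element::f32, Shape{2});

    // OK: a plain output is wrapped in an op::Result internally.
    auto f_ok = std::make_shared<Function>(OutputVector{p}, ParameterVector{p});

    // Throws ngraph_error ("Results already contain op::Results. Use a
    // c-tor that takes a ResultVector"):
    // auto r = std::make_shared<op::Result>(p);
    // auto f_bad = std::make_shared<Function>(OutputVector{r}, ParameterVector{p});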
src/ngraph/function.hpp

@@ -37,6 +37,10 @@ namespace ngraph
                  const ParameterVector& parameters,
                  const std::string& name = "");

+        Function(const OutputVector& results,
+                 const ParameterVector& parameters,
+                 const std::string& name = "");
+
         Function(const std::shared_ptr<Node>& result,
                  const ParameterVector& parameters,
                  const std::string& name = "");
@@ -55,6 +59,8 @@ namespace ngraph
         /// Return the op that generates output i
         std::shared_ptr<Node> get_output_op(size_t i) const;

+        Output<Node> output(size_t i) const;
+
         /// Return the element type of output i
         const element::Type& get_output_element_type(size_t i) const;
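A sketch of what the new accessor changes at call sites (usage mirrored from the test updates later in this commit): Output<Node> is a value type queried with '.', where get_output_op returned a shared_ptr queried with '->':

    // Before: shared_ptr<Node> semantics.
    // auto y = f->get_output_op(0);
    // auto et = y->get_element_type();

    // After: Output<Node> value semantics.
    Output<Node> y = f->output(0);
    auto et = y.get_element_type();
    auto shape = y.get_shape();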
src/ngraph/op/pad.hpp

@@ -34,7 +34,7 @@ namespace ngraph
             /// \param arg_pad_value The node producing the scalar value to be inserted for padding.
             /// \param padding_below The padding-below widths.
             /// \param padding_above The padding-above widths.
-            /// \param pad_mode The padding mode: CONSTANT(default), EDGE or REFLECT.
+            /// \param pad_mode The padding mode: CONSTANT(default), EDGE, REFLECT or SYMMETRIC.
             Pad(const std::shared_ptr<Node>& arg,
                 const std::shared_ptr<Node>& arg_pad_value,
                 const CoordinateDiff& padding_below,
src/ngraph/runtime/cpu/cpu_emitter.cpp

@@ -3027,7 +3027,9 @@ namespace ngraph
                 case ngraph::op::PadMode::REFLECT:
                     pad_mode_string = "ngraph::op::PadMode::REFLECT";
                     break;
-                case ngraph::op::PadMode::SYMMETRIC: throw ngraph_error("Unsupported PadMode");
+                case ngraph::op::PadMode::SYMMETRIC:
+                    pad_mode_string = "ngraph::op::PadMode::SYMMETRIC";
+                    break;
                 }
                 writer << "reference::pad<" << out[0].get_type() << ">(" << args[0].get_name()
                        << ",\n";
src/ngraph/runtime/gpu/unit_test.manifest

@@ -123,6 +123,7 @@ pad_reflect_1d_bottom_neg_bigger_than_tensor
 pad_reflect_1d_multi_reflect
 pad_reflect_2d
 pad_reflect_2d_with_neg
+pad_symmetric

 # Quantized operators are not supported on gpu backend
 model_dequantize_linear
src/ngraph/runtime/intelgpu/unit_test.manifest

@@ -40,6 +40,7 @@ pad_reflect_1d_bottom_neg_bigger_than_tensor
 pad_reflect_1d_multi_reflect
 pad_reflect_2d
 pad_reflect_2d_with_neg
+pad_symmetric

 # Not implemented
 batch_mat_mul_forward
src/ngraph/runtime/plaidml/unit_test.manifest

@@ -137,6 +137,7 @@ pad_reflect_2d_with_neg
 pad_negative_exterior_2d
 pad_negative_exterior_2d_all_negative
 pad_negative_exterior_4d
+pad_symmetric
 max_trivial_int8
 max_trivial_5d_int32
 max_3d_to_scalar_double
src/ngraph/runtime/reference/pad.hpp

@@ -164,8 +164,31 @@ namespace ngraph
                 }
                 case op::PadMode::SYMMETRIC:
                 {
-                    // TODO: Add support for Symmetric mode
-                    throw ngraph_error("Symmetric mode padding not supported");
+                    Coordinate c = in_coord; // have to copy because in_coord is const
+                    for (size_t i = 0; i < c.size(); i++)
+                    {
+                        ptrdiff_t pos = padding_below[i] - (c[i] + 1);
+                        if (pos >= 0)
+                        {
+                            c[i] = static_cast<size_t>(pos + padding_below[i]);
+                        }
+                        else
+                        {
+                            pos = -(pos + 1);
+                            ptrdiff_t src_dim = static_cast<ptrdiff_t>(arg0_shape[i]);
+                            if (pos < src_dim)
+                            {
+                                c[i] = static_cast<size_t>(pos + padding_below[i]);
+                            }
+                            else
+                            {
+                                c[i] = static_cast<size_t>(padding_below[i] + src_dim +
+                                                           padding_above[i] - pos);
+                            }
+                        }
+                    }
+                    v = arg0[input_transform.index(c)];
+                    break;
                 }
             }
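To see what the new SYMMETRIC branch computes, here is a standalone sketch of the per-axis index mapping it implements (hypothetical helper name; the arithmetic follows the loop above):

    #include <cstddef>
    #include <cstdio>

    // Map an output index to a source index under SYMMETRIC padding,
    // mirroring the per-axis arithmetic in reference::pad above.
    std::ptrdiff_t symmetric_src_index(std::ptrdiff_t out, std::ptrdiff_t below,
                                       std::ptrdiff_t above, std::ptrdiff_t dim)
    {
        std::ptrdiff_t pos = below - (out + 1);
        if (pos >= 0)
            return pos;            // in the below-padding: reflect forward
        pos = -(pos + 1);          // distance past the below-padding
        if (pos < dim)
            return pos;            // interior element: identity
        return dim + above - pos;  // in the above-padding: reflect back
    }

    int main()
    {
        // With below=2, above=2, dim=3 this prints: 1 0 0 1 2 2 1,
        // i.e. input {1,2,3} pads to {2,1,1,2,3,3,2} -- exactly the row
        // expected by the new pad_symmetric test below.
        for (int o = 0; o < 7; ++o)
            std::printf("%td ", symmetric_src_index(o, 2, 2, 3));
        std::printf("\n");
        return 0;
    }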
test/backend/batch_norm.in.cpp

@@ -733,8 +733,8 @@ NGRAPH_TEST(${BACKEND_NAME}, batch_norm_bprop_n4c3h2w2)
     auto C = std::make_shared<op::Parameter>(element::f32, shape_r);
     auto zero = ngraph::make_zero(bn_dgamma->get_element_type(), bn_dgamma->get_shape());
-    ngraph::autodiff::Adjoints adjoints(NodeVector{bn_dx, bn_dgamma, bn_dbeta},
-                                        NodeVector{C, zero, zero});
+    ngraph::autodiff::Adjoints adjoints(OutputVector{bn_dx, bn_dgamma, bn_dbeta},
+                                        OutputVector{C, zero, zero});

     auto dinput = adjoints.backprop_node(input);
     auto dgamma = adjoints.backprop_node(gamma);
test/backend/binary_elementwise.in.cpp

@@ -257,10 +257,10 @@ NGRAPH_TEST(${BACKEND_NAME}, divide_adjoint_stability)
     auto B = make_shared<op::Parameter>(element::f32, shape);

     auto f = make_shared<Function>(make_shared<op::Divide>(A, B), ParameterVector{A, B});

-    auto Y_out = f->get_output_op(0);
+    auto Y_out = f->output(0);
     auto Xs = f->get_parameters();
-    auto C = std::make_shared<op::Parameter>(Y_out->get_element_type(), Y_out->get_shape());
-    ngraph::autodiff::Adjoints adjoints(NodeVector{Y_out}, NodeVector{C});
+    auto C = std::make_shared<op::Parameter>(Y_out.get_element_type(), Y_out.get_shape());
+    ngraph::autodiff::Adjoints adjoints(OutputVector{Y_out}, OutputVector{C});
     std::vector<std::shared_ptr<Node>> dYdXs(Xs.size());
     transform(Xs.begin(), Xs.end(), dYdXs.begin(), [C, &adjoints](const std::shared_ptr<Node>& X) {
test/backend/pad.in.cpp

@@ -939,3 +939,36 @@ NGRAPH_TEST(${BACKEND_NAME}, pad_2channel_2image_asym)
                                   read_vector<float>(result),
                                   MIN_FLOAT_TOLERANCE_BITS));
 }
+
+NGRAPH_TEST(${BACKEND_NAME}, pad_symmetric)
+{
+    Shape shape_a{2, 3};
+    auto A = make_shared<op::Parameter>(element::f32, shape_a);
+    Shape shape_b{};
+    auto B = make_shared<op::Parameter>(element::f32, shape_b);
+    Shape shape_r{4, 7};
+    CoordinateDiff padding_below{1, 2};
+    CoordinateDiff padding_above{1, 2};
+    auto f = make_shared<Function>(
+        make_shared<op::Pad>(A, B, padding_below, padding_above, op::PadMode::SYMMETRIC),
+        ParameterVector{A, B});
+
+    auto backend = runtime::Backend::create("${BACKEND_NAME}");
+
+    // Create some tensors for input/output
+    auto a = backend->create_tensor(element::f32, shape_a);
+    copy_data(a, test::NDArray<float, 2>({{1, 2, 3}, {4, 5, 6}}).get_vector());
+    auto b = backend->create_tensor(element::f32, shape_b);
+    copy_data(b, vector<float>{2112});
+    auto result = backend->create_tensor(element::f32, shape_r);
+
+    auto handle = backend->compile(f);
+    handle->call_with_validate({result}, {a, b});
+    EXPECT_TRUE(test::all_close_f((test::NDArray<float, 2>({{2, 1, 1, 2, 3, 3, 2},
+                                                            {2, 1, 1, 2, 3, 3, 2},
+                                                            {5, 4, 4, 5, 6, 6, 5},
+                                                            {5, 4, 4, 5, 6, 6, 5}})
+                                       .get_vector()),
+                                  read_vector<float>(result),
+                                  MIN_FLOAT_TOLERANCE_BITS));
+}
test/cpu_fusion.cpp

@@ -507,7 +507,8 @@ TEST(cpu_fusion, conv_bias_bprop_n1c1h3w3)
     auto f = make_shared<Function>(
         convolution_bias, ParameterVector{conv_test.data, conv_test.weights, conv_test.bias});
-    ngraph::autodiff::Adjoints adjoints(NodeVector{convolution_bias}, NodeVector{conv_test.delta});
+    ngraph::autodiff::Adjoints adjoints(OutputVector{convolution_bias},
+                                        OutputVector{conv_test.delta});

     auto d_data = adjoints.backprop_node(conv_test.data);
     auto d_weights = adjoints.backprop_node(conv_test.weights);
@@ -546,7 +547,7 @@ TEST(cpu_fusion, conv_bias_bprop)
     pass_manager.register_pass<pass::VisualizeTree>("conv_bias_bprop_fusion.png");
     auto f = make_shared<Function>(conv_bias, ParameterVector{data_batch, filters, bias});
-    ngraph::autodiff::Adjoints adjoints(NodeVector{conv_bias}, NodeVector{delta});
+    ngraph::autodiff::Adjoints adjoints(OutputVector{conv_bias}, OutputVector{delta});

     auto d_data = adjoints.backprop_node(data_batch);
     auto d_weights = adjoints.backprop_node(filters);
@@ -1452,7 +1453,7 @@ TEST(cpu_fusion, max_pool_with_indices)
     auto max_pool = std::make_shared<op::MaxPool>(input, window_shape);
     auto C = std::make_shared<op::Parameter>(element::f32, max_pool->get_shape());
-    ngraph::autodiff::Adjoints adjoints(NodeVector{max_pool}, NodeVector{C});
+    ngraph::autodiff::Adjoints adjoints(ngraph::OutputVector{max_pool}, ngraph::OutputVector{C});

     auto dinput = adjoints.backprop_node(input);
@@ -1789,14 +1790,14 @@ static std::shared_ptr<ngraph::Function> make_forward_function()
     return std::make_shared<Function>(NodeVector{max_pool, neg, absn}, ParameterVector{input});
 }

-static std::pair<std::shared_ptr<ngraph::Function>, std::vector<std::shared_ptr<ngraph::Node>>>
+static std::pair<std::shared_ptr<ngraph::Function>, OutputVector>
     make_backward_function(std::shared_ptr<ngraph::Function> f)
 {
     // get parameters
     std::vector<std::shared_ptr<ngraph::op::Parameter>> back_parameters = f->get_parameters();

-    ngraph::NodeVector adjoints;
-    ngraph::NodeVector outputs;
+    ngraph::OutputVector adjoints;
+    ngraph::OutputVector outputs;
     for (auto Y : f->get_results())
     {
         // Get the output
@@ -1809,7 +1810,7 @@ static std::pair<std::shared_ptr<ngraph::Function>, std::vector<std::shared_ptr<
     ngraph::autodiff::Adjoints adjoint{outputs, adjoints};

     // Perform autodiff
-    std::vector<std::shared_ptr<Node>> dYdXs(back_parameters.size());
+    OutputVector dYdXs(back_parameters.size());
     transform(back_parameters.begin(),
               back_parameters.end(),
               dYdXs.begin(),
@@ -1818,7 +1819,8 @@ static std::pair<std::shared_ptr<ngraph::Function>, std::vector<std::shared_ptr<
     // create the backward function
     std::vector<std::shared_ptr<ngraph::op::Parameter>> param_adjoints;
     for (auto n : adjoints)
-        param_adjoints.push_back(std::dynamic_pointer_cast<ngraph::op::Parameter>(n));
+        param_adjoints.push_back(
+            std::dynamic_pointer_cast<ngraph::op::Parameter>(n.get_node_shared_ptr()));
     back_parameters.insert(back_parameters.begin(), param_adjoints.begin(), param_adjoints.end());

     return {std::make_shared<ngraph::Function>(dYdXs, back_parameters), adjoints};
@@ -2703,7 +2705,7 @@ void sigmoid_multiply_fusion_backward_compute(runtime::Backend* backend,
     auto sigmoid_mul =
         make_shared<op::SigmoidMultiply>(input_0_alt, input_1_alt, input_0_type, input_1_type);
-    ngraph::autodiff::Adjoints adjoints(NodeVector{sigmoid_mul}, NodeVector{delta_param});
+    ngraph::autodiff::Adjoints adjoints(OutputVector{sigmoid_mul}, OutputVector{delta_param});

     auto d_input_0 = adjoints.backprop_node(input_0_adjoint);
     auto d_input_1 = adjoints.backprop_node(input_1_adjoint);

     auto df = make_shared<Function>(NodeVector{d_input_0, d_input_1}, back_params);
test/util/autodiff/backprop_derivative.hpp

@@ -144,7 +144,7 @@ namespace ngraph
             // df/dX*
             std::vector<std::shared_ptr<Node>> df_output_params;

-            Adjoints adjoints(NodeVector{f->get_output_op(0)}, NodeVector{c_param});
+            Adjoints adjoints(OutputVector{f->output(0)}, OutputVector{c_param});

             // for each x "of interest"
             for (auto x : indep_params)
test/util/autodiff/backprop_function.cpp

@@ -32,10 +32,10 @@ using namespace ngraph;
 std::shared_ptr<Function> autodiff::backprop_function(const std::shared_ptr<Function>& f)
 {
-    auto Y_out = f->get_output_op(0);
+    auto Y_out = f->output(0);
     auto Xs = f->get_parameters();
-    auto C = std::make_shared<op::Parameter>(Y_out->get_element_type(), Y_out->get_shape());
-    Adjoints adjoints(NodeVector{Y_out}, NodeVector{C});
+    auto C = std::make_shared<op::Parameter>(Y_out.get_element_type(), Y_out.get_shape());
+    Adjoints adjoints(OutputVector{Y_out}, OutputVector{C});
     std::vector<std::shared_ptr<Node>> dYdXs(Xs.size());
     transform(Xs.begin(), Xs.end(), dYdXs.begin(), [C, &adjoints](const std::shared_ptr<Node>& X) {
         return adjoints.backprop_node(X);