Commit d1d59827 authored by Adam Procter, committed by Scott Cyphers

clang-format comments: /doc/examples (#3477)

* Opt /doc/examples into the new comment-wrapping rules

* Typo

* Update rst line number refs
parent dc0f0011
+#
+# OVERRIDE TO STYLE: Comments wrap, ColumnLimit to 75.
+#
 BasedOnStyle: LLVM
 IndentWidth: 4
 UseTab: Never
@@ -24,7 +27,7 @@ BreakBeforeBraces: Allman
 BreakConstructorInitializersBeforeComma: true
 ColumnLimit: 75
-CommentPragmas: '.*'
+#CommentPragmas: '.*'
 IndentCaseLabels: false
 IndentWrappedFunctionNames: true
......
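CommentPragmas tells clang-format which comments it must leave untouched; with the pattern '.*' every comment was exempt, so none were ever rewrapped. Commenting the option out restores the default, and comments now reflow at the 75-column limit like any other code. This is exactly what happens to one comment later in this commit, shown here as a before/after sketch:

    // Input to clang-format (exceeds 75 columns):
    // A debug class that supports various ways to dump information about a tensor.

    // Output with ColumnLimit: 75 and comment reflowing enabled:
    // A debug class that supports various ways to dump information about a
    // tensor.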
@@ -227,11 +227,13 @@ int main(int argc, char* argv[])
     auto t_softmax = make_output_tensor(backend, softmax, 0);

     // Train
-    // X, Y, learning_rate, W0, b0, W1, b1 -> loss, softmax, W0_next, b0_next, W1_next, b1_next
+    // X, Y, learning_rate, W0, b0, W1, b1
+    // -> loss, softmax, W0_next, b0_next, W1_next, b1_next
     NodeMap train_node_map;
     auto train_function = clone_function(
         Function(
-            OutputVector{loss, softmax, W0_next, b0_next, W1_next, b1_next},
+            OutputVector{
+                loss, softmax, W0_next, b0_next, W1_next, b1_next},
             ParameterVector{X, Y, N, learning_rate, W0, b0, W1, b1}),
         train_node_map);
     auto train_exec = backend->compile(train_function);
@@ -239,9 +241,10 @@ int main(int argc, char* argv[])
     // Plain inference
     // X, W0, b0, W1, b1 -> softmax
     NodeMap inference_node_map;
-    auto inference_function = clone_function(
-        Function(OutputVector{softmax}, ParameterVector{X, W0, b0, W1, b1}),
-        inference_node_map);
+    auto inference_function =
+        clone_function(Function(OutputVector{softmax},
+                                ParameterVector{X, W0, b0, W1, b1}),
+                       inference_node_map);
     auto inference_exec = backend->compile(inference_function);

     set_scalar(t_learning_rate, .03f);
......
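The rewrapped calls above all follow the same pattern: build a Function over shared graph nodes, clone it so the backend can compile an independent copy, and keep the NodeMap of original-to-clone pairs. A minimal sketch of that pattern, assuming the ngraph 0.x API these examples use (make_inference_exec is a hypothetical helper, not part of the example files):

    #include <ngraph/ngraph.hpp>

    using namespace ngraph;

    // Hypothetical helper: wrap `softmax` (and its upstream graph) into a
    // fresh Function, clone it, and compile the clone on `backend`.
    std::shared_ptr<runtime::Executable>
        make_inference_exec(const std::shared_ptr<runtime::Backend>& backend,
                            const Output<Node>& softmax,
                            const ParameterVector& params)
    {
        NodeMap node_map; // filled with original -> clone node pairs
        auto inference_function =
            clone_function(Function(OutputVector{softmax}, params),
                           node_map);
        return backend->compile(inference_function);
    }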
@@ -214,11 +214,13 @@ int main(int argc, const char* argv[])
     auto t_softmax = make_output_tensor(backend, softmax, 0);

     // Train
-    // X, Y, learning_rate, W0, b0, W1, b1 -> loss, softmax, W0_next, b0_next, W1_next, b1_next
+    // X, Y, learning_rate, W0, b0, W1, b1
+    // -> loss, softmax, W0_next, b0_next, W1_next, b1_next
     NodeMap train_node_map;
     auto train_function = clone_function(
         Function(
-            OutputVector{loss, softmax, W0_next, b0_next, W1_next, b1_next},
+            OutputVector{
+                loss, softmax, W0_next, b0_next, W1_next, b1_next},
             ParameterVector{X, Y, N, learning_rate, W0, b0, W1, b1}),
         train_node_map);
     auto train_exec = backend->compile(train_function);
@@ -226,9 +228,10 @@ int main(int argc, const char* argv[])
     // Plain inference
     // X, W0, b0, W1, b1 -> softmax
     NodeMap inference_node_map;
-    auto inference_function = clone_function(
-        Function(OutputVector{softmax}, ParameterVector{X, W0, b0, W1, b1}),
-        inference_node_map);
+    auto inference_function =
+        clone_function(Function(OutputVector{softmax},
+                                ParameterVector{X, W0, b0, W1, b1}),
+                       inference_node_map);
     auto inference_exe = backend->compile(inference_function);

     set_scalar(t_learning_rate, .03f);
......
@@ -87,7 +87,8 @@ std::ostream& operator<<(std::ostream& s, const ngraph::Shape& shape)
     return s;
 }

-// A debug class that supports various ways to dump information about a tensor.
+// A debug class that supports various ways to dump information about a
+// tensor.
 class TensorDumper
 {
 protected:
......
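The rewrapped comment heads the example's TensorDumper helper. As a rough, hypothetical skeleton of what such a class can look like (member and method names are invented for illustration; the real class dumps ngraph runtime tensors):

    #include <iostream>
    #include <string>
    #include <utility>

    // Hypothetical skeleton only: a base class whose subclasses each dump
    // a tensor in a different format (raw values, statistics, ...).
    class TensorDumper
    {
    protected:
        std::string m_name; // label printed ahead of the data

    public:
        explicit TensorDumper(std::string name)
            : m_name(std::move(name))
        {
        }
        virtual ~TensorDumper() = default;

        // Each subclass chooses its own representation.
        virtual std::ostream& dump(std::ostream& s) const = 0;
    };

    std::ostream& operator<<(std::ostream& s, const TensorDumper& d)
    {
        return d.dump(s);
    }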
@@ -167,5 +167,5 @@ compile clones of the nodes.
 .. literalinclude:: ../../../../examples/mnist_mlp/mnist_mlp.cpp
    :language: cpp
-   :lines: 216-224
+   :lines: 216-226
...@@ -43,7 +43,7 @@ echo "Verified that '${CLANG_FORMAT_PROG}' has version '${REQUIRED_CLANG_FORMAT_ ...@@ -43,7 +43,7 @@ echo "Verified that '${CLANG_FORMAT_PROG}' has version '${REQUIRED_CLANG_FORMAT_
pushd "${THIS_SCRIPT_DIR}/.." pushd "${THIS_SCRIPT_DIR}/.."
declare ROOT_SUBDIR declare ROOT_SUBDIR
for ROOT_SUBDIR in src test python/pyngraph; do for ROOT_SUBDIR in src test doc/examples python/pyngraph; do
if ! [[ -d "${ROOT_SUBDIR}" ]]; then if ! [[ -d "${ROOT_SUBDIR}" ]]; then
echo "In directory '$(pwd)', no subdirectory named '${ROOT_SUBDIR}' was found." echo "In directory '$(pwd)', no subdirectory named '${ROOT_SUBDIR}' was found."
else else
......
...@@ -48,7 +48,7 @@ pushd "${THIS_SCRIPT_DIR}/.." ...@@ -48,7 +48,7 @@ pushd "${THIS_SCRIPT_DIR}/.."
declare PYBIND_WRAPPER="python/pyngraph" declare PYBIND_WRAPPER="python/pyngraph"
declare ROOT_SUBDIR declare ROOT_SUBDIR
for ROOT_SUBDIR in src test python/pyngraph; do for ROOT_SUBDIR in src test doc/examples python/pyngraph; do
if ! [[ -d "${ROOT_SUBDIR}" ]]; then if ! [[ -d "${ROOT_SUBDIR}" ]]; then
echo "In directory '$(pwd)', no subdirectory named '${ROOT_SUBDIR}' was found." echo "In directory '$(pwd)', no subdirectory named '${ROOT_SUBDIR}' was found."
else else
......