Commit d1d59827 authored by Adam Procter, committed by Scott Cyphers

clang-format comments: /doc/examples (#3477)

* Opt /doc/examples into the new comment-wrapping rules

* Typo

* Update rst line number refs
parent dc0f0011
#
# OVERRIDE TO STYLE: Comments wrap, ColumnLimit to 75.
#
BasedOnStyle: LLVM
IndentWidth: 4
UseTab: Never
@@ -24,7 +27,7 @@ BreakBeforeBraces: Allman
BreakConstructorInitializersBeforeComma: true
ColumnLimit: 75
CommentPragmas: '.*'
#CommentPragmas: '.*'
IndentCaseLabels: false
IndentWrappedFunctionNames: true
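
For context, the change to CommentPragmas above is what switches comment wrapping on: the pattern '.*' told clang-format to treat every comment as a pragma it must not split or otherwise change, so comments were never rewrapped; with the entry commented out, comments become subject to the new ColumnLimit of 75. A sketch of the effect, using the before/after comment pair from one of the example sources below (the formatter's output, not an additional change):

// X, Y, learning_rate, W0, b0, W1, b1 -> loss, softmax, W0_next, b0_next, W1_next, b1_next

becomes

// X, Y, learning_rate, W0, b0, W1, b1
// -> loss, softmax, W0_next, b0_next, W1_next, b1_next
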
@@ -227,11 +227,13 @@ int main(int argc, char* argv[])
auto t_softmax = make_output_tensor(backend, softmax, 0);
// Train
// X, Y, learning_rate, W0, b0, W1, b1 -> loss, softmax, W0_next, b0_next, W1_next, b1_next
// X, Y, learning_rate, W0, b0, W1, b1
// -> loss, softmax, W0_next, b0_next, W1_next, b1_next
NodeMap train_node_map;
auto train_function = clone_function(
Function(
OutputVector{loss, softmax, W0_next, b0_next, W1_next, b1_next},
OutputVector{
loss, softmax, W0_next, b0_next, W1_next, b1_next},
ParameterVector{X, Y, N, learning_rate, W0, b0, W1, b1}),
train_node_map);
auto train_exec = backend->compile(train_function);
@@ -239,9 +241,10 @@ int main(int argc, char* argv[])
// Plain inference
// X, W0, b0, W1, b1 -> softmax
NodeMap inference_node_map;
auto inference_function = clone_function(
Function(OutputVector{softmax}, ParameterVector{X, W0, b0, W1, b1}),
inference_node_map);
auto inference_function =
clone_function(Function(OutputVector{softmax},
ParameterVector{X, W0, b0, W1, b1}),
inference_node_map);
auto inference_exec = backend->compile(inference_function);
set_scalar(t_learning_rate, .03f);
@@ -214,11 +214,13 @@ int main(int argc, const char* argv[])
auto t_softmax = make_output_tensor(backend, softmax, 0);
// Train
// X, Y, learning_rate, W0, b0, W1, b1 -> loss, softmax, W0_next, b0_next, W1_next, b1_next
// X, Y, learning_rate, W0, b0, W1, b1
// -> loss, softmax, W0_next, b0_next, W1_next, b1_next
NodeMap train_node_map;
auto train_function = clone_function(
Function(
OutputVector{loss, softmax, W0_next, b0_next, W1_next, b1_next},
OutputVector{
loss, softmax, W0_next, b0_next, W1_next, b1_next},
ParameterVector{X, Y, N, learning_rate, W0, b0, W1, b1}),
train_node_map);
auto train_exec = backend->compile(train_function);
@@ -226,9 +228,10 @@ int main(int argc, const char* argv[])
// Plain inference
// X, W0, b0, W1, b1 -> softmax
NodeMap inference_node_map;
auto inference_function = clone_function(
Function(OutputVector{softmax}, ParameterVector{X, W0, b0, W1, b1}),
inference_node_map);
auto inference_function =
clone_function(Function(OutputVector{softmax},
ParameterVector{X, W0, b0, W1, b1}),
inference_node_map);
auto inference_exe = backend->compile(inference_function);
set_scalar(t_learning_rate, .03f);
@@ -87,7 +87,8 @@ std::ostream& operator<<(std::ostream& s, const ngraph::Shape& shape)
return s;
}
// A debug class that supports various ways to dump information about a tensor.
// A debug class that supports various ways to dump information about a
// tensor.
class TensorDumper
{
protected:
@@ -167,5 +167,5 @@ compile clones of the nodes.
.. literalinclude:: ../../../../examples/mnist_mlp/mnist_mlp.cpp
:language: cpp
:lines: 216-224
:lines: 216-226
@@ -43,7 +43,7 @@ echo "Verified that '${CLANG_FORMAT_PROG}' has version '${REQUIRED_CLANG_FORMAT_
pushd "${THIS_SCRIPT_DIR}/.."
declare ROOT_SUBDIR
for ROOT_SUBDIR in src test python/pyngraph; do
for ROOT_SUBDIR in src test doc/examples python/pyngraph; do
if ! [[ -d "${ROOT_SUBDIR}" ]]; then
echo "In directory '$(pwd)', no subdirectory named '${ROOT_SUBDIR}' was found."
else
@@ -48,7 +48,7 @@ pushd "${THIS_SCRIPT_DIR}/.."
declare PYBIND_WRAPPER="python/pyngraph"
declare ROOT_SUBDIR
for ROOT_SUBDIR in src test python/pyngraph; do
for ROOT_SUBDIR in src test doc/examples python/pyngraph; do
if ! [[ -d "${ROOT_SUBDIR}" ]]; then
echo "In directory '$(pwd)', no subdirectory named '${ROOT_SUBDIR}' was found."
else