Commit afa062d5 authored by Adam Procter, committed by omarkanawi

clang-format comments: /src/ngraph/frontend (#3503)

* New clang-format rules for src/ngraph/frontend

* Remove now-redundant .clang-format
parent 1221652c
#
# OVERRIDE TO STYLE: Comments do *not* wrap.
#
BasedOnStyle: LLVM
IndentWidth: 4
UseTab: Never
Language: Cpp
Standard: Cpp11
AccessModifierOffset: -4
AlignConsecutiveDeclarations: false
AlignConsecutiveAssignments: false
AlignTrailingComments: true
AllowShortBlocksOnASingleLine: true
AllowShortCaseLabelsOnASingleLine: true
AllowShortFunctionsOnASingleLine: Inline
AlwaysBreakBeforeMultilineStrings: true
AlwaysBreakTemplateDeclarations: true
BinPackArguments: false
BinPackParameters: false
BreakBeforeBraces: Allman
BreakConstructorInitializersBeforeComma: true
ColumnLimit: 100
CommentPragmas: '.*'
IndentCaseLabels: false
IndentWrappedFunctionNames: true
KeepEmptyLinesAtTheStartOfBlocks: false
NamespaceIndentation: All
PointerAlignment: Left
SpaceAfterCStyleCast: false
SpaceBeforeAssignmentOperators: true
SpaceBeforeParens: ControlStatements
SpaceInEmptyParentheses: false
SpacesInAngles: false
SpacesInCStyleCastParentheses: false
SpacesInParentheses: false
SpacesInSquareBrackets: false
SortIncludes: false
ReflowComments: true
IncludeCategories:
- Regex: '^".*'
Priority: 3
- Regex: '^<.*'
Priority: 2
SortIncludes: true
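
For illustration, a minimal sketch of what these rules produce (the file, namespace, and comment text below are hypothetical, not taken from the commit): with `ColumnLimit: 100` and `ReflowComments: true`, clang-format wraps long `//` and `///` comments at the 100-column limit, which accounts for the bulk of the hunks that follow; `BreakBeforeBraces: Allman`, `IndentWidth: 4`, `NamespaceIndentation: All`, and `PointerAlignment: Left` also apply.

```cpp
// Hypothetical snippet formatted under the rules above; names are illustrative only.
#include <memory>
#include <string>

namespace example
{
    // A long explanatory comment such as this one is reflowed so that no line exceeds the
    // 100-column limit; the continuation keeps the same indentation and comment prefix.
    std::shared_ptr<std::string> make_label(const std::string& prefix, int id)
    {
        // Allman braces put the opening brace on its own line; the pointer-like return type
        // binds to the left (std::shared_ptr<std::string>), matching PointerAlignment: Left.
        return std::make_shared<std::string>(prefix + "_" + std::to_string(id));
    }
} // namespace example
```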
......@@ -104,7 +104,8 @@ namespace ngraph
{
unknown_operators.emplace(detail::get_op_domain_and_name(node_proto),
node_proto);
// If a node from an unregistered domain is detected, try registering that domain
// If a node from an unregistered domain is detected, try registering that
// domain
m_model->enable_opset_domain(detail::get_node_domain(node_proto));
}
}
......
......@@ -53,8 +53,10 @@ namespace ngraph
/// \param name type name of the operator object,
/// \param domain domain name of the operator object.
/// \return Reference to the operator object.
/// \throw error::UnknownDomain there is no operator set defined for the given domain,
/// \throw error::UnknownOperator the given operator type name does not exist in operator set.
/// \throw error::UnknownDomain there is no operator set defined for the given
/// domain,
/// \throw error::UnknownOperator the given operator type name does not exist in
/// operator set.
const Operator& get_operator(const std::string& name, const std::string& domain) const;
/// \brief Check availability of operator base on NodeProto.
......
......@@ -37,7 +37,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -36,7 +36,7 @@ namespace ngraph
}
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -53,7 +53,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -32,7 +32,7 @@ namespace ngraph
NodeVector acosh(const Node& node);
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -53,7 +53,7 @@ namespace ngraph
} // namespace set_7
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -37,7 +37,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -34,7 +34,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -37,7 +37,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -34,7 +34,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -37,7 +37,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -36,7 +36,7 @@ namespace ngraph
}
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -52,7 +52,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -32,7 +32,7 @@ namespace ngraph
NodeVector asinh(const Node& node);
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -36,7 +36,7 @@ namespace ngraph
}
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -56,7 +56,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -32,7 +32,7 @@ namespace ngraph
NodeVector atanh(const Node& node);
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -33,7 +33,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -37,7 +37,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -75,7 +75,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -30,7 +30,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -39,7 +39,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -37,7 +37,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -43,7 +43,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -45,7 +45,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -151,7 +151,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -53,7 +53,8 @@ namespace ngraph
{
// Split one convolution op to N ops where N is the number of groups
// and concat results after computation.
// reference: https://github.com/NervanaSystems/ngraph-mxnet/blob/fdd692/src/ngraph/ngraph_emitter.cc#L822-L856
// reference:
// https://github.com/NervanaSystems/ngraph-mxnet/blob/fdd692/src/ngraph/ngraph_emitter.cc#L822-L856
std::size_t n_data_channels{data->get_shape().at(1)};
std::size_t n_filters_channels{filters->get_shape().at(0)};
std::size_t data_group_size{n_data_channels / groups};
......@@ -166,7 +167,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -37,7 +37,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -113,7 +113,7 @@ namespace ngraph
}
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,13 +31,13 @@ namespace ngraph
///
/// \param node The ONNX node object representing this operation.
///
/// \return The vector containing Ngraph nodes producing output of quantized ONNX convolution
/// operation.
/// \return The vector containing Ngraph nodes producing output of quantized ONNX
/// convolution operation.
NodeVector conv_integer(const Node& node);
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -120,7 +120,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -37,7 +37,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -36,7 +36,7 @@ namespace ngraph
}
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -36,7 +36,7 @@ namespace ngraph
}
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -33,7 +33,7 @@ namespace ngraph
}
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -39,7 +39,7 @@ namespace ngraph
NodeVector depth_to_space(const Node& node);
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -83,7 +83,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -53,7 +53,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -38,7 +38,7 @@ namespace ngraph
}
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -40,7 +40,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -37,7 +37,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -36,7 +36,7 @@ namespace ngraph
}
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -37,7 +37,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -30,7 +30,7 @@ namespace ngraph
NodeVector eye_like(const Node& node);
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -42,7 +42,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -33,7 +33,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -37,7 +37,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -44,7 +44,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -46,7 +46,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,8 +31,8 @@ namespace ngraph
///
/// \param node The ONNX node object representing this operation.
///
/// \return The vector containing Ngraph nodes producing output of ONNX GlobalAveragePool
/// operation.
/// \return The vector containing Ngraph nodes producing output of ONNX
/// GlobalAveragePool operation.
NodeVector global_average_pool(const Node& node);
} // namespace set_1
......
......@@ -31,8 +31,8 @@ namespace ngraph
///
/// \param node The ONNX node object representing this operation.
///
/// \return The vector containing Ngraph nodes producing output of ONNX GlobalMaxPool
/// operation.
/// \return The vector containing Ngraph nodes producing output of ONNX
/// GlobalMaxPool operation.
NodeVector global_max_pool(const Node& node);
} // namespace set_1
......
......@@ -38,7 +38,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -41,7 +41,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -30,7 +30,7 @@ namespace ngraph
NodeVector hardmax(const Node& node);
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -33,7 +33,7 @@ namespace ngraph
}
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -86,7 +86,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -27,7 +27,8 @@ namespace ngraph
{
namespace set_1
{
/// \brief Creates nGraph node representing ONNX InstanceNormalization operator.
/// \brief Creates nGraph node representing ONNX InstanceNormalization
/// operator.
///
/// \note The resulting node represents following equation:
/// y = scale * (x - mean) / sqrt(variance + epsilon) + B
......@@ -40,7 +41,7 @@ namespace ngraph
NodeVector instance_norm(const Node& node);
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -54,7 +54,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -37,7 +37,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -37,7 +37,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -38,7 +38,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -58,7 +58,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -32,7 +32,8 @@ namespace ngraph
/// Suppose A contains spatial dimensions of input tensor, then
/// for matrix A we have p-norm defined as following double sum over
/// all elements:
/// ||A||_p = ||vec(A)||_p = [sum_{i=1}^m sum_{j=1}^n abs(a_{i,j})^p]^{1/p}
/// ||A||_p = ||vec(A)||_p =
/// [sum_{i=1}^m sum_{j=1}^n abs(a_{i,j})^p]^{1/p}
///
/// \param[in] node The input ONNX node representing this operation.
///
......@@ -41,7 +42,7 @@ namespace ngraph
NodeVector lp_norm(const Node& node);
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -72,7 +72,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -37,7 +37,8 @@ namespace ngraph
/// Suppose A contains spatial dimensions of input tensor, then
/// for matrix A we have p-norm defined as following double sum over
/// all elements:
/// ||A||_p = ||vec(A)||_p = [sum_{i=1}^m sum_{j=1}^n abs(a_{i,j})^p]^{1/p}
/// ||A||_p = ||vec(A)||_p =
/// [sum_{i=1}^m sum_{j=1}^n abs(a_{i,j})^p]^{1/p}
///
/// \param[in] node The input ONNX node representing this operation.
///
......@@ -46,7 +47,7 @@ namespace ngraph
NodeVector global_lp_pool(const Node& node);
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -40,7 +40,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -30,7 +30,7 @@ namespace ngraph
NodeVector lrn(const Node& node);
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -77,9 +77,11 @@ namespace ngraph
// ----- Mandatory inputs ------
// Packed input sequences. Shape: [seq_length, batch_size, input_size]
m_map[LSTMInput::LSTM_INPUT_X] = ng_inputs.at(0);
// Weight tensor for the gates. Shape: [num_directions, 4*hidden_size, input_size]
// Weight tensor for the gates.
// Shape: [num_directions, 4*hidden_size, input_size]
m_map[LSTMInput::LSTM_INPUT_W] = ng_inputs.at(1);
// The recurrence weight tensor. Shape: [num_directions, 4*hidden_size, hidden_size]
// The recurrence weight tensor.
// Shape: [num_directions, 4*hidden_size, hidden_size]
m_map[LSTMInput::LSTM_INPUT_R] = ng_inputs.at(2);
const std::size_t hidden_size =
......@@ -116,7 +118,8 @@ namespace ngraph
std::vector<std::int32_t>(
batch_size, m_map[LSTMInput::LSTM_INPUT_X]->get_shape().at(0)));
}
// The initial value of the hidden. Shape [num_directions, batch_size, hidden_size]
// The initial value of the hidden.
// Shape [num_directions, batch_size, hidden_size]
if (ng_inputs.size() > 5 && !ng_inputs.at(5)->is_null())
{
m_map[LSTMInput::LSTM_INPUT_INIT_H] = ng_inputs.at(5);
......@@ -128,7 +131,8 @@ namespace ngraph
Shape{num_directions, batch_size, hidden_size},
std::vector<float>(num_directions * batch_size * hidden_size, 0.f));
}
// The initial value of the cell. Shape [num_directions, batch_size, hidden_size]
// The initial value of the cell.
// Shape [num_directions, batch_size, hidden_size]
if (ng_inputs.size() > 6 && !ng_inputs.at(6)->is_null())
{
m_map[LSTMInput::LSTM_INPUT_INIT_C] = ng_inputs.at(6);
......@@ -237,8 +241,8 @@ namespace ngraph
const std::shared_ptr<ngraph::Node>& initial_c,
const std::shared_ptr<ngraph::Node>& seq_lengths,
const LSTMAttributes& attributes)
: m_X{X}
// Since we have forward LSTM we can squeeze `num_directions` axis from inputs.
: m_X{X} // Since we have forward LSTM we can squeeze `num_directions` axis
// from inputs.
, m_W(reshape::squeeze(W))
, m_R(reshape::squeeze(R))
, m_B(reshape::squeeze(B))
......@@ -258,7 +262,8 @@ namespace ngraph
// ------ INPUTS ------
// X - The input tensor. [seq_length, batch_size, input_size]
// W - The weight tensor. [num_directions, 4*hidden_size, input_size]
// R - The recurrence weight tensor. [num_directions, 4*hidden_size, hidden_size]
// R - The recurrence weight tensor. [num_directions, 4*hidden_size,
// hidden_size]
// B - The bias tensor for input gate. [num_directions, 8*hidden_size]
// P - The weight tensor for peepholes. [num_directions, 3*hidde_size]
// ------ ACRONYMS ------
......@@ -452,7 +457,8 @@ namespace ngraph
}
if (attributes.m_direction == LSTMDirection::LSTM_DIRECTION_BIDIRECTIONAL)
{
// In bidirectional mode weights are stacked together, so we must split them.
// In bidirectional mode weights are stacked together, so we must split
// them.
NodeVector W{
ngraph::builder::split(input_map.at(LSTMInput::LSTM_INPUT_W), 2)};
NodeVector R{
......@@ -502,7 +508,7 @@ namespace ngraph
}
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -32,7 +32,7 @@ namespace ngraph
}
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -30,7 +30,7 @@ namespace ngraph
NodeVector matmul(const Node& node);
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -32,7 +32,7 @@ namespace ngraph
}
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,13 +31,13 @@ namespace ngraph
///
/// \param node The ONNX node object representing this operation.
///
/// \return The vector containing Ngraph nodes producing output of quantized ONNX matrix
/// multiplication operation.
/// \return The vector containing Ngraph nodes producing output of quantized ONNX
/// matrix multiplication operation.
NodeVector matmul_integer(const Node& node);
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -45,7 +45,7 @@ namespace ngraph
} // namespace set_8
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -37,7 +37,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -39,7 +39,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -33,7 +33,8 @@ namespace ngraph
auto sum = variadic::make_ng_variadic_op<ngraph::op::Add>(node).front();
auto shape = sum->get_shape();
// Create a Constant representing the number of inputs with the same shape as sum
// Create a Constant representing the number of inputs with the same shape as
// sum
auto count = ngraph::op::Constant::create(
sum->get_element_type(),
shape,
......@@ -52,7 +53,8 @@ namespace ngraph
variadic::make_ng_variadic_op_with_broadcast<ngraph::op::Add>(node).front();
auto shape = sum->get_shape();
// Create a Constant representing the number of inputs with the same shape as sum
// Create a Constant representing the number of inputs with the same shape as
// sum
auto count = ngraph::op::Constant::create(
sum->get_element_type(),
shape,
......@@ -63,7 +65,7 @@ namespace ngraph
} // namespace set_8
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -37,7 +37,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -54,7 +54,7 @@ namespace ngraph
} // namespace set_9
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -37,7 +37,7 @@ namespace ngraph
NodeVector mean_variance_normalization(const Node& node);
} // namespace set_9
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -45,7 +45,7 @@ namespace ngraph
} // namespace set_8
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -56,7 +56,7 @@ namespace ngraph
} // namespace set_7
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
inline NodeVector neg(const Node& node) { return {-node.get_ng_inputs().at(0)}; }
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -35,7 +35,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -89,7 +89,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -33,7 +33,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -37,7 +37,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -71,7 +71,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -37,7 +37,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -37,7 +37,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -32,7 +32,7 @@ namespace ngraph
}
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -30,7 +30,7 @@ namespace ngraph
NodeVector qlinear_matmul(const Node& node);
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -82,7 +82,8 @@ namespace ngraph
{
// Split one convolution op to N ops where N is the number of groups
// and concat results after computation.
// reference: https://github.com/NervanaSystems/ngraph-mxnet/blob/fdd692/src/ngraph/ngraph_emitter.cc#L822-L856
// reference:
// https://github.com/NervanaSystems/ngraph-mxnet/blob/fdd692/src/ngraph/ngraph_emitter.cc#L822-L856
std::size_t n_data_channels{data->get_shape().at(1)};
std::size_t n_filters_channels{filters->get_shape().at(0)};
......@@ -270,7 +271,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,13 +31,13 @@ namespace ngraph
///
/// \param node The ONNX node object representing this operation.
///
/// \return The vector containing Ngraph nodes producing output of ONNX quantizied convolution
/// operation.
/// \return The vector containing Ngraph nodes producing output of ONNX quantizied
/// convolution operation.
NodeVector quant_conv(const Node& node);
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -72,7 +72,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -45,7 +45,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -61,7 +61,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -41,7 +41,8 @@ namespace ngraph
{
namespace set_1
{
/// \brief Compute the log sum of the input tensor's elements along the provided axes.
/// \brief Compute the log sum of the input tensor's elements along the
/// provided axes.
///
/// \par Overview
/// The output tensor has the same rank as the input if Node attribute keepdims
......@@ -63,7 +64,8 @@ namespace ngraph
return {std::make_shared<ngraph::op::Log>(sum_node)};
}
/// \brief Compute the log sum exponent of the input tensor's elements along the provided axes.
/// \brief Compute the log sum exponent of the input tensor's elements along
/// the provided axes.
///
/// \par Overview
/// The output tensor has the same rank as the input if Node attribute keepdims
......@@ -86,7 +88,8 @@ namespace ngraph
return {std::make_shared<ngraph::op::Log>(sum_node)};
}
/// \brief Compute the L1 norm of the input tensor's element along the provided axes.
/// \brief Compute the L1 norm of the input tensor's element along the provided
/// axes.
///
/// \par Overview
/// The output tensor has the same rank as the input if Node attribute keepdims
......@@ -107,7 +110,8 @@ namespace ngraph
node, node.get_ng_inputs().at(0), l1_norm_reduction)};
}
/// \brief Compute the L2 norm of the input tensor's element along the provided axes.
/// \brief Compute the L2 norm of the input tensor's element along the provided
/// axes.
///
/// \par Overview
/// The output tensor has the same rank as the input if Node attribute keepdims
......@@ -129,7 +133,8 @@ namespace ngraph
node, node.get_ng_inputs().at(0), l2_norm_reduction)};
}
/// \brief Compute the maximum value of the input tensor's elements along the provided axes.
/// \brief Compute the maximum value of the input tensor's elements along the
/// provided axes.
///
/// \par Overview
/// The output tensor has the same rank as the input if Node attribute keepdims
......@@ -150,7 +155,8 @@ namespace ngraph
const ngraph::AxisSet&>)};
}
/// \brief Compute the mean value of the input tensor's elements along the provided axes.
/// \brief Compute the mean value of the input tensor's elements along the
/// provided axes.
///
/// \par Overview
/// The output tensor has the same rank as the input if Node attribute keepdims
......@@ -163,7 +169,8 @@ namespace ngraph
///
NodeVector reduce_mean(const Node& node);
/// \brief Compute the minimum value of the input tensor's elements along the provided axes.
/// \brief Compute the minimum value of the input tensor's elements along the
/// provided axes.
///
/// \par Overview
/// The output tensor has the same rank as the input if Node attribute keepdims
......@@ -184,7 +191,8 @@ namespace ngraph
const ngraph::AxisSet&>)};
}
/// \brief Compute the product of the input tensor's elements along the provided axes.
/// \brief Compute the product of the input tensor's elements along the
/// provided axes.
///
/// \par Overview
/// The output tensor has the same rank as the input if Node attribute keepdims
......@@ -205,7 +213,8 @@ namespace ngraph
const ngraph::AxisSet&>)};
}
/// \brief Compute the sum of the input tensor's elements along the provided axes.
/// \brief Compute the sum of the input tensor's elements along the provided
/// axes.
///
/// \par Overview
/// The output tensor has the same rank as the input if Node attribute keepdims
......@@ -226,7 +235,8 @@ namespace ngraph
const ngraph::AxisSet&>)};
}
/// \brief Compute the sum square of the input tensor's element along the provided axes.
/// \brief Compute the sum square of the input tensor's element along the
/// provided axes.
///
/// \par Overview
/// The output tensor has the same rank as the input if Node attribute keepdims
......@@ -251,7 +261,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -36,7 +36,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -68,7 +68,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -38,7 +38,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -35,7 +35,7 @@ namespace ngraph
const auto data = node.get_ng_inputs().at(0);
const auto sequence_lengths = node.get_ng_inputs().at(1);
//nGraph supports only int32 type of sequence_lengths
// nGraph supports only int32 type of sequence_lengths
const auto sequence_lengths_i32 = std::make_shared<ngraph::op::Convert>(
node.get_ng_inputs().at(1), element::i32);
......@@ -48,7 +48,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -70,7 +70,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -42,7 +42,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -92,7 +92,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -30,13 +30,13 @@ namespace ngraph
/// \brief ONNX Shrink operator
///
/// \note It operates on a single input tensor and two attributes: lambd and bias.
/// Input values greater or equal to '-lambd' and less or equal to 'lambd' are zeroed-out.
/// 'Bias' is added to the values that are less than '-lambd'
/// Input values greater or equal to '-lambd' and less or equal to 'lambd' are
/// zeroed-out. 'Bias' is added to the values that are less than '-lambd'
/// and subtracted from values greater than 'lambd'.
NodeVector shrink(const Node& node);
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -37,7 +37,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -36,7 +36,7 @@ namespace ngraph
}
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -36,7 +36,7 @@ namespace ngraph
}
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -36,7 +36,7 @@ namespace ngraph
}
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -46,7 +46,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -59,7 +59,8 @@ namespace ngraph
get_valid_array_idx(ends.at(idx), data_shape.at(axis));
}
// Check for cases when start is greater than end and change them to "empty" slice.
// Check for cases when start is greater than end and change them to "empty"
// slice.
for (auto idx = 0; idx < lower_bounds.size(); ++idx)
{
if (lower_bounds.at(idx) > upper_bounds.at(idx))
......@@ -73,7 +74,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -53,7 +53,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -32,7 +32,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -68,7 +68,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -47,7 +47,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -33,7 +33,7 @@ namespace ngraph
}
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -36,7 +36,7 @@ namespace ngraph
NodeVector space_to_depth(const Node& node);
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -57,7 +57,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -37,7 +37,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -47,6 +47,6 @@ namespace ngraph
}
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
} // namespace ngraph
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -55,7 +55,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -45,7 +45,7 @@ namespace ngraph
} // namespace set_8
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -36,7 +36,7 @@ namespace ngraph
}
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -37,7 +37,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -50,7 +50,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -61,7 +61,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -36,7 +36,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -43,7 +43,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -37,7 +37,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -31,7 +31,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -40,7 +40,7 @@ namespace ngraph
}
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -45,7 +45,7 @@ namespace ngraph
} // namespace set_1
} //namespace op
} // namespace op
} // namespace onnx_import
......
......@@ -92,8 +92,8 @@ namespace ngraph
/// \brief Creates a shifted square identity matrix.
/// \note Shifting in the context of this operator means that
/// the matrix can be created with elements equal to 1 not only in the main diagonal.
/// Shifting adds an offset and moves the diagonal up or down
/// the matrix can be created with elements equal to 1 not only in the main
/// diagonal. Shifting adds an offset and moves the diagonal up or down
///
/// \param[in] output_shape Shape of the resulting matrix.
/// \param[in] output_type Element type of the resulting matrix.
......
......@@ -24,15 +24,17 @@ namespace ngraph
{
namespace matmul
{
/// \brief Factory class which generates an nGraph sub-graph based on an ONNX MatMul operation.
/// \brief Factory class which generates an nGraph sub-graph based on an ONNX MatMul
/// operation.
///
/// \note
/// The sub-graph is needed to adjust nGraph's Dot operation semantics to semantics
/// expected by ONNX, which are modeled on NumPy's "stacks of arrays" approach.
/// Differences are apparent with matrices of rank > 2.
///
/// This default implementation `MatmulFactory` creates a `MatMul` operation for floating-point data.
/// Subclasses: `QLinearMatmulFactory` and `MatmulIntegerFactory` implement quantized versions.
/// This default implementation `MatmulFactory` creates a `MatMul` operation for
/// floating-point data. Subclasses: `QLinearMatmulFactory` and `MatmulIntegerFactory`
/// implement quantized versions.
class MatmulFactory
{
public:
......@@ -65,7 +67,8 @@ namespace ngraph
const NodeVector m_inputs;
};
/// \brief Factory class which generates an nGraph sub-graph based on an ONNX QLinearMatMul operation.
/// \brief Factory class which generates an nGraph sub-graph based on an ONNX
/// QLinearMatMul operation.
class QLinearMatmulFactory : public MatmulFactory
{
public:
......@@ -80,7 +83,8 @@ namespace ngraph
const std::shared_ptr<ngraph::Node>& right) override;
};
/// \brief Factory class which generates an nGraph sub-graph based on an ONNX MatMulInteger operation.
/// \brief Factory class which generates an nGraph sub-graph based on an ONNX
/// MatMulInteger operation.
class MatmulIntegerFactory : public MatmulFactory
{
public:
......
......@@ -33,7 +33,8 @@ namespace ngraph
namespace pooling
{
///
/// \brief Factory class which generates sub-graphs for ONNX 'regular' pooling operators.
/// \brief Factory class which generates sub-graphs for ONNX 'regular' pooling
/// operators.
///
/// \note This factory is intended for creating pooling operations like:
/// - AveragePool
......@@ -52,7 +53,8 @@ namespace ngraph
///
/// \brief Creates a sub-graph representing appropriate ONNX operation.
///
/// \tparam NgraphOperator nGraph operator class type used to build ONNX operation.
/// \tparam NgraphOperator nGraph operator class type used to build ONNX
/// operation.
///
/// \return Vector of output nodes.
///
......@@ -83,7 +85,8 @@ namespace ngraph
NodeVector PoolingFactory::make_pooling_op<ngraph::op::AvgPool>() const;
///
/// \brief Factory class which generates sub-graphs for ONNX 'global' pooling operators.
/// \brief Factory class which generates sub-graphs for ONNX 'global' pooling
/// operators.
///
class GlobalPoolingFactory : public PoolingFactory
{
......
......@@ -52,9 +52,9 @@ namespace ngraph
}
}
// Check whether there are dimensions equal to -1 in output_shape. There may be at most
// one such case. Its value is then inferred from the size of the tensor and the
// remaining dimensions.
// Check whether there are dimensions equal to -1 in output_shape. There may be at
// most one such case. Its value is then inferred from the size of the tensor and
// the remaining dimensions.
auto neg_value_it =
std::find(std::begin(inferred_dims), std::end(inferred_dims), -1);
if (neg_value_it != std::end(inferred_dims))
......
......@@ -34,10 +34,11 @@ namespace ngraph
/// \brief Infer `output_shape` dimension values.
///
/// \par Inferention rules
/// \li The input_shape may consist at most on -1 value. In this case the value
/// is inferred from the size of the tensor and the remaining dimensions.
/// \li If a dimension value is equal to 0, then its output value is going to
/// be copied from the input_shape argument.
/// \li The input_shape may consist at most on -1 value. In this case the
/// value is inferred from the size of the tensor and the remaining
/// dimensions.
/// \li If a dimension value is equal to 0, then its output value is going
/// to be copied from the input_shape argument.
///
/// \param[in] node_name The node name.
/// \param[in] input_shape The input node shape.
......