Commit 445c8158 authored by Robert Kimball, committed by Scott Cyphers

fix broken doc strings (#2981)

parent 1fdf14ae
@@ -31,6 +31,8 @@
 //
 #ifdef NGRAPH_DEPRECATED_ENABLE
 #define NGRAPH_DEPRECATED(msg) __attribute__((deprecated((msg))))
+#define NGRAPH_DEPRECATED_DOC /// \deprecated
 #else
 #define NGRAPH_DEPRECATED(msg)
+#define NGRAPH_DEPRECATED_DOC
 #endif
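For orientation, here is a minimal sketch of how these two macros are meant to be paired on a deprecated declaration; the Widget class below is hypothetical and only mirrors the pattern used later in this commit:

#include "ngraph/deprecated.hpp"

class Widget
{
public:
    /// \brief Preferred constructor.
    Widget(int size);

    NGRAPH_DEPRECATED_DOC
    /// Old-style constructor kept for backward compatibility.
    NGRAPH_DEPRECATED("Use Widget(int) instead")
    Widget(double size);
};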
@@ -24,7 +24,7 @@ namespace ngraph
 {
     namespace op
     {
-        // \brief Computes minimum index along a specified axis for a given tensor
+        /// \brief Computes maximum index along a specified axis for a given tensor
         class ArgMax : public op::util::IndexReduction
         {
         public:
...
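As a quick illustration of the op this brief documents (a sketch only: the shapes, axis, and index element type are arbitrary, and the three-argument constructor is assumed from the nGraph API of this period):

#include <memory>

#include "ngraph/op/argmax.hpp"
#include "ngraph/op/parameter.hpp"

using namespace ngraph;

void argmax_example()
{
    // 2x3 float input; take the index of the largest element along axis 1.
    auto data = std::make_shared<op::Parameter>(element::f32, Shape{2, 3});
    auto argmax = std::make_shared<op::ArgMax>(data, 1, element::i64);
    // The result has shape {2} and element type i64.
}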
@@ -24,7 +24,7 @@ namespace ngraph
 {
     namespace op
     {
-        // \brief Computes minimum index along a specified axis for a given tensor
+        /// \brief Computes minimum index along a specified axis for a given tensor
         class ArgMin : public op::util::IndexReduction
         {
         public:
...
...@@ -18,6 +18,7 @@ ...@@ -18,6 +18,7 @@
#include <memory> #include <memory>
#include "ngraph/deprecated.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/op.hpp" #include "ngraph/op/op.hpp"
#include "ngraph/util.hpp" #include "ngraph/util.hpp"
@@ -26,39 +27,40 @@ namespace ngraph
 {
     namespace op
     {
-        // \brief Batchnorm for training operation
+        /// \brief Batchnorm for training operation
         class BatchNormTraining : public Op
         {
         public:
-            // \param input Must have rank >= 2, [., C, ...]
-            // \param gamma gamma scaling for normalized value. [C]
-            // \param beta bias added to the scaled normalized value [C]
-            // \param epsilon Avoids divsion by 0 if input has 0 variance
+            /// \param input Must have rank >= 2, [., C, ...]
+            /// \param gamma gamma scaling for normalized value. [C]
+            /// \param beta bias added to the scaled normalized value [C]
+            /// \param epsilon Avoids division by 0 if input has 0 variance
             BatchNormTraining(std::shared_ptr<Node> input,
                               std::shared_ptr<Node> gamma,
                               std::shared_ptr<Node> beta,
                               double epsilon);

-            // \deprecated
-            // In this version of BatchNorm:
-            //
-            // MEAN AND VARIANCE: computed directly from the content of 'input'.
-            //
-            // OUTPUT VALUE: A tuple with the following structure:
-            //     [0] - The normalization of 'input'.
-            //     [1] - The per-channel means of (pre-normalized) 'input'.
-            //     [2] - The per-channel variances of (pre-normalized) 'input'.
-            //
-            // AUTODIFF SUPPORT: yes: 'generate_adjoints(...)' works as expected.
-            //
-            // SHAPE DETAILS:
-            //     gamma: must have rank 1, with the same span as input's channel axis.
-            //     beta: must have rank 1, with the same span as input's channel axis.
-            //     input: must have rank >= 2. The second dimension represents the channel axis
-            //         and must have a span of at least 1.
-            //     output[0]: shall have the same shape as 'input'.
-            //     output[1]: shall have rank 1, with the same span as input's channel axis.
-            //     output[2]: shall have rank 1, with the same span as input's channel axis.
+            NGRAPH_DEPRECATED_DOC
+            /// In this version of BatchNorm:
+            ///
+            /// MEAN AND VARIANCE: computed directly from the content of 'input'.
+            ///
+            /// OUTPUT VALUE: A tuple with the following structure:
+            ///     [0] - The normalization of 'input'.
+            ///     [1] - The per-channel means of (pre-normalized) 'input'.
+            ///     [2] - The per-channel variances of (pre-normalized) 'input'.
+            ///
+            /// AUTODIFF SUPPORT: yes: 'generate_adjoints(...)' works as expected.
+            ///
+            /// SHAPE DETAILS:
+            ///     gamma: must have rank 1, with the same span as input's channel axis.
+            ///     beta: must have rank 1, with the same span as input's channel axis.
+            ///     input: must have rank >= 2. The second dimension represents the channel axis
+            ///         and must have a span of at least 1.
+            ///     output[0]: shall have the same shape as 'input'.
+            ///     output[1]: shall have rank 1, with the same span as input's channel axis.
+            ///     output[2]: shall have rank 1, with the same span as input's channel axis.
+            NGRAPH_DEPRECATED("Use another constructor")
             BatchNormTraining(double eps,
                               std::shared_ptr<Node> gamma,
                               std::shared_ptr<Node> beta,
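A minimal usage sketch of the non-deprecated constructor documented above; the shapes are illustrative, and splitting the three outputs with op::GetOutputElement is an assumption about the multi-output convention of this nGraph version:

#include <memory>

#include "ngraph/op/batch_norm.hpp"
#include "ngraph/op/get_output_element.hpp"
#include "ngraph/op/parameter.hpp"

using namespace ngraph;

void batch_norm_training_example()
{
    // NCHW input with C = 3; gamma and beta are per-channel vectors.
    auto input = std::make_shared<op::Parameter>(element::f32, Shape{8, 3, 224, 224});
    auto gamma = std::make_shared<op::Parameter>(element::f32, Shape{3});
    auto beta = std::make_shared<op::Parameter>(element::f32, Shape{3});

    auto bn = std::make_shared<op::BatchNormTraining>(input, gamma, beta, 1e-5);

    // Outputs: [0] normalized input, [1] per-channel mean, [2] per-channel variance.
    auto normalized = std::make_shared<op::GetOutputElement>(bn, 0);
    auto mean = std::make_shared<op::GetOutputElement>(bn, 1);
    auto variance = std::make_shared<op::GetOutputElement>(bn, 2);
}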
@@ -85,12 +87,12 @@ namespace ngraph
         class BatchNormInference : public Op
         {
         public:
-            // \param input [., C, ...]
-            // \param gamma gamma scaling for normalized value. [C]
-            // \param beta bias added to the scaled normalized value [C]
-            // \param mean value for mean normalization [C]
-            // \param variance value for variance normalization [C]
-            // \param epsilon Avoids divsion by 0 if input has 0 variance
+            /// \param input [., C, ...]
+            /// \param gamma gamma scaling for normalized value. [C]
+            /// \param beta bias added to the scaled normalized value [C]
+            /// \param mean value for mean normalization [C]
+            /// \param variance value for variance normalization [C]
+            /// \param epsilon Avoids division by 0 if input has 0 variance
             BatchNormInference(std::shared_ptr<ngraph::Node> input,
                                std::shared_ptr<ngraph::Node> gamma,
                                std::shared_ptr<ngraph::Node> beta,
@@ -98,24 +100,25 @@
                                std::shared_ptr<ngraph::Node> variance,
                                double epsilon);

-            // \deprecated
-            // In this version of BatchNorm:
-            //
-            // MEAN AND VARIANCE: provided by the 'mean' and 'variance' parameters.
-            //
-            // OUTPUT VALUE: a single tensor with the normalized value of 'input'.
-            //
-            // AUTODIFF SUPPORT:
-            //     - 'generate_adjoints(...) may throw an exception.
-            //
-            // SHAPE DETAILS:
-            //     gamma: must have rank 1, with the same span as input's channel axis.
-            //     beta: must have rank 1, with the same span as input's channel axis.
-            //     input: must have rank >= 2. The second dimension represents the channel axis and
-            //         must have a span of at least 1.
-            //     mean: must have rank 1, with the same span as input's channel axis.
-            //     variance: must have rank 1, with the same span as input's channel axis.
-            //     output: shall have the same shape as 'input'.
+            NGRAPH_DEPRECATED_DOC
+            /// In this version of BatchNorm:
+            ///
+            /// MEAN AND VARIANCE: provided by the 'mean' and 'variance' parameters.
+            ///
+            /// OUTPUT VALUE: a single tensor with the normalized value of 'input'.
+            ///
+            /// AUTODIFF SUPPORT:
+            ///     - 'generate_adjoints(...)' may throw an exception.
+            ///
+            /// SHAPE DETAILS:
+            ///     gamma: must have rank 1, with the same span as input's channel axis.
+            ///     beta: must have rank 1, with the same span as input's channel axis.
+            ///     input: must have rank >= 2. The second dimension represents the channel axis and
+            ///         must have a span of at least 1.
+            ///     mean: must have rank 1, with the same span as input's channel axis.
+            ///     variance: must have rank 1, with the same span as input's channel axis.
+            ///     output: shall have the same shape as 'input'.
+            NGRAPH_DEPRECATED("Use another constructor")
             BatchNormInference(double eps,
                                std::shared_ptr<ngraph::Node> gamma,
                                std::shared_ptr<ngraph::Node> beta,
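A matching sketch for the inference variant; the argument order (input, gamma, beta, mean, variance, epsilon) is inferred from the \param list above, since the diff elides the 'mean' parameter line, and the shapes are illustrative:

#include <memory>

#include "ngraph/op/batch_norm.hpp"
#include "ngraph/op/parameter.hpp"

using namespace ngraph;

void batch_norm_inference_example()
{
    // Inference-time batch norm: mean and variance are supplied rather than computed.
    auto input = std::make_shared<op::Parameter>(element::f32, Shape{8, 3, 224, 224});
    auto gamma = std::make_shared<op::Parameter>(element::f32, Shape{3});
    auto beta = std::make_shared<op::Parameter>(element::f32, Shape{3});
    auto mean = std::make_shared<op::Parameter>(element::f32, Shape{3});
    auto variance = std::make_shared<op::Parameter>(element::f32, Shape{3});

    auto bn = std::make_shared<op::BatchNormInference>(input, gamma, beta, mean, variance, 1e-5);
    // Single output with the same shape as 'input'.
}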
@@ -157,7 +160,8 @@
                                       std::shared_ptr<Node> delta,
                                       double epsilon);

-            // \deprecated
+            NGRAPH_DEPRECATED_DOC
+            NGRAPH_DEPRECATED("Use another constructor")
             BatchNormTrainingBackprop(double epsilon,
                                       std::shared_ptr<Node> gamma,
                                       std::shared_ptr<Node> beta,
...
@@ -24,7 +24,7 @@ namespace ngraph
 {
     namespace op
     {
-        // \brief Returns embeddings for given indices
+        /// \brief Returns embeddings for given indices
         class EmbeddingLookup : public Op
         {
         public:
...
@@ -26,9 +26,9 @@ namespace ngraph
         class Gather : public Op
         {
         public:
-            // \param params The tensor from which slices are gathered
-            // \param indices Index tensor: Data type must be `element::i32` or `element::i64`
-            // \param axis Axis in params to gather
+            /// \param params The tensor from which slices are gathered
+            /// \param indices Index tensor: Data type must be `element::i32` or `element::i64`
+            /// \param axis Axis in params to gather
             Gather(const std::shared_ptr<Node>& params,
                    const std::shared_ptr<Node>& indices,
                    size_t axis = 0)
...
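An illustrative use of the constructor documented above; the shapes and index values are arbitrary, and op::Constant is used here only to supply the indices:

#include <cstdint>
#include <memory>
#include <vector>

#include "ngraph/op/constant.hpp"
#include "ngraph/op/gather.hpp"
#include "ngraph/op/parameter.hpp"

using namespace ngraph;

void gather_example()
{
    // Gather rows 0 and 2 of a 3x4 table along axis 0.
    auto params = std::make_shared<op::Parameter>(element::f32, Shape{3, 4});
    auto indices =
        std::make_shared<op::Constant>(element::i64, Shape{2}, std::vector<int64_t>{0, 2});
    auto rows = std::make_shared<op::Gather>(params, indices, 0);
    // 'rows' has shape {2, 4}.
}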
@@ -26,8 +26,8 @@
         class GatherND : public Op
         {
         public:
-            // \param params The tensor from which slices are gathered
-            // \param indices Index tensor: Data type must be `element::i32` or `element::i64`
+            /// \param params The tensor from which slices are gathered
+            /// \param indices Index tensor: Data type must be `element::i32` or `element::i64`
             GatherND(const std::shared_ptr<Node>& params, const std::shared_ptr<Node>& indices)
                 : Op("GatherND", check_single_output_args({params, indices}))
             {
...
@@ -26,9 +26,9 @@
         class ScatterAdd : public Op
         {
         public:
-            // \param inputs Tensor
-            // \param indices Index tensor: Data type must be `element::i32` or `element::i64`
-            // \param update Tensor: Must have same type as inputs
+            /// \param inputs Tensor
+            /// \param indices Index tensor: Data type must be `element::i32` or `element::i64`
+            /// \param updates Tensor: Must have same type as inputs
             ScatterAdd(const std::shared_ptr<Node>& inputs,
                        const std::shared_ptr<Node>& indices,
                        const std::shared_ptr<Node>& updates)
...
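An illustrative shape example for the parameters documented above; the header path and shapes are assumptions, not taken from this diff:

#include <memory>

#include "ngraph/op/parameter.hpp"
#include "ngraph/op/scatter_add.hpp"

using namespace ngraph;

void scatter_add_example()
{
    // Add two 4-element update rows into rows of an 8x4 tensor selected by 'indices'.
    auto inputs = std::make_shared<op::Parameter>(element::f32, Shape{8, 4});
    auto indices = std::make_shared<op::Parameter>(element::i64, Shape{2});
    auto updates = std::make_shared<op::Parameter>(element::f32, Shape{2, 4});
    auto scatter = std::make_shared<op::ScatterAdd>(inputs, indices, updates);
    // The output has the same shape and element type as 'inputs'.
}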
@@ -26,9 +26,9 @@
         class ScatterNDAdd : public Op
         {
         public:
-            // \param inputs Tensor
-            // \param indices Index tensor: Data type must be `element::i32` or `element::i64`
-            // \param update Tensor: Must have same type as inputs
+            /// \param inputs Tensor
+            /// \param indices Index tensor: Data type must be `element::i32` or `element::i64`
+            /// \param updates Tensor: Must have same type as inputs
             ScatterNDAdd(const std::shared_ptr<Node>& inputs,
                          const std::shared_ptr<Node>& indices,
                          const std::shared_ptr<Node>& updates)
...
@@ -132,11 +132,11 @@ public:
     virtual void remove_compiled_function(std::shared_ptr<Executable> exec);

-    // \brief Return a backend specific op (that is not a core ngraph op).
-    //     The string op_name is the requested op, which a backend may or may not implement.
-    //     If unsupported, nullptr is returned, else a backend op is returned.
-    //     The variadic input is used to pass inputs that the op constructor might take
-    // \param op_name is the name of the backend specific op
-    // \returns a shared pointer to the op if found, else nullptr
+    /// \brief Return a backend specific op (that is not a core ngraph op).
+    ///     The string op_name is the requested op, which a backend may or may not implement.
+    ///     If unsupported, nullptr is returned, else a backend op is returned.
+    ///     The variadic input is used to pass inputs that the op constructor might take
+    /// \param op_name is the name of the backend specific op
+    /// \returns a shared pointer to the op if found, else nullptr
     virtual std::shared_ptr<ngraph::Node> get_backend_op(const std::string& op_name, ...);
 };
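A hedged usage sketch of get_backend_op; the backend name and the op name are placeholders, and the nullptr check simply mirrors the contract described in the comment above:

#include <memory>

#include "ngraph/runtime/backend.hpp"

using namespace ngraph;

void backend_op_example()
{
    // "SomeBackendOp" is a placeholder; each backend defines its own op names.
    auto backend = runtime::Backend::create("INTERPRETER");
    std::shared_ptr<Node> node = backend->get_backend_op("SomeBackendOp");
    if (node == nullptr)
    {
        // The backend does not implement this op; fall back to core ngraph ops.
    }
}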