Unverified Commit 65456b05 authored by Scott Cyphers, committed by GitHub

Fix compilation issues for default constructors (#3928)

parent 49c748f5
@@ -53,8 +53,8 @@ namespace ngraph
             double get_min() const { return m_min; }
             double get_max() const { return m_max; }
         private:
-            const double m_min;
-            const double m_max;
+            double m_min;
+            double m_max;
         };
     }
 }
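
The const qualifiers are dropped here because a class whose non-static data members are const scalars without initializers gets a deleted defaulted default constructor, which is exactly what a new ClassName() = default; declaration elsewhere in this commit would trip over. A minimal sketch of that failure mode, using MinMaxOp as a hypothetical stand-in (the real class name is not visible in this hunk):

// Hedged sketch: MinMaxOp* are hypothetical stand-ins, not the real ngraph class.
struct MinMaxOpBroken
{
    MinMaxOpBroken() = default; // defined as deleted: const members have no initializers
    double get_min() const { return m_min; }
    double get_max() const { return m_max; }
private:
    const double m_min;
    const double m_max;
};

struct MinMaxOpFixed
{
    MinMaxOpFixed() = default; // usable once the members are non-const
    double get_min() const { return m_min; }
    double get_max() const { return m_max; }
private:
    double m_min;
    double m_max;
};

int main()
{
    // MinMaxOpBroken broken; // error: call to deleted default constructor
    MinMaxOpFixed fixed;      // compiles; members are left default-initialized
    (void)fixed;
    return 0;
}
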
@@ -56,7 +56,10 @@ namespace ngraph
             NGRAPH_API
             static constexpr NodeTypeInfo type_info{"ReluBackprop", 0};
             const NodeTypeInfo& get_type_info() const override { return type_info; }
-            ReluBackprop() = default;
+            ReluBackprop()
+                : BinaryElementwiseArithmetic(AutoBroadcastSpec::NONE)
+            {
+            }
             /// \brief Constructs a ReluBackprop operation.
             ///
             /// \param arg Node that produces the relu forward input tensor.
...
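
Replacing ReluBackprop() = default; with an explicit constructor suggests the defaulted constructor was itself deleted, presumably because BinaryElementwiseArithmetic offers no default constructor and must be handed an autobroadcast spec. A generic sketch of that pattern, with Base standing in for BinaryElementwiseArithmetic and Spec for AutoBroadcastSpec (both simplified assumptions, not the real ngraph types):

// Hedged sketch: Base and Spec are simplified stand-ins, not ngraph types.
struct Spec
{
    static const Spec NONE; // mirrors AutoBroadcastSpec::NONE in spirit
};
const Spec Spec::NONE{};

struct Base
{
    explicit Base(const Spec&) {} // no default constructor, as assumed for the real base
};

struct DerivedBroken : Base
{
    DerivedBroken() = default; // defined as deleted: Base cannot be default-constructed
};

struct DerivedFixed : Base
{
    DerivedFixed()
        : Base(Spec::NONE) // the fix: forward an explicit spec, as ReluBackprop now does
    {
    }
};

int main()
{
    // DerivedBroken broken; // error: call to deleted default constructor
    DerivedFixed fixed;      // compiles
    (void)fixed;
    return 0;
}
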
@@ -84,6 +84,7 @@ namespace ngraph
                 ActivationFunction(ActivationFunctionType f, float alpha, float beta);
                 ActivationFunction(ActivationFunctionType f, float alpha);
                 ActivationFunction(ActivationFunctionType f);
+                ActivationFunction() = default;
                 ///
                 /// \brief Calls stored activation function with provided node argument.
...
@@ -56,6 +56,8 @@ namespace ngraph
                 const std::vector<float>& activations_alpha,
                 const std::vector<float>& activations_beta);
+                RNNCellBase() = default;
+
                 std::size_t get_hidden_size() const { return m_hidden_size; }
                 float get_clip() const { return m_clip; }
                 const std::vector<std::string>& get_activations() const { return m_activations; }
@@ -117,12 +119,12 @@ namespace ngraph
                 ///
                 std::shared_ptr<Node> clip(const Output<Node>& data) const;
-            private:
-                const std::size_t m_hidden_size;
-                const float m_clip;
-                const std::vector<std::string> m_activations;
-                const std::vector<float> m_activations_alpha;
-                const std::vector<float> m_activations_beta;
+            protected:
+                std::size_t m_hidden_size;
+                float m_clip;
+                std::vector<std::string> m_activations;
+                std::vector<float> m_activations_alpha;
+                std::vector<float> m_activations_beta;
             };
         } // namespace util
     } // namespace op
...
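
The RNNCellBase members lose their const qualifiers and move from private to protected for the same reason: RNNCellBase() = default; is only usable when no member is const, and a derived cell that is default-constructed needs to fill the fields in afterwards. A short sketch of the resulting usage pattern, with HypotheticalCell as an invented derived class (not an actual ngraph op):

#include <cstddef>
#include <string>
#include <vector>

// Hedged sketch: mirrors the reshaped base; HypotheticalCell is invented.
struct RNNCellBaseSketch
{
    RNNCellBaseSketch() = default; // viable only because no member is const
protected:
    std::size_t m_hidden_size = 0;
    float m_clip = 0.0f;
    std::vector<std::string> m_activations;
    std::vector<float> m_activations_alpha;
    std::vector<float> m_activations_beta;
};

struct HypotheticalCell : RNNCellBaseSketch
{
    HypotheticalCell() = default;

    // Only possible because the base members are now protected and non-const.
    void restore(std::size_t hidden_size, float clip)
    {
        m_hidden_size = hidden_size;
        m_clip = clip;
        m_activations = {"tanh"};
    }
};

int main()
{
    HypotheticalCell cell; // default construction compiles
    cell.restore(128, 0.0f);
    return 0;
}
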