Commit 13d63f21 authored by Scott Cyphers

Transformers don't need to export symbols.

parent d8d940d0
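
Note on the change: CPU_BACKEND_API and NGRAPH_API are symbol-visibility macros that expand to the platform's shared-library export/import attribute when the backend is built as a DLL/shared object. The sketch below only illustrates the usual shape of such a visibility header; it is not the actual contents of ngraph/runtime/cpu/cpu_backend_visibility.h, and CPU_BACKEND_DLL_EXPORTS is a hypothetical name for the build-time define.

    // Illustrative only: the common export/import macro pattern, assuming a
    // build-time define (here called CPU_BACKEND_DLL_EXPORTS) that is set
    // while compiling the backend library itself.
    #if defined(_WIN32)
    #define CPU_BACKEND_DLL_EXPORT __declspec(dllexport)
    #define CPU_BACKEND_DLL_IMPORT __declspec(dllimport)
    #else
    #define CPU_BACKEND_DLL_EXPORT __attribute__((visibility("default")))
    #define CPU_BACKEND_DLL_IMPORT
    #endif

    #ifdef CPU_BACKEND_DLL_EXPORTS
    #define CPU_BACKEND_API CPU_BACKEND_DLL_EXPORT
    #else
    #define CPU_BACKEND_API CPU_BACKEND_DLL_IMPORT
    #endif

Dropping the macro from a declaration only stops that symbol from being exported outside the backend's shared library; since the graph transformers that reference these ops are compiled into the same library, exported linkage is unnecessary, which appears to be what the commit message means.
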
@@ -17,7 +17,6 @@
#pragma once
#include "ngraph/op/op.hpp"
-#include "ngraph/runtime/cpu/cpu_backend_visibility.h"
namespace ngraph
{
@@ -35,7 +34,6 @@ namespace ngraph
class BatchMatMulTranspose : public Op
{
public:
-CPU_BACKEND_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
/// \brief Constructs a batch of matmul product operation.
......
@@ -30,7 +30,6 @@ namespace ngraph
class BatchNormTrainingRelu : public Op
{
public:
-CPU_BACKEND_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
CPU_BACKEND_API BatchNormTrainingRelu(double eps,
@@ -60,7 +59,6 @@ namespace ngraph
class BatchNormInferenceRelu : public Op
{
public:
-CPU_BACKEND_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
BatchNormInferenceRelu(double eps,
......
@@ -19,7 +19,6 @@
#include "ngraph/node.hpp"
#include "ngraph/op/op.hpp"
#include "ngraph/op/util/unary_elementwise_arithmetic.hpp"
-#include "ngraph/runtime/cpu/cpu_backend_visibility.h"
namespace ngraph
{
@@ -30,7 +29,6 @@ namespace ngraph
class BoundedRelu : public ngraph::op::util::UnaryElementwiseArithmetic
{
public:
-CPU_BACKEND_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
/// \brief Constructs a BoundedRelu operation.
......
@@ -18,7 +18,6 @@
#include "ngraph/op/convolution.hpp"
#include "ngraph/op/op.hpp"
-#include "ngraph/runtime/cpu/cpu_backend_visibility.h"
namespace ngraph
{
@@ -27,7 +26,6 @@ namespace ngraph
class ConvolutionAdd : public Op
{
public:
-CPU_BACKEND_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
ConvolutionAdd(const std::shared_ptr<op::Convolution>& conv,
......
@@ -28,7 +28,6 @@ namespace ngraph
class ConvolutionRelu : public Op
{
public:
-CPU_BACKEND_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
CPU_BACKEND_API ConvolutionRelu(const std::shared_ptr<op::Convolution>& conv);
......
@@ -35,7 +35,6 @@ namespace ngraph
class ConvertLayout : public ngraph::op::Op
{
public:
-CPU_BACKEND_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
CPU_BACKEND_API ConvertLayout(
......
@@ -18,7 +18,6 @@
#include "ngraph/op/convolution.hpp"
#include "ngraph/op/op.hpp"
-#include "ngraph/runtime/cpu/cpu_backend_visibility.h"
namespace ngraph
{
@@ -28,7 +27,6 @@ namespace ngraph
class DeconvolutionBias : public Op
{
public:
-CPU_BACKEND_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
/// \brief Constructs a batched-convolution data batch-backprop operation.
......
@@ -17,7 +17,6 @@
#pragma once
#include "ngraph/op/op.hpp"
-#include "ngraph/runtime/cpu/cpu_backend_visibility.h"
#include "ngraph/util.hpp"
namespace ngraph
@@ -27,7 +26,6 @@ namespace ngraph
class Dropout : public Op
{
public:
-CPU_BACKEND_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
Dropout(const Output<Node>& input,
......
@@ -17,7 +17,6 @@
#pragma once
#include "ngraph/op/fused/group_conv.hpp"
#include "ngraph/op/op.hpp"
-#include "ngraph/runtime/cpu/cpu_backend_visibility.h"
namespace ngraph
{
@@ -28,7 +27,6 @@ namespace ngraph
class GroupConvolutionBias : public Op
{
public:
-CPU_BACKEND_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
GroupConvolutionBias(const std::shared_ptr<op::GroupConvolution>& conv,
......
@@ -20,7 +20,6 @@
#include <vector>
#include "ngraph/op/op.hpp"
-#include "ngraph/runtime/cpu/cpu_backend_visibility.h"
namespace ngraph
{
@@ -33,7 +32,6 @@ namespace ngraph
class HalideOp : public ngraph::op::Op
{
public:
-CPU_BACKEND_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
HalideOp(const OutputVector& args,
......
@@ -19,7 +19,6 @@
#include "ngraph/node.hpp"
#include "ngraph/op/op.hpp"
#include "ngraph/op/util/unary_elementwise_arithmetic.hpp"
-#include "ngraph/runtime/cpu/cpu_backend_visibility.h"
namespace ngraph
{
@@ -31,7 +30,6 @@ namespace ngraph
class CPULeakyRelu : public ngraph::op::util::UnaryElementwiseArithmetic
{
public:
-CPU_BACKEND_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
/// \brief Constructs a CPULeakyRelu operation.
......
@@ -17,7 +17,6 @@
#pragma once
#include "ngraph/op/op.hpp"
-#include "ngraph/runtime/cpu/cpu_backend_visibility.h"
#include "ngraph/runtime/cpu/op/rnn_utils.hpp"
#include "ngraph/util.hpp"
@@ -28,7 +27,6 @@ namespace ngraph
class Lstm : public Op
{
public:
-CPU_BACKEND_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
// INPUTS:
......
@@ -27,7 +27,6 @@ namespace ngraph
class MatmulBias : public Op
{
public:
-CPU_BACKEND_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
CPU_BACKEND_API MatmulBias(const Output<Node>& W,
......
@@ -32,7 +32,6 @@ namespace ngraph
class MaxPoolWithIndices : public Op
{
public:
-CPU_BACKEND_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
CPU_BACKEND_API MaxPoolWithIndices(const Output<Node>& arg,
@@ -68,7 +67,6 @@ namespace ngraph
class MaxPoolWithIndicesBackprop : public Op
{
public:
-CPU_BACKEND_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
CPU_BACKEND_API MaxPoolWithIndicesBackprop(const Output<Node>& arg_forward,
......
@@ -19,7 +19,6 @@
#include <utility>
#include "ngraph/op/op.hpp"
-#include "ngraph/runtime/cpu/cpu_backend_visibility.h"
namespace ngraph
{
@@ -28,7 +27,6 @@ namespace ngraph
class QuantizedMatmul : public Op
{
public:
-CPU_BACKEND_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
QuantizedMatmul(const Output<Node>& data,
......
@@ -48,7 +48,6 @@ namespace ngraph
class Rnn : public Op
{
public:
-CPU_BACKEND_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
CPU_BACKEND_API Rnn(const Output<Node>& src_layer,
......
@@ -30,7 +30,6 @@ namespace ngraph
class SigmoidMultiply : public Op
{
public:
-CPU_BACKEND_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
/// Defines valid function types
@@ -69,7 +68,6 @@ namespace ngraph
class SigmoidMultiplyBackprop : public Op
{
public:
-CPU_BACKEND_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
typedef SigmoidMultiply::FunctionType FunctionType;
......
@@ -18,7 +18,6 @@
#include "ngraph/coordinate.hpp"
#include "ngraph/op/op.hpp"
-#include "ngraph/runtime/cpu/cpu_backend_visibility.h"
#include "ngraph/strides.hpp"
namespace ngraph
@@ -51,7 +50,6 @@ namespace ngraph
class UpdateSlice : public Op
{
public:
-CPU_BACKEND_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
/// \brief Constructs a tensor slice update operation.
......
@@ -39,7 +39,6 @@ namespace ngraph
class ngraph::runtime::plaidml::op::Convolution final : public ngraph::op::Op
{
public:
-NGRAPH_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
Convolution(std::shared_ptr<ngraph::op::Convolution> src,
@@ -66,7 +65,6 @@ private:
class ngraph::runtime::plaidml::op::ConvolutionBackpropData final : public ngraph::op::Op
{
public:
-NGRAPH_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
ConvolutionBackpropData(std::shared_ptr<ngraph::op::ConvolutionBackpropData> src,
@@ -93,7 +91,6 @@ private:
class ngraph::runtime::plaidml::op::ConvolutionBackpropFilters final : public ngraph::op::Op
{
public:
-NGRAPH_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
ConvolutionBackpropFilters(std::shared_ptr<ngraph::op::ConvolutionBackpropFilters> src,
......
@@ -40,7 +40,6 @@ namespace ngraph
class ngraph::runtime::plaidml::op::ImplicitBroadcast final : public ngraph::op::Op
{
public:
-NGRAPH_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
ImplicitBroadcast(const Output<Node>& input, const Shape& shape);
......
@@ -39,7 +39,6 @@ namespace ngraph
class ngraph::runtime::plaidml::op::Replicate final : public ngraph::op::Op
{
public:
-NGRAPH_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
Replicate(const Output<Node>& arg, std::size_t replication_axis, std::size_t replication_count);
......
@@ -38,7 +38,6 @@ namespace ngraph
class ngraph::runtime::plaidml::op::Winograd final : public ngraph::op::Op
{
public:
-NGRAPH_API
static const std::string type_name;
const std::string& description() const override { return type_name; }
Winograd(std::shared_ptr<Convolution> conv, const OutputVector& args);
......
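
All of the hunks above follow the same two patterns: the standalone CPU_BACKEND_API / NGRAPH_API line in front of the static type_name member is deleted, and in several headers the now-unneeded cpu_backend_visibility.h include appears to be dropped as well. A minimal, self-contained before/after sketch (the Op stand-in and the empty macro are placeholders so the excerpt compiles on its own; members and constructors from the real headers are elided):

    #include <string>

    // Placeholders standing in for ngraph/op/op.hpp and the visibility header.
    #define CPU_BACKEND_API /* expands to an export/import attribute in the real header */
    class Op
    {
    public:
        virtual ~Op() = default;
        virtual const std::string& description() const = 0;
    };

    // Before this commit: type_name carried the export macro.
    class BatchMatMulTransposeBefore : public Op
    {
    public:
        CPU_BACKEND_API
        static const std::string type_name;
        const std::string& description() const override { return type_name; }
    };
    const std::string BatchMatMulTransposeBefore::type_name{"BatchMatMulTranspose"};

    // After this commit: the standalone macro line is gone; members that were
    // individually annotated (e.g. CPU_BACKEND_API ConvolutionRelu(...)) keep
    // their annotation.
    class BatchMatMulTransposeAfter : public Op
    {
    public:
        static const std::string type_name;
        const std::string& description() const override { return type_name; }
    };
    const std::string BatchMatMulTransposeAfter::type_name{"BatchMatMulTranspose"};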