Commit c92cabf6 authored by Scott Cyphers

Use includes relative to includer

Add a few using type aliases for things that aren't class-worthy, to help with {} initializers
parent 7d2d0850
// ----------------------------------------------------------------------------
// Copyright 2017 Nervana Systems Inc.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------
#pragma once
#include <memory>
#include <vector>
#include <set>
// Names for types that aren't worth giving their own classes
namespace ngraph
{
    class Node;
    class Parameter;

    /// Zero or more nodes
    using Nodes = std::vector<std::shared_ptr<Node>>;

    /// A set of indices, for example, reduction axes
    using IndexSet = std::set<size_t>;

    /// A list of parameters
    using Parameters = std::vector<std::shared_ptr<Parameter>>;
}
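Because these aliases name ordinary standard-library containers, call sites can pass brace-initialized argument lists instead of spelling out types like std::vector<std::shared_ptr<Node>>. A minimal sketch of the intent, assuming two already-constructed nodes (the example function and variable names are illustrative and not part of this commit):

    #include "common.hpp"

    using namespace ngraph;

    // Illustrative only: build argument and axis collections from braced lists.
    void example(const std::shared_ptr<Node>& a, const std::shared_ptr<Node>& b)
    {
        Nodes args{a, b};              // instead of std::vector<std::shared_ptr<Node>>{a, b}
        IndexSet reduction_axes{0, 2}; // e.g. axes to reduce over
    }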
@@ -14,10 +14,10 @@
 #pragma once
-#include "ngraph/node.hpp"
-#include "ngraph/op.hpp"
-#include "ngraph/ops/parameter.hpp"
-#include "ngraph/type.hpp"
+#include "node.hpp"
+#include "op.hpp"
+#include "ops/parameter.hpp"
+#include "type.hpp"
 namespace ngraph
 {
...
@@ -18,17 +18,18 @@
 #pragma once
-#include "ngraph/element_type.hpp"
-#include "ngraph/except.hpp"
-#include "ngraph/function.hpp"
-#include "ngraph/node.hpp"
-#include "ngraph/op.hpp"
-#include "ngraph/ops/broadcast.hpp"
-#include "ngraph/ops/concatenate.hpp"
-#include "ngraph/ops/constant.hpp"
-#include "ngraph/ops/convert.hpp"
-#include "ngraph/ops/dot.hpp"
-#include "ngraph/ops/parameter.hpp"
-#include "ngraph/ops/tuple.hpp"
-#include "ngraph/shape.hpp"
-#include "ngraph/type.hpp"
+#include "common.hpp"
+#include "element_type.hpp"
+#include "except.hpp"
+#include "function.hpp"
+#include "node.hpp"
+#include "op.hpp"
+#include "ops/broadcast.hpp"
+#include "ops/concatenate.hpp"
+#include "ops/constant.hpp"
+#include "ops/convert.hpp"
+#include "ops/dot.hpp"
+#include "ops/parameter.hpp"
+#include "ops/tuple.hpp"
+#include "shape.hpp"
+#include "type.hpp"
@@ -20,7 +20,8 @@
 #include <iostream>
-#include "ngraph/type.hpp"
+#include "type.hpp"
+#include "common.hpp"
 namespace ngraph
 {
@@ -37,7 +38,7 @@ namespace ngraph
         using ptr = std::shared_ptr<Node>;
     protected:
-        Node(const std::vector<Node::ptr>& arguments, ValueType::ptr type = nullptr)
+        Node(const Nodes& arguments, ValueType::ptr type = nullptr)
             : TypedValueMixin(type)
             , m_arguments(arguments)
         {
@@ -57,7 +58,7 @@ namespace ngraph
         /// Propagate types and check arguments for consistency
         virtual void propagate_types() = 0;
-        const std::vector<Node::ptr>& arguments() const { return m_arguments; }
+        const Nodes& arguments() const { return m_arguments; }
         const std::multiset<Node*>& users() const { return m_users; }
@@ -75,7 +76,7 @@
         }
     protected:
-        std::vector<Node::ptr> m_arguments;
+        Nodes m_arguments;
         std::multiset<Node*> m_users;
         std::string m_name;
     };
...
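Since the Node constructor now takes const Nodes&, a derived op can forward a brace-initialized argument list directly, which is the pattern the BuiltinOp({arg}) initializers later in this diff rely on. A rough sketch with a hypothetical subclass (ExampleOp is not part of this commit; it assumes node.hpp is included):

    // Hypothetical subclass: {lhs, rhs} converts to Nodes, i.e.
    // std::vector<std::shared_ptr<Node>>, and binds to const Nodes&.
    class ExampleOp : public Node
    {
    public:
        ExampleOp(const Node::ptr& lhs, const Node::ptr& rhs)
            : Node({lhs, rhs})
        {
        }

        void propagate_types() override {}
    };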
@@ -16,9 +16,9 @@
 #include <memory>
-#include "ngraph/node.hpp"
-#include "ngraph/ops/parameter.hpp"
-#include "ngraph/type.hpp"
+#include "node.hpp"
+#include "ops/parameter.hpp"
+#include "type.hpp"
 namespace ngraph
 {
...
@@ -19,13 +19,15 @@ namespace ngraph
     class BroadcastOp : public BuiltinOp
     {
     public:
+        using Axes = std::vector<size_t>;
+
         /**
          ** /param arg The tensor view to be broadcast.
          ** /param shape The shape of the result
          ** /param broadcast_axes The axis positions (0-based) in the result that are being broadcast.
          **        the remaining axes in shape must be the same as the shape of arg.
          **/
-        BroadcastOp(const Node::ptr& arg, const Shape& shape, std::vector<size_t> broadcast_axes)
+        BroadcastOp(const Node::ptr& arg, const Shape& shape, const Axes& broadcast_axes)
             : BuiltinOp({arg})
             , m_shape(shape)
             , m_broadcast_axes(broadcast_axes)
@@ -37,13 +39,13 @@ namespace ngraph
     protected:
         Shape m_shape;
-        std::vector<size_t> m_broadcast_axes;
+        Axes m_broadcast_axes;
     };
     namespace op
     {
         Node::ptr broadcast(const Node::ptr& tensor,
                             const Shape& shape,
-                            const std::vector<size_t>& broadcast_axes);
+                            const BroadcastOp::Axes&& broadcast_axes);
     }
 }
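With the Axes alias and the rvalue-reference overload of op::broadcast, the broadcast axes can be written as a braced list at the call site. A hypothetical usage sketch (broadcast_rows and its argument are illustrative; it also assumes the enclosing ngraph namespace and that Shape can be brace-initialized from a list of dimensions, as the Shape{} usage later in this diff suggests):

    // Illustrative only: broadcast a length-3 vector to a 2x3 result; the braced
    // list {0} materializes a temporary BroadcastOp::Axes for the && parameter.
    Node::ptr broadcast_rows(const Node::ptr& x)
    {
        return op::broadcast(x, Shape{2, 3}, {0});
    }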
@@ -18,13 +18,13 @@ namespace ngraph
 {
     namespace op
     {
-        Node::ptr concatenate(const std::vector<Node::ptr>& args);
+        Node::ptr concatenate(const Nodes& args);
     }
     class ConcatenateOp : public BuiltinOp
     {
     public:
-        ConcatenateOp(const std::vector<Node::ptr>& args)
+        ConcatenateOp(const Nodes& args)
             : BuiltinOp(args)
         {
         }
...
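The same Nodes alias lets callers hand op::concatenate a braced list of nodes directly. A hypothetical call site (concat_pair is illustrative, assuming the enclosing ngraph namespace):

    // Illustrative only: {a, b} constructs a temporary Nodes that binds to const Nodes&.
    Node::ptr concat_pair(const Node::ptr& a, const Node::ptr& b)
    {
        return op::concatenate({a, b});
    }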
@@ -14,7 +14,7 @@
 #pragma once
-#include "ngraph/element_type.hpp"
+#include "../element_type.hpp"
 namespace ngraph
 {
@@ -42,7 +42,7 @@ namespace ngraph
         using ctype = typename T::ctype;
         ScalarConstantOp(typename T::ctype value)
-            : ScalarConstantBaseOp(std::make_shared<TensorViewType>(T::type, ngraph::Shape{}))
+            : ScalarConstantBaseOp(std::make_shared<TensorViewType>(T::type, Shape{}))
             , m_value(value)
         {
         }
...
@@ -14,8 +14,8 @@
 #pragma once
-#include "ngraph/node.hpp"
-#include "ngraph/type.hpp"
+#include "../node.hpp"
+#include "../type.hpp"
 namespace ngraph
 {
...
@@ -18,13 +18,13 @@ namespace ngraph
 {
     namespace op
     {
-        Node::ptr tuple(const std::vector<Node::ptr>& args);
+        Node::ptr tuple(const Nodes& args);
     }
     class TupleOp : public BuiltinOp
     {
     public:
-        TupleOp(const std::vector<Node::ptr>& args)
+        TupleOp(const Nodes& args)
             : BuiltinOp(args)
         {
         }
...
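TupleOp gets the same treatment, so a tuple can likewise be built from a braced list. A hypothetical call site (make_pair_tuple is illustrative, assuming the enclosing ngraph namespace):

    // Illustrative only: the braced list converts to Nodes.
    Node::ptr make_pair_tuple(const Node::ptr& x, const Node::ptr& y)
    {
        return op::tuple({x, y});
    }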
@@ -17,8 +17,8 @@
 #include <memory>
 #include <vector>
-#include "ngraph/element_type.hpp"
-#include "ngraph/shape.hpp"
+#include "element_type.hpp"
+#include "shape.hpp"
 namespace ngraph
 {
...
@@ -25,7 +25,7 @@ using namespace ngraph;
 **/
 Node::ptr ngraph::op::broadcast(const Node::ptr& tensor,
                                 const Shape& shape,
-                                const vector<size_t>& broadcast_axes)
+                                const BroadcastOp::Axes&& broadcast_axes)
 {
     return make_shared<BroadcastOp>(tensor, shape, broadcast_axes);
 }
...