Commit 48015415 authored by Jaikrishnan Menon's avatar Jaikrishnan Menon

Merge branch 'master' into cpu_layout2

parents 02cbfc41 3178d485
# Copyright 2017 Nervana Systems Inc.
# Copyright 2018 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -78,9 +78,6 @@ set(NGRAPH_INSTALL_DOC "${CMAKE_INSTALL_PREFIX}/doc")
# Compiler-specific logic...
#-----------------------------------------------------------------------------------------------
# Default values...
set(NGRAPH_CXX_WARNING_FLAGS "")
# Compiler-specific logic...
if ("${CMAKE_CXX_COMPILER_ID}" MATCHES "^(Apple)?Clang$")
message( STATUS "Setting clang flags...")
@@ -107,18 +104,22 @@ endif()
# GPU support
#-----------------------------------------------------------------------------------------------
# Setup CUDA and cuDNN if NGRAPH_GPU_ENABLE=TRUE
if(NGRAPH_GPU_ENABLE)
find_package(CUDA 8 REQUIRED)
find_package(CUDNN 5 QUIET REQUIRED)
include_directories(${CUDA_INCLUDE_DIRS} ${CUDNN_INCLUDE_DIR} ${LLVM_INCLUDE_DIR})
if (CMAKE_C_COMPILER_ID STREQUAL "GNU" AND
find_package(CUDA 8 QUIET)
if(CUDA_FOUND AND (NOT DEFINED NGRAPH_GPU_ENABLE OR NGRAPH_GPU_ENABLE))
message(STATUS "GPU Backend Enabled")
set(NGRAPH_GPU_ENABLE TRUE)
find_package(CUDNN 5 QUIET REQUIRED)
include_directories(SYSTEM ${CUDA_INCLUDE_DIRS} ${CUDNN_INCLUDE_DIR} ${LLVM_INCLUDE_DIR})
if (CMAKE_C_COMPILER_ID STREQUAL "GNU" AND
NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 6.0 AND
CUDA_HOST_COMPILER STREQUAL CMAKE_C_COMPILER)
message(FATAL_ERROR
message(FATAL_ERROR
"CUDA 8.0 is not compatible with GCC version >= 6.\n"
"Please select a correct compiler version\n"
)
endif()
elseif(NGRAPH_GPU_ENABLE)
message(FATAL_ERROR "GPU was required but CUDA library was not found")
endif()
#-----------------------------------------------------------------------------------------------
......
@@ -19,5 +19,6 @@ RUN pip install --upgrade pip
# install Sphinx with pip to get the updated version 1.6.5;
# this allows `make html` builds under the doc/source directory as an interim build process
RUN pip install sphinx
RUN pip install breathe
WORKDIR /home
@@ -10,25 +10,20 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
if ("${NGRAPH_BUILD_DOCS}" MATCHES "^ON$")
add_custom_target( docs
COMMENT "Build all of the documentation types selected during CMake configuration."
)
add_custom_target( docs
COMMENT "Build all of the documentation types selected during CMake configuration."
)
set(DOCS_TARGET_IS_EMPTY TRUE)
add_subdirectory( doxygen )
add_subdirectory( sphinx )
if (DOCS_TARGET_IS_EMPTY)
add_custom_target( docs-is-noop-error
add_subdirectory( doxygen )
add_subdirectory( sphinx )
else()
add_custom_target( docs
COMMAND echo
COMMAND echo "The 'docs' target does nothing because every kind of doc was disabled during configuration"
COMMAND echo "The 'docs' target is disabled. To enable the building of documentation, re-run cmake with the option -DNGRAPH_BUILD_DOCS=ON."
COMMAND echo
COMMAND false
VERBATIM
)
add_dependencies( docs docs-is-noop-error )
endif()
@@ -11,38 +11,41 @@
# See the License for the specific language governing permissions and
# limitations under the License.
set(NGRAPH_BUILD_DOXYGEN_DOCS FALSE
CACHE BOOL
"The NGraph build system shall contain a target for Doxygen-based docs."
)
find_package(Doxygen REQUIRED)
if ("${NGRAPH_DOXYGEN_WARN_IF_UNDOCUMENTED}" MATCHES "^ON$")
set(DOXYGEN_WARN_IF_UNDOCUMENTED YES)
else()
set(DOXYGEN_WARN_IF_UNDOCUMENTED NO)
endif()
if (NGRAPH_BUILD_DOXYGEN_DOCS)
find_package(Doxygen REQUIRED)
set(DOXYGEN_IN "${CMAKE_CURRENT_SOURCE_DIR}/Doxyfile.in")
set(DOXYGEN_OUT "${CMAKE_CURRENT_BINARY_DIR}/Doxyfile")
configure_file("${DOXYGEN_IN}" "${DOXYGEN_OUT}" @ONLY)
add_custom_target(doxygen-docs
ALL
COMMAND "${DOXYGEN_EXECUTABLE}" "${DOXYGEN_OUT}"
WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}"
COMMENT "Generating documentation with Doxygen"
VERBATIM )
add_dependencies( docs doxygen-docs )
set(DOCS_TARGET_IS_EMPTY FALSE PARENT_SCOPE)
install(
DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/html/"
DESTINATION "${NGRAPH_INSTALL_DOC}/api-reference/html"
OPTIONAL
)
install(
DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/latex/"
DESTINATION "${NGRAPH_INSTALL_DOC}/api-reference/latex"
OPTIONAL
)
if ("${NGRAPH_DOXYGEN_QUIET}" MATCHES "^ON$")
set(DOXYGEN_QUIET YES)
else()
set(DOXYGEN_QUIET NO)
endif()
set(DOXYGEN_IN "${CMAKE_CURRENT_SOURCE_DIR}/Doxyfile.in")
set(DOXYGEN_OUT "${CMAKE_CURRENT_BINARY_DIR}/Doxyfile")
configure_file("${DOXYGEN_IN}" "${DOXYGEN_OUT}" @ONLY)
add_custom_target(doxygen-docs
ALL
COMMAND "${DOXYGEN_EXECUTABLE}" "${DOXYGEN_OUT}"
WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}"
COMMENT "Generating documentation with Doxygen"
VERBATIM )
add_dependencies( docs doxygen-docs )
install(
DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/html/"
DESTINATION "${NGRAPH_INSTALL_DOC}/api-reference/html"
OPTIONAL
)
install(
DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/latex/"
DESTINATION "${NGRAPH_INSTALL_DOC}/api-reference/latex"
OPTIONAL
)
PROJECT_NAME = "ngraph++"
PROJECT_BRIEF = "Nervana graph compiler"
PROJECT_NAME = "Intel® nGraph™ library"
PROJECT_BRIEF = "Intel® nGraph™ library"
OUTPUT_DIRECTORY = @CMAKE_CURRENT_BINARY_DIR@
INPUT = @CMAKE_SOURCE_DIR@/src
RECURSIVE = YES
EXCLUDE_PATTERNS = json.hpp
USE_MATHJAX = YES
GENERATE_XML = YES
WARN_IF_UNDOCUMENTED = @DOXYGEN_WARN_IF_UNDOCUMENTED@
QUIET = @DOXYGEN_QUIET@
# Minimal makefile for Sphinx documentation
# Robust Makefile for Sphinx documentation
#
# You can set these variables from the command line.
@@ -17,4 +17,113 @@ help:
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
\ No newline at end of file
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
doxy-code:
$(Q)(cat ngraph.doxyfile ; echo "STRIP_FROM_PATH=${NGRAPH_BASE}" ) | doxygen - 2>&1 | tee doc.log
doxy: doxy-code
clean:
@rm -rf $(BUILDDIR)/*
@rm -rf html
@rm -rf xml
@rm -rf doxygen
@rm -rf latex
htmldocs: doxy html
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
json: prep
$(SPHINXBUILD) -t $(DOC_TAG) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@rm -rf samples
@rm -rf boards
@echo
@echo "Build finished; now you can process the JSON files."
applehelp:
$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
@echo
@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
@echo "N.B. You won't be able to view it unless you put it in" \
"~/Library/Documentation/Help or install it in your application" \
"bundle."
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/ngraph"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/ngraph"
@echo "# devhelp"
epub:
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
latexpdfja:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
texinfo:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
info:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
$(MAKE) -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
gettext:
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
coverage:
$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
@echo "Testing of coverage in the sources finished, look at the " \
"results in $(BUILDDIR)/coverage/python.txt."
# Maintainer: Gavin Lloyd
# https://github.intel.com/gavinllo/ttf-neo-sans-intel
pkgname=ttf-neo-sans-intel
pkgver=1.00
pkgrel=3
pkgdesc='Versatile, futuristic typeface for Intel-branded material'
arch=('any')
depends=('fontconfig' 'xorg-font-utils')
source=('NeoSansIntel-Italic.ttf'
'NeoSansIntel-LightItalic.ttf'
'NeoSansIntel-Light.ttf'
'NeoSansIntel-MediumItalic.ttf'
'NeoSansIntel-Medium.ttf'
'NeoSansIntel.ttf')
sha256sums=('be2f036d58320bd0fab7cca7327b806840ddfedfdc4e44a520a85bd53a1ed7b3'
'ce45deb38ad2749ba25cbb76084955e34a86f627043f1f0f8f8073720115545c'
'd522c9c3905532680f8bb8068fa340200d2e5e45376ea89d97bcc8edbce8eff8'
'61b3ce0ed96b6f343c8ac0a94471ed504708782bee7d9df88fadc564640ffbba'
'6cd878034142c390eeb98d2a17ee1b949c2f8ded0a8684d3b17e0fe4203a8fd8'
'303bc44874e23a563775e5d463a6ec3dd7bdfc7948fa95d65a45fa965bf5ee28')
package() {
install -d $pkgdir/usr/share/fonts/TTF/
install -m644 *.ttf $pkgdir/usr/share/fonts/TTF/
}
.. about:
About
=====
Welcome to the Intel nGraph project, an open source C++ library for developers
of :abbr:`Deep Learning (DL)` systems and frameworks. Here you will find
a suite of components, documentation, and APIs that can be used with
:abbr:`Deep Neural Network (DNN)` models defined in a variety of frameworks.
The nGraph library translates a framework’s representation of computations into
a neutral :abbr:`Intermediate Representation (IR)` designed to promote
computational efficiency on target hardware; it works on Intel and non-Intel
platforms.
.. figure:: graphics/fig.jpeg
The *nGraph core* uses a strongly-typed and platform-neutral stateless graph
representation for computations. Each node, or *op*, in the graph corresponds
to one step in a computation, where each step produces zero or more tensor
outputs from zero or more tensor inputs.
There is a *framework bridge* for each supported framework which acts as
an intermediary between the *nGraph core* and the framework. A *transformer*
plays a similar role between the nGraph core and the various execution
platforms.
Transformers compile the graph using a combination of generic and
platform-specific graph transformations. The result is a function that
can be executed from the framework bridge. Transformers also allocate
and deallocate, as well as read and write, tensors under direction of the
bridge.
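
To make the graph-of-ops idea concrete, here is a minimal sketch of a stateless
node that produces tensor outputs (each with an element type and a shape) from
tensor inputs. The names used here (``TensorType``, ``Node``, ``make_add``) are
invented for illustration only; they are not the nGraph API.

.. code-block:: cpp

   // Illustrative toy types only -- not the nGraph API.
   #include <cstddef>
   #include <memory>
   #include <string>
   #include <vector>

   // Each tensor output has an element type and a shape.
   struct TensorType
   {
       std::string element_type;        // e.g. "float32"
       std::vector<std::size_t> shape;  // e.g. {2, 3}
   };

   // A node ("op") is stateless: its outputs are determined entirely by
   // its inputs and its constant attributes.
   struct Node
   {
       std::string op_name;                        // e.g. "Parameter", "Add"
       std::vector<std::shared_ptr<Node>> inputs;  // zero or more tensor inputs
       std::vector<TensorType> outputs;            // zero or more tensor outputs
   };

   // One step of a computation: an "Add" whose output type is inferred
   // from its first argument.
   std::shared_ptr<Node> make_add(const std::shared_ptr<Node>& a,
                                  const std::shared_ptr<Node>& b)
   {
       return std::make_shared<Node>(Node{"Add", {a, b}, {a->outputs.at(0)}});
   }

   int main()
   {
       auto x = std::make_shared<Node>(Node{"Parameter", {}, {{"float32", {2, 3}}}});
       auto y = std::make_shared<Node>(Node{"Parameter", {}, {{"float32", {2, 3}}}});
       auto sum = make_add(x, y);  // two tensor inputs -> one tensor output
       return sum->outputs.size() == 1 ? 0 : 1;
   }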
For this early |release| release, we provide framework integration guides
for:

* :ref:`mxnet_intg`,
* :ref:`tensorflow_intg`, and
* the neon™ `frontend`_ framework, for training GPU-performant models.
Integration guides for each of these other frameworks are tentatively
forthcoming and/or open to the community for contributions and sample
documentation:
* `Chainer`_,
* `PyTorch`_,
* `Caffe2`_, and
* Frameworks not yet written (for algorithms that do not yet exist).
.. _Caffe2: https://github.com/caffe2/
.. _PyTorch: http://pytorch.org/
.. _Chainer: https://chainer.org/
.. _frontend: http://neon.nervanasys.com/index.html/
@@ -3,4 +3,8 @@
API
###
.. TODO don't add Python APIs that will break the build.
\ No newline at end of file
.. TODO don't add Python APIs that will break the build.
Sections
********
@@ -22,10 +22,11 @@ This script does *not* modify the source code.
Core Ops
--------
The library defines a set of core ops. Other ops may be added to the core when
they have sufficient documentation and examples of practical (or potentially
practical) use.
Our design philosophy is that the graph is not a script for running kernels; rather,
the graph should describe the computation in terms of ops that are building blocks,
and compilation should match these ops to appropriate kernels for the backend(s) in use.
Thus, we expect that adding core ops should be infrequent. Instead, functionality should
be added by adding functions that build sub-graphs from existing core ops.
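
As a rough sketch of that philosophy (using invented names, not actual nGraph
classes): a "mean" need not become a core op, because it can be expressed as a
helper function that builds a small sub-graph from existing core ops such as
``Sum`` and ``Divide``.

.. code-block:: cpp

   // Toy sketch, not the nGraph API: composing functionality from core ops.
   #include <memory>
   #include <string>
   #include <utility>
   #include <vector>

   struct Node
   {
       std::string op;                           // core op name, e.g. "Sum"
       std::vector<std::shared_ptr<Node>> args;  // input nodes
   };

   using NodePtr = std::shared_ptr<Node>;

   NodePtr make(std::string op, std::vector<NodePtr> args)
   {
       return std::make_shared<Node>(Node{std::move(op), std::move(args)});
   }

   // "Mean" is not a new core op; it is a function that builds a sub-graph
   // from the existing core ops Sum and Divide.
   NodePtr make_mean(const NodePtr& data, const NodePtr& element_count)
   {
       return make("Divide", {make("Sum", {data}), element_count});
   }

   int main()
   {
       auto data = make("Parameter", {});
       auto n = make("Constant", {});
       auto mean = make_mean(data, n);  // Divide(Sum(data), n)
       return mean->op == "Divide" ? 0 : 1;
   }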
Coding style
......
@@ -34,7 +34,8 @@ needs_sphinx = '1.6.5'
extensions = ['sphinx.ext.mathjax',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.autodoc'
'sphinx.ext.autodoc',
'breathe'
]
# Add any paths that contain templates here, relative to this directory.
@@ -62,9 +63,9 @@ author = 'Intel Corporation'
# built documents.
#
# The short X.Y version.
version = '0.5.1'
version = 'alpha'
# The full version, including alpha/beta/rc tags.
release = '0.5.1'
release = 'alpha'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@@ -189,6 +190,16 @@ texinfo_documents = [
html_add_permalinks = ""
breathe_projects = {
"nGraph": "../../../build/doc/doxygen/xml",
}
breathe_default_project = "nGraph"
breathe_projects = {
"nGraph": "xml"
}
rst_epilog = u"""
.. |codename| replace:: Intel nGraph
......
@@ -27,7 +27,7 @@ with respect to additions or feature requests.
If you prefer to use a containerized application, like Jupyter\* notebooks,
Google Docs\*, or MS Word\* to write and share documentation contributions,
you can convert the ``doc/source/.rst`` files to another format with a tool
you can convert the ``doc/sphinx/source/*.rst`` files to another format with a tool
like ``pypandoc`` and share a link to your docs on our `wiki`_.
Another option is to fork the `ngraph repo`_, essentially snapshotting it at
@@ -38,8 +38,7 @@ our wiki.
.. note:: Please do not submit Jupyter* notebook code to the Intel nGraph library
repos; best practice is to maintain any project-specific examples, tests, or
walk-throughs separately. Alternatively, you may wish to upstream documentation
contributions directly to whatever frontend framework supports the rendering and
reproducibility of your example.
contributions directly to whatever frontend framework supports your example.
@@ -126,21 +125,26 @@ Build the Documentation
Right now the minimal version of Sphinx needed to build the documentation is
Sphinx v. 1.6.5. This can be installed with `pip3` either to a virtual environment, or
to your base system if you plan to contribute much to docs.
Sphinx v. 1.6.5. This can be installed with `pip3`, either to a virtual
environment, or to your base system if you plan to contribute much to docs.
`Breathe`_ can also be installed to build C++ API documentation (currently WIP).
To build documentation locally, run:
.. code-block:: console
$ pip3 install [-I] Sphinx==1.6.5 [--user]
$ pip3 install [-I] breathe [--user]
$ cd doc/sphinx/
$ make html
For tips similar to this, see the `sphinx`_ stable reST documentation.
.. _ngraph repo: https://github.com/NervanaSystems/ngraph/
.. _ngraph repo: https://github.com/NervanaSystems/ngraph-cpp/
.. _documentation repo: https://github.com/NervanaSystems/ngraph/tree/master/doc
.. _sphinx: http://www.sphinx-doc.org/en/stable/rest.html
.. _wiki: https://github.com/NervanaSystems/ngraph/wiki/
.. _Breathe: https://breathe.readthedocs.io/en/latest/
@@ -5,14 +5,35 @@ Glossary
.. glossary::
function graph
The Intel nGraph library uses a function graph to represent an ``op``'s
parameters and results.
op
An op represents an operation. Ops are stateless and have zero or more
inputs and zero or more outputs. Some ops have additional constant
attributes. Every output of an op corresponds to a tensor and has an
element type and a shape. The element types and shapes of the outputs of
an op are determined by the inputs and attributes of the op.
tensors
Tensors are maps from *coordinates* to scalar values, all of the same type,
called the *element type* of the tensor.
parameter
In the context of a function graph, a "paramater" refers
to what "stands in" for an argument in an ``op`` definition.
In the context of a function graph, a "parameter" refers to what "stands
in" for an argument in an ``op`` definition.
result
In the context of a function graph, the term "result" refers to what
stands in for the returned *value*.
stands in for the returned value.
shape
The shape of a tensor is a tuple of non-negative integers that represents an
exclusive upper bound for coordinate values (see the sketch following this glossary).
step
An abstract "action" that produces zero or more tensor outputs from zero or more tensor
inputs. Steps correspond to *ops* that connect *nodes*.
function graph
The Intel nGraph library uses a function graph to represent an ``op``'s
parameters and results.
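
The following minimal sketch (illustrative only, not the nGraph API) shows a
*shape* acting as an exclusive upper bound on tensor *coordinates*; the names
``Shape``, ``Coordinate``, and ``is_valid`` are assumptions made for this example.

.. code-block:: cpp

   // Illustrative only: a shape bounds the coordinates of a tensor's elements.
   #include <cstddef>
   #include <iostream>
   #include <vector>

   using Shape = std::vector<std::size_t>;
   using Coordinate = std::vector<std::size_t>;

   // A coordinate addresses one scalar element; it is valid when every
   // component is strictly less than the corresponding shape component.
   bool is_valid(const Coordinate& c, const Shape& s)
   {
       if (c.size() != s.size())
       {
           return false;
       }
       for (std::size_t i = 0; i < s.size(); ++i)
       {
           if (c[i] >= s[i])
           {
               return false;
           }
       }
       return true;
   }

   int main()
   {
       Shape shape{2, 3};  // a 2x3 tensor holds 6 elements
       std::cout << is_valid({1, 2}, shape)   // 1 (true): both components in bounds
                 << is_valid({2, 0}, shape);  // 0 (false): 2 is not < 2
   }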
.. ---------------------------------------------------------------------------
.. Copyright 2017 Intel Corporation
.. Copyright 2018 Intel Corporation
.. Licensed under the Apache License, Version 2.0 (the "License");
.. you may not use this file except in compliance with the License.
.. You may obtain a copy of the License at
@@ -13,10 +13,23 @@
.. limitations under the License.
.. ---------------------------------------------------------------------------
.. Intel nGraph library core documentation master file, created on Mon Dec 25 13:04:12 2017.
#############################
Intel nGraph library project
#############################
Intel nGraph library
====================
Welcome to the Intel nGraph project, an open source C++ library for developers
of :abbr:`Deep Learning (DL)` systems and frameworks. Here you will find
a suite of components, documentation, and APIs that can be used with
:abbr:`Deep Neural Network (DNN)` models defined in a variety of frameworks.
The nGraph library translates a framework’s representation of computations into
a neutral :abbr:`Intermediate Representation (IR)` designed to promote
computational efficiency on target hardware; it works on Intel and non-Intel
platforms.
For further overview details, see the :doc:`about` page.
=======
.. toctree::
:maxdepth: 1
@@ -26,15 +39,12 @@ Intel nGraph library
installation.rst
testing-libngraph.rst
framework-integration-guides.rst
build-a-functiongraph.rst
graph-basics.rst
.. toctree::
:maxdepth: 1
:caption: Models
:name: Models
training.rst
model-phases.rst
:caption: Algorithms
:name:
.. toctree::
:maxdepth: 2
@@ -48,10 +58,17 @@ Intel nGraph library
autodiff.rst
glossary.rst
.. toctree::
:maxdepth: 1
:caption: Ops
ops/convolution.rst
.. toctree::
:maxdepth: 1
:caption: Project Docs
about.rst
release-notes.rst
code-contributor-README.rst
@@ -68,3 +85,4 @@ Indices and tables
==================
* :ref:`search`
* :ref:`genindex`
\ No newline at end of file
.. model-phases:
.. NOTE this is mostly just placeholder text designed to start a discussion around
the ways we can highlight something other than "run MNIST models" for training
as a feature of the nGraph library.
Phases
======
With the optimizations built into the Intel nGraph library core, you can
train a model and quickly iterate on what you learn from your
original dataset. Once a model has been trained with the nGraph
library, it is essentially "freed" from the original framework that you
wrangled it into, and you can apply different kinds of operations and
tests to further refine it toward the goals of your data science.
.. For example, let's say that you notice the `MNIST` MLP dataset running
with MXNet on nGraph trains itself to 0.997345 or 1.00000 accuracy after
only 10 Epochs. The original model was written to train the dataset for
20 Epochs. This means that there are potentially 10 wasted cycles of
compute power that can be used elsewhere.
.. training:
Training
########
@@ -85,6 +85,7 @@ set (SRC
pass/memory_layout.cpp
pass/memory_visualize.cpp
pass/pass.cpp
pass/reshape_elimination.cpp
pass/visualize_tree.cpp
pattern/matcher.cpp
runtime/aligned_buffer.cpp
@@ -111,7 +112,7 @@ foreach(OP ${OPS})
foreach(LINE ${OP_CLASS})
string(REGEX REPLACE ".*class ([A-Za-z0-9_]+) : public ([A-Za-z0-9_]+).*" "\\1:\\2" CLASS_FOUND ${LINE})
set(OP_CLASS_LIST ${OP_CLASS_LIST} ${CLASS_FOUND})
endforeach(LINE ${OP_CLASS})
endforeach(LINE ${OP_CLASS})
endforeach()
message(STATUS "${CMAKE_CURRENT_BINARY_DIR}/ops_list.txt")
string(REPLACE ";" "\n" OP_CLASS_LINES "${OP_CLASS_LIST}")
@@ -128,14 +129,8 @@ if(NGRAPH_ADDRESS_SANITIZER)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -fsanitize=address -fno-omit-frame-pointer")
endif()
include_directories(
"${NGRAPH_INCLUDE_PATH}"
)
include_directories(
SYSTEM
"${EIGEN_INCLUDE_DIR}"
)
include_directories("${NGRAPH_INCLUDE_PATH}")
include_directories(SYSTEM "${EIGEN_INCLUDE_DIR}")
if (NGRAPH_CPU_ENABLE AND LLVM_INCLUDE_DIR AND
MKLDNN_INCLUDE_DIR)
......