Commit 63d0fd90 authored by Leona C

PR feedback updates

parent 75c441a9
@@ -76,9 +76,7 @@ author = 'Intel Corporation'
version = '0.22'
# The Documentation full version, including alpha/beta/rc tags. Some features
-# available in the latest code will not necessarily be documented first.
-# rc syntax may be tagged; this documentation supports various rc-naming conventions
+# available in the latest code will not necessarily be documented first
release = '0.22.0'
# The language for content autogenerated by Sphinx. Refer to documentation
@@ -107,38 +105,18 @@ todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
-html_title = 'nGraph Compiler stack Documentation'
+html_title = "Documentation for the nGraph Library and Compiler stack"
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+#
html_theme = 'ngraph_theme'
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
# html_theme_options = {}
-html_theme_path = ["../"]
+html_logo = '../ngraph_theme/static/logo.png'
-if tags.has('release'):
-    is_release = True
-    docs_title = 'Docs / %s' %(version) %(release)
-else:
-    is_release = False
-    docs_title = 'Docs / Latest'
-# borrow this from the zephyr docs theme
-html_context = {
-    # 'show_license': html_show_license, we have custom footers to attribute
-    # RTD, WTD, and Sphinx contributors; so we do not enable this
-    'docs_title': docs_title,
-    'is_release': is_release,
-    'theme_logo_only': False,
-    'current_version': version,
-    'versions': ( ("latest", "../"),
-                  ("0.20.0", "/0.20.0/"), #not yet sure how we'll do this
-                  ("0.19.0", "/0.19.0/"),
-                  ("0.18.0", "/0.18.0/"),
-                  ("0.17.0", "/0.17.0/"),
-                  ("0.16.0", "/0.16.0/"),
-                )
-}
-html_logo = '../ngraph_theme/static/favicon.ico'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
@@ -151,6 +129,7 @@ html_favicon = '../ngraph_theme/static/favicon.ico'
html_static_path = ['../ngraph_theme/static']
# Add any paths that contain custom themes here, relative to this directory.
+html_theme_path = ["../"]
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
@@ -165,26 +144,6 @@ html_sidebars = {
}
-# Custom added feature to allow redirecting old URLs
-#
-# list of tuples (old_url, new_url) for pages to redirect
-# (URLs should be relative to document root, only)
-html_redirect_pages = [
-    ('backend-support', 'backends/index'),
-    ('core/core', 'core/overview.rst'),
-    ('core/fusion', 'core/fusion/index'),
-    ('frameworks/mxnet', 'frameworks/mxnet_intg.rst'),
-    ('frameworks/onnx', 'frameworks/onnx_intg.rst'),
-    ('frameworks/tensorflow', 'frameworks/tensorflow_connect.rst'),
-    ('frameworks/paddle', 'frameworks/paddle_integ.rst'),
-    ('inspection/inspection', 'inspection/index'),
-    ('releases/release-notes', 'releases/index'),
-    # ('getting_started/getting_starting', 'getting_started/index'),
-    # mv to framework-specific helper directory
-    ('project/project', 'project/index'),
-    ('python_api/', 'python_api/index'),
-]
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
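Sphinx has no built-in `html_redirect_pages` setting, so the option removed above was backed by project-specific glue. As a rough illustration only (the helper below and the hook it uses are assumptions, not the nGraph project's actual code), such a list of `(old_url, new_url)` tuples could be turned into meta-refresh stubs at the end of an HTML build:

```python
# Hypothetical conf.py glue: after an HTML build, write a meta-refresh stub
# for every (old_url, new_url) pair in html_redirect_pages so old bookmarks
# keep resolving. Names and hook choice are assumptions.
import os

def _write_redirect_pages(app, exception):
    if exception is not None or app.builder.name != 'html':
        return
    for old_url, new_url in app.config.html_redirect_pages:
        old_url = old_url.rstrip('/')
        # Targets may be listed as 'path/page' or 'path/page.rst'.
        target = new_url[:-4] if new_url.endswith('.rst') else new_url
        rel_target = os.path.relpath(target + '.html',
                                     os.path.dirname(old_url) or '.')
        stub_path = os.path.join(app.outdir, old_url + '.html')
        os.makedirs(os.path.dirname(stub_path), exist_ok=True)
        with open(stub_path, 'w') as stub:
            stub.write('<html><head><meta http-equiv="refresh" '
                       'content="0; url=%s"></head></html>\n' % rel_target)

def setup(app):
    # Register the custom option so Sphinx accepts it, then hook the writer
    # into the end of the build.
    app.add_config_value('html_redirect_pages', [], 'html')
    app.connect('build-finished', _write_redirect_pages)
```

Registering the value through `app.add_config_value` keeps Sphinx from warning about an unknown setting.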
@@ -195,11 +154,11 @@ htmlhelp_basename = 'IntelnGraphlibrarydoc'
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
-    # 'papersize': 'letterpaper',
+    'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
-    # 'pointsize': '10pt',
+    'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
@@ -214,11 +173,10 @@ latex_elements = {
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
-    (master_doc, 'nGraphCompilerStack.tex', 'nGraph Compiler Stack Documentation',
-     'Intel Corporation', 'manual'),
+    (master_doc, 'nGraphCompilerStack.tex', u'nGraph Compiler Stack Documentation',
+     u'Intel Corporation', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
@@ -244,22 +202,7 @@ breathe_projects = {
}
rst_epilog = u"""
-.. |codename| replace:: Intel nGraph
-.. |project| replace:: Intel nGraph Library
-.. |InG| replace:: Intel® nGraph
-.. |copy| unicode:: U+000A9 .. COPYRIGHT SIGN
-   :ltrim:
-.. |deg| unicode:: U+000B0 .. DEGREE SIGN
-   :ltrim:
-.. |plusminus| unicode:: U+000B1 .. PLUS-MINUS SIGN
-   :rtrim:
-.. |micro| unicode:: U+000B5 .. MICRO SIGN
-   :rtrim:
-.. |trade| unicode:: U+02122 .. TRADEMARK SIGN
-   :ltrim:
-.. |reg| unicode:: U+000AE .. REGISTERED TRADEMARK SIGN
-   :ltrim:
+.. include:: /replacements.txt
"""
# -- autodoc Extension configuration --------------------------------------
......
@@ -4,29 +4,6 @@
Backend
=======
-* :ref:`generic_backend`
-* :ref:`intelgpu_backend`
-.. _generic_backend:
-Generic backend
----------------
.. doxygenclass:: ngraph::runtime::Backend
   :project: ngraph
   :members:
-.. _intelgpu_backend:
-IntelGPU
---------
-.. doxygenclass:: ngraph::runtime::intelgpu::IntelGPUBackend
-   :project: ngraph
-   :members:
@@ -4,29 +4,10 @@
Executable
==========
-* :ref:`generic_executable`
-* :ref:`intelgpu_executable`
The ``compile`` function on an ``Executable`` has more direct methods to
actions such as ``validate``, ``call``, ``get_performance_data``, and so on.
-.. _generic_executable:
-Generic executable
-------------------
.. doxygenclass:: ngraph::runtime::Executable
   :project: ngraph
   :members:
-.. _intelgpu_executable:
-IntelGPU executable
--------------------
-.. doxygenclass:: ngraph::runtime::intelgpu::IntelGPUExecutable
-   :project: ngraph
-   :members:
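To make the compile-then-call flow described above concrete, here is a hedged sketch that drives a backend through the Python wrapper; the `ng.runtime`/`computation` names are assumed from the project's Python examples rather than taken from this diff, so verify them against the `python/` directory of the version you build.

```python
# Rough sketch: build a tiny graph, compile it on a backend, and call it.
# API names assumed from the ngraph-core Python examples.
import numpy as np
import ngraph as ng

A = ng.parameter(shape=[2, 2], name='A', dtype=np.float32)
B = ng.parameter(shape=[2, 2], name='B', dtype=np.float32)
model = A + B                               # a minimal computation graph

runtime = ng.runtime(backend_name='CPU')    # selects a runtime backend
add = runtime.computation(model, A, B)      # compiles to something callable

result = add(np.ones((2, 2), dtype=np.float32),
             np.full((2, 2), 2.0, dtype=np.float32))
print(result)                               # expected: [[3. 3.] [3. 3.]]
```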
@@ -73,6 +73,9 @@ Additional resources for device or framework-specific configurations:
OpenCL
------
+OpenCL is needed for the :doc:`plaidml-ng-api/index`; this is not needed if
+you have only a CPU backend.
#. Install the latest Linux driver for your system. You can find a list
   of drivers at https://software.intel.com/en-us/articles/opencl-drivers;
   You may need to install `OpenCL SDK`_ in case of an ``libOpenCL.so`` absence.
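As an editorial aside, a quick way to check whether `libOpenCL.so` is already discoverable before reaching for the SDK:

```python
# Check whether an OpenCL ICD loader (libOpenCL) can be found on this system.
import ctypes.util

lib = ctypes.util.find_library("OpenCL")
if lib:
    print("Found OpenCL loader:", lib)   # e.g. 'libOpenCL.so.1'
else:
    print("libOpenCL not found; install the OpenCL SDK or a vendor driver.")
```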
@@ -86,15 +89,16 @@ OpenCL
may, for example, be able to find details at the ``/sys/module/[system]/parameters/`` location.
-nGraph Bridge from TensorFlow*\
+nGraph Bridge from TensorFlow\*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When specified as the generic backend -- either manually or automatically
from a framework -- ``NGRAPH`` defaults to CPU, and it also allows for
additional device configuration or selection.
-Because nGraph can select backends, you may try specifying the ``INTELGPU``
-backend as a runtime environment variable:
+Because nGraph can select backends, specifying the ``INTELGPU``
+backend as a runtime environment variable also works if one is
+present in your system:
:envvar:`NGRAPH_TF_BACKEND="INTELGPU"`
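The same selection can be made from a script by exporting the variable before the bridge initializes. A minimal sketch, assuming the `ngraph_bridge` import name used by the ngraph-bridge examples and the TF 1.x session API of this era:

```python
import os

# Select the backend before TensorFlow and the bridge are imported, so the
# bridge sees it when it initializes (same effect as the shell form above).
os.environ["NGRAPH_TF_BACKEND"] = "INTELGPU"

import tensorflow as tf      # noqa: E402
import ngraph_bridge         # noqa: E402  (import name assumed from ngraph-bridge)

a = tf.constant(5.0)
x = tf.constant([1.0, 2.0, 3.0])
y = tf.constant([4.0, 5.0, 6.0])
axpy = a * x + y             # the a*x + y computation from the axpy.py example

with tf.Session() as sess:   # TF 1.x API, matching the linked example
    print(sess.run(axpy))
```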
@@ -105,14 +109,9 @@ depending on the parameters specified.
NGRAPH_TF_BACKEND="INTELGPU" python3 axpy.py
-* ``NGRAPH_INTELGPU_DUMP_FUNCTION`` -- dumps
-  nGraph’s functions in dot format.
+* ``NGRAPH_INTELGPU_DUMP_FUNCTION`` -- dumps nGraph’s functions
+  in dot format.
-* `` `` --.
-* `` `` --.
-* `` `` --.
.. _axpy.py example: https://github.com/tensorflow/ngraph-bridge/blob/master/examples/axpy.py
.. _OpenCL SDK: https://software.intel.com/en-us/opencl-sdk
@@ -77,7 +77,7 @@ version = '0.22'
# The Documentation full version, including alpha/beta/rc tags. Some features
# available in the latest code will not necessarily be documented first
-release = '0.22-doc'
+release = '0.22.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
......
@@ -37,7 +37,7 @@ documentation.
Activate logtrace-related environment variables
===============================================
-Another generic configuration option is to activate ``NGRAPH_CPU_DEBUG_TRACER``,
+Another configuration option is to activate ``NGRAPH_CPU_DEBUG_TRACER``,
a runtime environment variable that supports extra logging and debug detail.
This is a useful tool for data scientists interested in outputs from logtrace
@@ -175,10 +175,11 @@ Convolution shapes
``OMP_NUM_THREADS``
^^^^^^^^^^^^^^^^^^^
-The best resource for this configuration option is the `gnu.org site`_.
-``OMP_NUM_THREADS`` defaults to the number of logical cores. To check the
-number of cores on your system, you can run the following on the command-line to
-see the details of your CPU:
+The best resource for this configuration option is the Intel® OpenMP\* docs
+at the following link: `Intel OpenMP documentation`_. ``OMP_NUM_THREADS``
+defaults to the number of logical cores. To check the number of cores on your
+system, you can run the following on the command-line to see the details
+of your CPU:
.. code-block:: console
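The body of that console block is collapsed in this hunk; as a hedged Python-side equivalent, the logical-core default that `OMP_NUM_THREADS` falls back to can be inspected (and overridden) like this:

```python
import os

logical = os.cpu_count() or 1     # OMP_NUM_THREADS defaults to this value
print("logical cores:", logical)

# Example override: assuming two hardware threads per physical core, pin
# OpenMP to the physical-core count; tune this for your own workload as the
# surrounding section describes.
os.environ.setdefault("OMP_NUM_THREADS", str(max(1, logical // 2)))
print("OMP_NUM_THREADS =", os.environ["OMP_NUM_THREADS"])
```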
@@ -221,7 +222,7 @@ thus can make more efficient use of the underlying hardware.
.. _PyPI: https://pypi.org/project/ngraph-core
.. _KMP: https://software.intel.com/en-us/node/522691
.. _MKL-DNN: https://github.com/intel/mkl-dnn
-.. _gnu.org site: https://gcc.gnu.org/onlinedocs/libgomp/Environment-Variables.html
+.. _Intel OpenMP documentation: https://www.openmprtl.org/documentation
.. _Movidius: https://www.movidius.com/
.. _BUILDING.md: https://github.com/NervanaSystems/ngraph/blob/master/python/BUILDING.md
.. _GCC wiki for details: https://gcc.gnu.org/wiki/FunctionMultiVersioning
......
.. ---------------------------------------------------------------------------
.. Copyright 2018-2019 Intel Corporation
.. Licensed under the Apache License, Version 2.0 (the "License");
.. you may not use this file except in compliance with the License.
......
@@ -58,7 +58,7 @@ files to another format with a tool like ``pypandoc`` and share a link
to your efforts on our `wiki`_.
Another option is to fork the `ngraph repo`_, essentially snapshotting it at
-that point in time, and to build Jupyter\* notebook or other set of docs around
+that point in time, and to build a Jupyter\* notebook or other set of docs around
it for a specific use case. Add a note on our wiki to show us what you
did; new and novel applications may have their projects highlighted on an
upcoming `ngraph.ai`_ release.
......
@@ -3,9 +3,9 @@
## Building nGraph Python Wheels
If you want to try a newer version of nGraph's Python API than is available
-from PyPI, you can build your own latest version from the source code. This
-process is very similar to what is outlined in our [ngraph_build] instructions
-with two important differences:
+from PyPI, you can build the latest version from source code. This process is
+very similar to what is outlined in our [ngraph_build] instructions with two
+important differences:
1. You must specify: `-DNGRAPH_PYTHON_BUILD_ENABLE=ON` and `-DNGRAPH_ONNX_IMPORT_ENABLE=ON`
   when running `cmake`.
@@ -18,7 +18,7 @@ with two important differences:
After this procedure completes, the `ngraph/build/python/dist` directory should
contain the Python packages of the version you cloned. For example, if you
-checked out and built `0.21`, you may see something like:
+checked out and built `0.21` for Python 3.7, you might see something like:
    $ ls python/dist/
    ngraph-core-0.21.0rc0.tar.gz
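After `pip`-installing one of these artifacts, a quick way to confirm which version actually landed in the environment (assuming only the `ngraph-core` distribution name visible in the listing above):

```python
# Report the installed ngraph-core distribution, if any.
import pkg_resources

try:
    dist = pkg_resources.get_distribution("ngraph-core")
    print(dist.project_name, dist.version)   # e.g. ngraph-core 0.21.0rc0
except pkg_resources.DistributionNotFound:
    print("ngraph-core is not installed in this environment")
```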
......