Commit cbdb405e authored by nmostafa

PR fixes

parent 9f0e1f60
@@ -190,10 +190,10 @@ if (NGRAPH_CPU_ENABLE
set(NGRAPH_INTEL_CPU_ONLY_ENABLE ON)
endif()
if (NGRAPH_MLIR_ENABLE)
if (NGRAPH_MLIR_ENABLE AND (NOT NGRAPH_DEX_ONLY))
#disable code-gen due to sym collision with LLVM
message(STATUS "Forcing NGRAPH_DEX_ONLY to ON")
set(NGRAPH_DEX_ONLY ON)
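# Code-gen and MLIR each bring their own LLVM, which leads to symbol collisions at link time, so the two options are mutually exclusive.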
message(FATAL_ERROR "Cannot build with both MLIR and code-gen ON.\n"
"Use -DNGRAPH_DEX_ONLY=ON and try again. \n")
endif()
if (NGRAPH_DISTRIBUTED_ENABLE)
......
# ******************************************************************************
# Copyright 2019 Intel Corporation
# Copyright 2017-2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -29,26 +29,23 @@ set(MLIR_BUILD_DIR ${MLIR_LLVM_ROOT}/build)
# MLIR has to be pre-built before ngraph build starts
# this will clone and build MLIR during cmake config instead
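# A small super-build project is generated from mlir_fetch.cmake.in and driven with execute_process below, so the LLVM and MLIR trees are cloned and built before this configure step finishes.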
find_program(NINJA ninja)
if (NINJA)
configure_file(${CMAKE_SOURCE_DIR}/cmake/mlir_fetch.cmake.in ${MLIR_PROJECT_ROOT}/CMakeLists.txt)
execute_process(COMMAND "${CMAKE_COMMAND}" -G "${CMAKE_GENERATOR}" .
WORKING_DIRECTORY "${MLIR_PROJECT_ROOT}")
# clone and build llvm
execute_process(COMMAND "${CMAKE_COMMAND}" --build . --target ext_mlir_llvm
WORKING_DIRECTORY "${MLIR_PROJECT_ROOT}")
# clone and build mlir
execute_process(COMMAND "${CMAKE_COMMAND}" --build . --target ext_mlir
WORKING_DIRECTORY "${MLIR_PROJECT_ROOT}")
# point find_package to the pre-built libs
set(LLVM_DIR ${MLIR_LLVM_ROOT}/build/lib/cmake/llvm)
set(MLIR_SRC_INCLUDE_PATH ${MLIR_SOURCE_DIR}/include)
set(MLIR_BIN_INCLUDE_PATH ${MLIR_BUILD_DIR}/projects/mlir/include)
set(MLIR_INCLUDE_PATHS ${MLIR_SRC_INCLUDE_PATH};${MLIR_BIN_INCLUDE_PATH})
else()
message(FATAL_ERROR "Cannot find ninja. Cannot build with NGRAPH_MLIR_ENABLE=ON")
endif()
configure_file(${CMAKE_SOURCE_DIR}/cmake/mlir_fetch.cmake.in ${MLIR_PROJECT_ROOT}/CMakeLists.txt)
execute_process(COMMAND "${CMAKE_COMMAND}" -G "${CMAKE_GENERATOR}" .
WORKING_DIRECTORY "${MLIR_PROJECT_ROOT}")
# clone and build llvm
execute_process(COMMAND "${CMAKE_COMMAND}" --build . --target ext_mlir_llvm
WORKING_DIRECTORY "${MLIR_PROJECT_ROOT}")
# clone and build mlir
execute_process(COMMAND "${CMAKE_COMMAND}" --build . --target ext_mlir
WORKING_DIRECTORY "${MLIR_PROJECT_ROOT}")
# point find_package to the pre-built libs
set(LLVM_DIR ${MLIR_LLVM_ROOT}/build/lib/cmake/llvm)
set(MLIR_SRC_INCLUDE_PATH ${MLIR_SOURCE_DIR}/include)
set(MLIR_BIN_INCLUDE_PATH ${MLIR_BUILD_DIR}/projects/mlir/include)
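# MLIR_INCLUDE_PATHS is a semicolon-separated CMake list combining the checked-out MLIR headers with the headers generated in the build tree.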
set(MLIR_INCLUDE_PATHS ${MLIR_SRC_INCLUDE_PATH};${MLIR_BIN_INCLUDE_PATH})
# ******************************************************************************
# Copyright 2019 Intel Corporation
# Copyright 2017-2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -45,8 +45,8 @@ ExternalProject_Add(
CMAKE_GENERATOR_PLATFORM ${CMAKE_GENERATOR_PLATFORM}
CMAKE_GENERATOR_TOOLSET ${CMAKE_GENERATOR_TOOLSET}
BUILD_COMMAND ${CMAKE_COMMAND} -G Ninja ../llvm -DLLVM_BUILD_EXAMPLES=OFF -DLLVM_ENABLE_CXX1Y=Y -DLLVM_TARGETS_TO_BUILD=host -DLLVM_ENABLE_RTTI=ON -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
COMMAND ${CMAKE_COMMAND} --build . --target check-mlir
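# Configure LLVM in the external build tree and build the check-mlir target; arguments after '--' (here -j8) are passed straight to the underlying build tool to parallelize the build.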
BUILD_COMMAND ${CMAKE_COMMAND} ../llvm -DLLVM_BUILD_EXAMPLES=OFF -DLLVM_ENABLE_CXX1Y=Y -DLLVM_TARGETS_TO_BUILD=host -DLLVM_ENABLE_RTTI=ON -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
COMMAND ${CMAKE_COMMAND} --build . --target check-mlir -- -j8
INSTALL_COMMAND ""
UPDATE_COMMAND ""
SOURCE_DIR ${MLIR_SOURCE_DIR}
......
# ******************************************************************************
# Copyright 2019 Intel Corporation
# Copyright 2017-2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -50,35 +50,35 @@ if (NGRAPH_MLIR_ENABLE)
# Link MLIR libs
target_link_libraries(
mlir_backend PRIVATE
MLIRAnalysis
MLIREDSC
MLIRExecutionEngine
MLIRIR
MLIRLLVMIR
MLIRParser
MLIRPass
MLIRTargetLLVMIR
MLIRTransforms
MLIRSupport
mlir_backend PRIVATE
MLIRAnalysis
MLIREDSC
MLIRExecutionEngine
MLIRIR
MLIRLLVMIR
MLIRParser
MLIRPass
MLIRTargetLLVMIR
MLIRTransforms
MLIRSupport
)
# some libs need whole archive linkage because of Globals static initialization
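# --whole-archive (-all_load on macOS) forces the linker to keep every object from these static libraries, so their global constructors (e.g. static registration code) run even when no symbol is referenced directly.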
function(whole_archive_link target)
if("${CMAKE_SYSTEM_NAME}" STREQUAL "Darwin")
set(link_flags "-Llib -Wl,-all_load ")
FOREACH(LIB ${ARGN})
string(CONCAT link_flags ${link_flags} "${LIB}")
ENDFOREACH(LIB)
else()
set(link_flags "-Llib -Wl,--whole-archive,")
FOREACH(LIB ${ARGN})
string(CONCAT link_flags ${link_flags} "${LIB},")
ENDFOREACH(LIB)
string(CONCAT link_flags ${link_flags} "--no-whole-archive")
endif()
message(STATUS "MLIR Ops link flag: ${link_flags}" )
if("${CMAKE_SYSTEM_NAME}" STREQUAL "Darwin")
set(link_flags "-Llib -Wl,-all_load ")
FOREACH(LIB ${ARGN})
string(CONCAT link_flags ${link_flags} "${LIB}")
ENDFOREACH(LIB)
else()
set(link_flags "-Llib -Wl,--whole-archive,")
FOREACH(LIB ${ARGN})
string(CONCAT link_flags ${link_flags} "${LIB},")
ENDFOREACH(LIB)
string(CONCAT link_flags ${link_flags} "--no-whole-archive")
endif()
message(STATUS "MLIR Ops link flag: ${link_flags}" )
set_target_properties(${target} PROPERTIES LINK_FLAGS ${link_flags})
set_target_properties(${target} PROPERTIES LINK_FLAGS ${link_flags})
endfunction(whole_archive_link)
whole_archive_link(mlir_backend
@@ -87,8 +87,8 @@ if (NGRAPH_MLIR_ENABLE)
)
# Link LLVM libs
target_link_libraries(
mlir_backend PRIVATE
${llvm_libs}
mlir_backend PRIVATE
${llvm_libs}
)
# Link ngraph
@@ -102,17 +102,14 @@ if (NGRAPH_MLIR_ENABLE)
tablegen(MLIR ${ARGV} "-I${MLIR_SRC_INCLUDE_PATH}" "-I${MLIR_BIN_INCLUDE_PATH}")
set(TABLEGEN_OUTPUT ${TABLEGEN_OUTPUT} ${CMAKE_CURRENT_BINARY_DIR}/${ofn} PARENT_SCOPE)
endfunction()
set(MLIR_TABLEGEN_EXE mlir-tblgen)
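# Generate the nGraph dialect op declarations and definitions from the TableGen description in dialect/ops.td.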
set(LLVM_TARGET_DEFINITIONS dialect/ops.td)
ngraph_tablegen(ops.h.inc -gen-op-decls)
ngraph_tablegen(ops.cpp.inc -gen-op-defs)
add_public_tablegen_target(ngraph_ops_gen)
add_dependencies(mlir_backend ngraph_ops_gen)
target_include_directories(mlir_backend PRIVATE ${CMAKE_CURRENT_BINARY_DIR})
endif()
endif()
//*****************************************************************************
// Copyright 2019 Intel Corporation
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -161,12 +161,19 @@ mlir::Type MLIRCompiler::get_mlir_type(const descriptor::Tensor* tensor)
// Converts an nGraph element type into an MLIR type.
mlir::Type MLIRCompiler::get_mlir_type(const element::Type& type)
{
#if !(defined(__GNUC__) && (__GNUC__ == 4 && __GNUC_MINOR__ == 8))
#pragma GCC diagnostic push
#pragma GCC diagnostic error "-Wswitch"
#pragma GCC diagnostic error "-Wswitch-enum"
#endif
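// Promote -Wswitch / -Wswitch-enum to errors so every element::Type_t value must be handled explicitly in the switch below (skipped on GCC 4.8).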
switch (type.get_type_enum())
{
case ngraph::element::Type_t::undefined:
case ngraph::element::Type_t::dynamic:
default: NGRAPH_FAIL() << "MLIR: Unsupported NGraph types"; break;
case ngraph::element::Type_t::bf16: return mlir::NGFloatType::getBF16(&m_context);
case ngraph::element::Type_t::f16: return mlir::NGFloatType::getF16(&m_context);
case ngraph::element::Type_t::f32: return mlir::NGFloatType::getF32(&m_context);
case ngraph::element::Type_t::f64: return mlir::NGFloatType::getF64(&m_context);
case ngraph::element::Type_t::i8: return mlir::NGIntegerType::getInt8(&m_context);
@@ -181,6 +188,10 @@ mlir::Type MLIRCompiler::get_mlir_type(const element::Type& type)
}
NGRAPH_FAIL() << "Unreachable";
return mlir::Type();
#if !(defined(__GNUC__) && (__GNUC__ == 4 && __GNUC_MINOR__ == 8))
#pragma GCC diagnostic pop
#endif
}
void MLIRCompiler::update_tensor_value(descriptor::Tensor* tensor, mlir::Value* value)
......
//*****************************************************************************
// Copyright 2019 Intel Corporation
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
......
//*****************************************************************************
// Copyright 2019 Intel Corporation
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
......
//*****************************************************************************
// Copyright 2019 Intel Corporation
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
......
//*****************************************************************************
// Copyright 2019 Intel Corporation
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
......
//*****************************************************************************
// Copyright 2019 Intel Corporation
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
......
//*****************************************************************************
// Copyright 2019 Intel Corporation
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
......
//*****************************************************************************
// Copyright 2019 Intel Corporation
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
......
//*****************************************************************************
// Copyright 2019 Intel Corporation
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
......
//*****************************************************************************
// Copyright 2019 Intel Corporation
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
......
//*****************************************************************************
// Copyright 2019 Intel Corporation
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
......
//*****************************************************************************
// Copyright 2019 Intel Corporation
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
......
//*****************************************************************************
// Copyright 2019 Intel Corporation
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
......
//*****************************************************************************
// Copyright 2019 Intel Corporation
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -36,7 +36,9 @@ bool MLIRSubgraphExtractionPass::run_on_function(std::shared_ptr<Function> func)
{
// All ops must be supported by MLIR compiler
if (!is_supported_mlir_op(op))
{
return false;
}
if (TI(Parameter) != TI(*op) && TI(Result) != TI(*op))
{
@@ -52,7 +54,9 @@ bool MLIRSubgraphExtractionPass::run_on_function(std::shared_ptr<Function> func)
NodeVector ck_outputs = std::move(get_subgraph_outputs(ck_ops, {} /*exclusions*/));
if (ck_outputs.size() != 1)
{
return false;
}
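// Wrap the extracted ops in a single CompiledKernel node; the MLIR backend compiles and executes this sub-graph as one unit.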
auto ck = std::make_shared<CompiledKernel>(ck_ops, ck_outputs, ck_args);
@@ -81,11 +85,15 @@ bool MLIRSubgraphExtractionPass::run_on_function(std::shared_ptr<Function> func)
bool MLIRSubgraphExtractionPass::is_supported_mlir_op(std::shared_ptr<Node> node)
{
if (TI(Parameter) == TI(*node) || TI(Result) == TI(*node))
{
return true;
}
// supported by backend ?
if (m_supported_ops.find(TI(*node)) == m_supported_ops.end())
{
return false;
}
// check on invariants expected by MLIR backend
@@ -93,7 +101,9 @@ bool MLIRSubgraphExtractionPass::is_supported_mlir_op(std::shared_ptr<Node> node
if (TI(ngraph::op::Dot) == TI(*node))
{
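// Only 2D x 2D Dot (a plain matrix multiplication) is currently supported by the MLIR lowering.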
if (node->get_input_shape(0).size() != 2 || node->get_input_shape(1).size() != 2)
{
return false;
}
}
return true;
}
......
//*****************************************************************************
// Copyright 2019 Intel Corporation
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -32,7 +32,6 @@ namespace ngraph
MLIRSubgraphExtractionPass() {}
bool run_on_function(std::shared_ptr<Function> func) override;
/// Checks if an ngraph node is supported by MLIR backend
/// Currently this check is only valid for CPU backend.
bool is_supported_mlir_op(std::shared_ptr<Node> node);
private:
......
//*****************************************************************************
// Copyright 2018-2019 Intel Corporation
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
......