Browse Source

!8965 Synchronize latest Ascend software suite 09 Dec 2020, with refactored GraphEngine

From: @nicholas_yhr
Reviewed-by: 
Signed-off-by:
tags/v1.1.0
mindspore-ci-bot Gitee 5 years ago
parent
commit
63d5eb215f
14 changed files with 76 additions and 34 deletions
  1. +1
    -1
      CMakeLists.txt
  2. +3
    -0
      build.sh
  3. +39
    -0
      cmake/dependency_graphenginev2.cmake
  4. +3
    -2
      cmake/mind_expression.cmake
  5. +6
    -6
      cmake/package.cmake
  6. +1
    -1
      graphengine
  7. +5
    -8
      mindspore/ccsrc/runtime/device/ascend/executor/ai_core_dynamic_kernel.cc
  8. +2
    -0
      mindspore/ccsrc/runtime/device/ascend/executor/ai_core_dynamic_kernel.h
  9. +4
    -4
      mindspore/ccsrc/runtime/device/ascend/executor/tiling/op_tiling_calculater.cc
  10. +1
    -2
      mindspore/ccsrc/runtime/device/ascend/executor/tiling/op_tiling_calculater.h
  11. +1
    -1
      mindspore/ccsrc/runtime/hccl_adapter/hccl_adapter.h
  12. +3
    -3
      tests/st/model_zoo_tests/transformer/test_transformer.py
  13. +4
    -2
      tests/ut/cpp/runtest.sh
  14. +3
    -4
      tests/ut/cpp/stub/dynamic_shape/dynamic_shape_stub.cc

+ 1
- 1
CMakeLists.txt View File

@@ -71,7 +71,7 @@ set(MS_CCSRC_PATH ${CMAKE_SOURCE_DIR}/mindspore/ccsrc)
set(MS_CCSRC_BUILD_PATH ${BUILD_PATH}/mindspore/mindspore/ccsrc)

if (ENABLE_D OR ENABLE_ACL OR ENABLE_TESTCASES)
include(${CMAKE_SOURCE_DIR}/cmake/dependency_graphengine.cmake)
include(${CMAKE_SOURCE_DIR}/cmake/dependency_graphenginev2.cmake)
endif ()

set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fvisibility=hidden")


+ 3
- 0
build.sh View File

@@ -398,6 +398,9 @@ parse_device
echo "---------------- MindSpore: build start ----------------"
mkdir -pv "${BUILD_PATH}/package/mindspore/lib"
git submodule update --init graphengine
cd "${BASEPATH}/graphengine"
git submodule update --init metadef
cd "${BASEPATH}"
if [[ "X$ENABLE_AKG" = "Xon" ]] && [[ "X$ENABLE_D" = "Xon" || "X$ENABLE_GPU" = "Xon" ]]; then
git submodule update --init --recursive akg
fi


+ 39
- 0
cmake/dependency_graphenginev2.cmake View File

@@ -0,0 +1,39 @@
# Entry point for building the refactored GraphEngine (v2 layout) from the
# `graphengine` git submodule checked out at the repository root.
message(STATUS "Compiling GraphEngine")
set(GE_SOURCE_DIR ${CMAKE_SOURCE_DIR}/graphengine)

# BUILD_PATH is provided by the top-level build scripts; logged for diagnosis.
message(STATUS "[ME] build_path: ${BUILD_PATH}")

# Locate a prebuilt library file and wrap it in an imported target.
#
# Arguments:
#   module - name of the IMPORTED target to create (also prefixes the
#            cache variable <module>_LIBRARY_DIR used by find_library)
#   name   - library file name to search for (e.g. libslog.so, libmmpa.a)
#   path   - directory (or list of directories) to search; a `lib`
#            subdirectory of each path is searched as well
#
# Aborts configuration with FATAL_ERROR when the library cannot be found.
# NOTE(review): the target is declared SHARED IMPORTED even when callers pass
# a static archive (libmmpa.a) — kept as-is to preserve existing behavior.
function(find_submodule_lib module name path)
    find_library(${module}_LIBRARY_DIR NAMES ${name} NAMES_PER_DIR PATHS ${path}
        PATH_SUFFIXES lib
    )
    # find_library stores "<var>-NOTFOUND" on failure, which is falsy in if();
    # testing the variable directly is the idiomatic and robust check, rather
    # than string-comparing against the literal NOTFOUND spelling.
    if(NOT ${module}_LIBRARY_DIR)
        message(FATAL_ERROR "${name} not found in any of following paths: ${path}")
    endif()
    add_library(${module} SHARED IMPORTED)
    set_target_properties(${module} PROPERTIES
        IMPORTED_LOCATION ${${module}_LIBRARY_DIR}
    )
endfunction()

# Only build GraphEngine when an Ascend-related configuration is active.
if (ENABLE_D OR ENABLE_ACL OR ENABLE_TESTCASES)
# Save caller-visible settings so they can be restored after GraphEngine's
# own CMake has run; GraphEngine is configured with its install prefix
# redirected into our build tree and with the Gitee mirror enabled.
set(_ge_tmp_CMAKE_INSTALL_PREFIX ${CMAKE_INSTALL_PREFIX})
set(_ge_tmp_ENABLE_GITEE ${ENABLE_GITEE})
set(ENABLE_GITEE ON)
set(CMAKE_INSTALL_PREFIX ${BUILD_PATH}/graphengine)

if (ENABLE_TESTCASES)
# Use prebuilt slog, error_manager and mmpa in non-Ascend mode (e.g. tests),
# imported from the submodule's third_party prebuild directory for the host
# architecture.
set(GE_PREBUILD_PATH ${GE_SOURCE_DIR}/third_party/prebuild/${CMAKE_HOST_SYSTEM_PROCESSOR})
set(ENABLE_MS_TESTCASES TRUE)
find_submodule_lib(slog libslog.so ${GE_PREBUILD_PATH})
find_submodule_lib(error_manager liberror_manager.so ${GE_PREBUILD_PATH})
find_submodule_lib(static_mmpa libmmpa.a ${GE_PREBUILD_PATH})
endif()

# Configure the GraphEngine project itself, then restore the saved settings
# so the rest of the MindSpore configure step is unaffected.
add_subdirectory(${GE_SOURCE_DIR})
set(CMAKE_INSTALL_PREFIX ${_ge_tmp_CMAKE_INSTALL_PREFIX})
set(ENABLE_GITEE ${_ge_tmp_ENABLE_GITEE})
else()
message(FATAL_ERROR "No compile option defined for GraphEngine, exiting")
endif()

+ 3
- 2
cmake/mind_expression.cmake View File

@@ -61,9 +61,10 @@ if (ENABLE_GE)
link_directories(${CMAKE_SOURCE_DIR}/third_party/ge/lib)
elseif(ENABLE_D OR ENABLE_ACL OR ENABLE_TESTCASES)
include_directories(${CMAKE_SOURCE_DIR}/graphengine/inc)
include_directories(${CMAKE_SOURCE_DIR}/graphengine/inc/ops)
include_directories(${CMAKE_SOURCE_DIR}/graphengine/inc/external)
include_directories(${CMAKE_SOURCE_DIR}/graphengine/inc/external/graph)
include_directories(${CMAKE_SOURCE_DIR}/graphengine/metadef/inc)
include_directories(${CMAKE_SOURCE_DIR}/graphengine/metadef/inc/external)
include_directories(${CMAKE_SOURCE_DIR}/graphengine/metadef/inc/external/graph)
endif()

if (ENABLE_GE OR ENABLE_D OR ENABLE_ACL OR ENABLE_TESTCASES)


+ 6
- 6
cmake/package.cmake View File

@@ -211,7 +211,7 @@ if (NOT ENABLE_GE)
set(ASCEND_DRIVER_PATH ${ASCEND_PATH}/driver/lib64/common)

install(
FILES ${CMAKE_SOURCE_DIR}/build/graphengine/libc_sec.so
FILES ${CMAKE_SOURCE_DIR}/build/graphengine/c_sec/lib/libc_sec.so
DESTINATION ${INSTALL_LIB_DIR}
COMPONENT mindspore
)
@@ -224,9 +224,9 @@ if (NOT ENABLE_GE)
)
install(
FILES
${CMAKE_BINARY_DIR}/graphengine/src/common/graph/libgraph.so
${CMAKE_BINARY_DIR}/graphengine/src/ge/common/libge_common.so
${CMAKE_BINARY_DIR}/graphengine/src/ge/ge_runtime/libge_runtime.so
${CMAKE_BINARY_DIR}/graphengine/metadef/graph/libgraph.so
${CMAKE_BINARY_DIR}/graphengine/ge/common/libge_common.so
${CMAKE_BINARY_DIR}/graphengine/ge/ge_runtime/libge_runtime.so
DESTINATION ${INSTALL_LIB_DIR}
COMPONENT mindspore
)
@@ -234,8 +234,8 @@ if (NOT ENABLE_GE)
elseif (ENABLE_TESTCASES)
install(
FILES
${CMAKE_BINARY_DIR}/graphengine/src/common/graph/libgraph.so
${CMAKE_SOURCE_DIR}/build/graphengine/libc_sec.so
${CMAKE_BINARY_DIR}/graphengine/metadef/graph/libgraph.so
${CMAKE_SOURCE_DIR}/build/graphengine/c_sec/lib/libc_sec.so
${LIBEVENT_LIB_LIST}
DESTINATION ${INSTALL_LIB_DIR}
COMPONENT mindspore


+ 1
- 1
graphengine

@@ -1 +1 @@
Subproject commit ce23341ee2c7e48b39f227b293d1aa7f54effd34
Subproject commit 20a0326976db65ca01f43ae4ccdd85677faaeb5e

+ 5
- 8
mindspore/ccsrc/runtime/device/ascend/executor/ai_core_dynamic_kernel.cc View File

@@ -74,17 +74,14 @@ void AiCoreDynamicKernel::ParseCompileJson() {
std::replace(compile_info_attr.begin(), compile_info_attr.end(), '\'', '\"');
compile_info_attr = ReplaceInvalidJsonStr(compile_info_attr);
MS_LOG(INFO) << "Get compile_info:" << compile_info_attr;

try {
compile_info_json_ = std::make_shared<nlohmann::json>(nlohmann::json::parse(compile_info_attr));
} catch (nlohmann::json::parse_error &e) {
MS_LOG(EXCEPTION) << "parse json failed, error:" << e.what();
}
op_compile_info_.str = compile_info_attr;
op_compile_info_.key = "";

if (AnfAlgo::HasNodeAttr(kAttrFusionType, cnode_ptr_)) {
auto fusion_type = AnfAlgo::GetNodeAttr<std::string>(cnode_ptr_, kAttrFusionType);
MS_LOG(INFO) << "Get fusion_type:" << fusion_type;
(*compile_info_json_)["_pattern"] = fusion_type;
op_compile_info_.key = std::hash<std::string>{}(fusion_type);
}
}

@@ -132,8 +129,8 @@ void AiCoreDynamicKernel::ComputeTiling() {
MS_LOG(INFO) << "Start compute tiling of:" << cnode_ptr_->fullname_with_scope();
optiling::OpRunInfo op_run_info;

OpTilingCalculater::GetInstance().CalculateTiling(NOT_NULL(cnode_ptr_), NOT_NULL(compile_info_json_),
depend_tensor_map_, NOT_NULL(&op_run_info));
OpTilingCalculater::GetInstance().CalculateTiling(NOT_NULL(cnode_ptr_), op_compile_info_, depend_tensor_map_,
NOT_NULL(&op_run_info));
block_dim_ = op_run_info.block_dim;
workspaces_size_ = op_run_info.workspaces;
tiling_data_ = op_run_info.tiling_data.str();


+ 2
- 0
mindspore/ccsrc/runtime/device/ascend/executor/ai_core_dynamic_kernel.h View File

@@ -24,6 +24,7 @@
#include "nlohmann/json.hpp"
#include "ir/tensor.h"
#include "runtime/device/device_address.h"
#include "register/op_tiling.h"
#include "mindspore/ccsrc/runtime/device/executor/dynamic_kernel.h"

namespace mindspore {
@@ -60,6 +61,7 @@ class AiCoreDynamicKernel : public DynamicKernel {
std::vector<int64_t> workspaces_size_;
std::vector<DeviceAddressPtr> workspace_addr_;
std::shared_ptr<nlohmann::json> compile_info_json_;
optiling::OpCompileInfo op_compile_info_{};

void ComputeTiling();
bool CopyTilingToDevice();


+ 4
- 4
mindspore/ccsrc/runtime/device/ascend/executor/tiling/op_tiling_calculater.cc View File

@@ -25,6 +25,7 @@
#include "runtime/device/ascend/ge_types_convert.h"
#include "utils/utils.h"
#include "external/graph/tensor.h"
#include "external/register/op_tiling_registry.h"

namespace mindspore {
namespace device {
@@ -136,7 +137,7 @@ void FeedTeOpConstTensor(const NotNull<CNodePtr> &cnode, const std::map<uint32_t

void OpTilingCalculater::Init() {
MS_LOG(INFO) << "Start init OpTilingCalculater";
tiling_func_map_ = optiling::OpTilingInterf::RegisteredOpInterf();
tiling_func_map_ = optiling::OpTilingRegistryInterf::RegisteredOpInterf();
MS_LOG(INFO) << "tiling_func_map_ size:" << tiling_func_map_.size();
for (const auto &iter : tiling_func_map_) {
MS_LOG(INFO) << "Regist tiling func:" << iter.first;
@@ -157,8 +158,7 @@ std::string GetRealOpType(const std::string &op_type) {
return iter->second;
}

void OpTilingCalculater::CalculateTiling(const NotNull<CNodePtr> &cnode,
const NotNull<std::shared_ptr<nlohmann::json>> &compile_info_json,
void OpTilingCalculater::CalculateTiling(const NotNull<CNodePtr> &cnode, const optiling::OpCompileInfo &op_compile_info,
const std::map<uint32_t, tensor::TensorPtr> &depend_tensor_map,
NotNull<optiling::OpRunInfo *> op_run_info) {
optiling::TeOpParas op_param;
@@ -181,7 +181,7 @@ void OpTilingCalculater::CalculateTiling(const NotNull<CNodePtr> &cnode,
MS_LOG(INFO) << "Get tiling func:" << iter->first;

if (iter != tiling_func_map_.end()) {
bool ret = (iter->second)(op_type, op_param, *compile_info_json.get(), *op_run_info);
bool ret = (iter->second)(op_param, op_compile_info, *op_run_info);
if (!ret) {
MS_LOG(EXCEPTION) << "Calculate tiling failed";
}


+ 1
- 2
mindspore/ccsrc/runtime/device/ascend/executor/tiling/op_tiling_calculater.h View File

@@ -37,8 +37,7 @@ class OpTilingCalculater {
}

void Init();
void CalculateTiling(const NotNull<CNodePtr> &cnode,
const NotNull<std::shared_ptr<nlohmann::json>> &compile_info_json,
void CalculateTiling(const NotNull<CNodePtr> &cnode, const optiling::OpCompileInfo &op_compile_info,
const std::map<uint32_t, tensor::TensorPtr> &depend_tensor_map,
NotNull<optiling::OpRunInfo *> op_run_info);



+ 1
- 1
mindspore/ccsrc/runtime/hccl_adapter/hccl_adapter.h View File

@@ -21,7 +21,7 @@
#include <vector>
#include <memory>
#include "mindspore/core/ir/anf.h"
#include "external/hccl/hccl_types.h"
#include "hccl/hccl_types.h"

#define MS_API __attribute__((visibility("default")))



+ 3
- 3
tests/st/model_zoo_tests/transformer/test_transformer.py View File

@@ -164,10 +164,10 @@ def test_transformer():

# assertion occurs while the loss value, overflow state or loss_scale value is wrong
loss_value = np.array(callback.loss_list)
assert np.allclose(loss_value[0], 11.241624, 0, 0.000005)
assert np.allclose(loss_value[0], 11.241604, 0, 0.000005)

expect_loss_value = [11.241624, 11.243232, 11.217465, 11.204196, 11.2138195,
11.215386, 11.19053, 11.150403, 11.191858, 11.160057]
expect_loss_value = [11.241604, 11.243231, 11.217458, 11.204156, 11.213805,
11.215374, 11.19065, 11.150393, 11.191824, 11.160044]
print("loss value: {}".format(loss_value))
assert np.allclose(loss_value[0:10], expect_loss_value, 0, 0.0005)



+ 4
- 2
tests/ut/cpp/runtest.sh View File

@@ -15,7 +15,7 @@
# ============================================================================

set -e
BASEPATH=$(cd $(dirname $0); pwd)
BASEPATH=$(cd "$(dirname "$0")"; pwd)
PROJECT_PATH=${BASEPATH}/../../..
if [ $BUILD_PATH ];then
echo "BUILD_PATH = $BUILD_PATH"
@@ -26,7 +26,9 @@ fi
cd ${BUILD_PATH}/mindspore/tests/ut/cpp


export LD_LIBRARY_PATH=${BUILD_PATH}/mindspore/googletest/googlemock/gtest:${PROJECT_PATH}/mindspore:${PROJECT_PATH}/mindspore/lib:$LD_LIBRARY_PATH
export LD_LIBRARY_PATH=${BUILD_PATH}/mindspore/googletest/googlemock/gtest:${PROJECT_PATH}/mindspore:\
${PROJECT_PATH}/mindspore/lib:${PROJECT_PATH}/graphengine/third_party/prebuild/x86_64:\
${PROJECT_PATH}/graphengine/third_party/prebuild/aarch64:${LD_LIBRARY_PATH}
export PYTHONPATH=${PROJECT_PATH}/tests/ut/cpp/python_input:$PYTHONPATH:${PROJECT_PATH}
export GLOG_v=2
export GC_COLLECT_IN_CELL=1


+ 3
- 4
tests/ut/cpp/stub/dynamic_shape/dynamic_shape_stub.cc View File

@@ -53,10 +53,9 @@ bool HcclExecutorManager::Initialize() { return true; }
bool HcclExecutorManager::Finalize() { return true; }

void OpTilingCalculater::Init() {}
void OpTilingCalculater::CalculateTiling(const NotNull<CNodePtr> &cnode,
const NotNull<std::shared_ptr<nlohmann::json>> &compile_info_json,
const std::map<uint32_t, tensor::TensorPtr> &depend_tensor_map,
NotNull<optiling::OpRunInfo *> op_run_info) {}
void OpTilingCalculater::CalculateTiling(const NotNull<CNodePtr> &cnode, const optiling::OpCompileInfo &op_compile_info,
const std::map<uint32_t, tensor::TensorPtr> &depend_tensor_map,
NotNull<optiling::OpRunInfo *> op_run_info) {}
} // namespace ascend
} // namespace device
} // namespace mindspore


Loading…
Cancel
Save