
Merge branch 'incubator-master' into sync_a7744bde_acce61d0

tags/v0.6.0-beta
jonyguo 5 years ago
commit 34428e0082
9 changed files with 17 additions and 14 deletions:
  1. .gitmodules (+1, -1)
  2. CMakeLists.txt (+3, -0)
  3. graphengine (+1, -1)
  4. mindspore/ccsrc/CMakeLists.txt (+1, -1)
  5. mindspore/ccsrc/transform/op_declare.cc (+3, -3)
  6. mindspore/context.py (+7, -5)
  7. mindspore/nn/optim/optimizer.py (+0, -1)
  8. tests/ut/cpp/CMakeLists.txt (+1, -1)
  9. tests/ut/python/optimizer/test_optimize_with_parameter_groups.py (+0, -1)

.gitmodules (+1, -1)

@@ -12,4 +12,4 @@
url = https://github.com/protocolbuffers/protobuf.git
[submodule "graphengine"]
path = graphengine
- url = https://gitee.com/mindspore/graphengine.git
+ url = https://gitee.com/ms-incubator/graphengine.git

CMakeLists.txt (+3, -0)

@@ -7,6 +7,9 @@ endif ()

include(${CMAKE_SOURCE_DIR}/cmake/options.cmake)
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_SOURCE_DIR}/cmake/modules/")
+ if (ENABLE_GE)
+     add_compile_definitions(_GLIBCXX_USE_CXX11_ABI=0)
+ endif ()

if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
set(CMAKE_CXX_FLAGS_RELEASE "$ENV{CXXFLAGS} -O2 -Werror -Wno-return-std-move -Wno-unused-private-field -Wno-unused-lambda-capture -Wno-sign-compare -Wno-overloaded-virtual -Wno-unneeded-internal-declaration -Wno-unused-variable -Wno-pessimizing-move -Wno-inconsistent-missing-override -DHALF_ENABLE_CPP11_USER_LITERALS=0 -D_FORTIFY_SOURCE=2")


graphengine (+1, -1)

@@ -1 +1 @@
- Subproject commit 579dcb75a990b533f9182733a6424f2bd66f0f23
+ Subproject commit c27e428e9698dd4f9b198008596676bc2d1b49aa

mindspore/ccsrc/CMakeLists.txt (+1, -1)

@@ -126,7 +126,7 @@ endif()

if (ENABLE_GE)
if(ENABLE_TRAIN)
- target_link_libraries(mindspore ge_client_train hccl)
+ target_link_libraries(mindspore ge_runner hccl)
else ()
target_link_libraries(mindspore ge_client)
endif ()


mindspore/ccsrc/transform/op_declare.cc (+3, -3)

@@ -470,7 +470,7 @@ INPUT_MAP(ApplyAdam) = {{1, INPUT_DESC(var)}, {2, INPUT_DESC(m)},
{10, INPUT_DESC(grad)}};
ATTR_MAP(ApplyAdam) = {{"use_locking", ATTR_DESC(use_locking, AnyTraits<bool>())},
{"use_nesterov", ATTR_DESC(use_nesterov, AnyTraits<bool>())}};
- OUTPUT_MAP(ApplyAdam) = {{0, OUTPUT_DESC(var)}, {1, OUTPUT_DESC(m)}, {2, OUTPUT_DESC(v)}};
+ OUTPUT_MAP(ApplyAdam) = {{0, OUTPUT_DESC(var)}};

// Relu6
INPUT_MAP(Relu6) = {{1, INPUT_DESC(x)}};
@@ -823,7 +823,7 @@ OUTPUT_MAP(RealDiv) = {{0, OUTPUT_DESC(y)}};
// Cast
INPUT_MAP(Cast) = {{1, INPUT_DESC(x)}};
INPUT_ATTR_MAP(Cast) = {{2, ATTR_DESC(dst_type, AnyTraits<GEType>())}};
ATTR_MAP(Cast) = {{"Truncate", ATTR_DESC(truncate, AnyTraits<bool>())}};
ATTR_MAP(Cast) = EMPTY_ATTR_MAP;
OUTPUT_MAP(Cast) = {{0, OUTPUT_DESC(y)}};

// Reciprocal
@@ -1153,7 +1153,7 @@ INPUT_MAP(SparseApplyAdagradD) = {
{1, INPUT_DESC(var)}, {2, INPUT_DESC(accum)}, {3, INPUT_DESC(grad)}, {4, INPUT_DESC(indices)}};
ATTR_MAP(SparseApplyAdagradD) = {{"lr", ATTR_DESC(lr, AnyTraits<float>())},
{"use_locking", ATTR_DESC(use_locking, AnyTraits<bool>())}};
- OUTPUT_MAP(SparseApplyAdagradD) = {{0, OUTPUT_DESC(var)}};
+ OUTPUT_MAP(SparseApplyAdagradD) = {{0, OUTPUT_DESC(var)}, {1, OUTPUT_DESC(accum)}};

// SparseApplyFtrlD
INPUT_MAP(SparseApplyFtrlD) = {{1, INPUT_DESC(var)},


mindspore/context.py (+7, -5)

@@ -453,11 +453,13 @@ def reset_auto_parallel_context():
_reset_auto_parallel_context()


- @args_type_check(mode=int, precompile_only=bool, device_target=str, device_id=int, save_graphs=bool,
-                  save_graphs_path=str, save_ms_model=bool, save_ms_model_path=str, enable_dump=bool,
-                  save_dump_path=str, enable_reduce_precision=bool, variable_memory_max_size=str,
-                  enable_profiling=bool, profiling_options=str, enable_auto_mixed_precision=bool,
-                  check_bprop=bool)
+ @args_type_check(mode=int, precompile_only=bool, device_target=str,
+                  device_id=int, enable_ir_fusion=bool, save_graphs=bool,
+                  enable_task_sink=bool, save_graphs_path=str, enable_loop_sink=bool,
+                  enable_mem_reuse=bool, save_ms_model=bool, save_ms_model_path=str, enable_gpu_summary=bool,
+                  enable_auto_mixed_precision=bool, enable_dump=bool, save_dump_path=str,
+                  enable_reduce_precision=bool, enable_dynamic_memory=bool, graph_memory_max_size=str,
+                  variable_memory_max_size=str, enable_profiling=bool, profiling_options=str)
def set_context(**kwargs):
"""
Sets context for running environment.

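Note: the @args_type_check decorator above only type-checks the keyword arguments accepted by set_context, so the diff mainly changes which options are recognized. As a rough illustration of how a few of the parameters listed in the new decorator might be used, here is a minimal sketch; the chosen values are assumptions for illustration, not part of this commit, and which options take effect depends on the installed backend.

from mindspore import context

# Illustrative call only: parameter names come from the decorator above,
# the values are assumed and backend-dependent.
context.set_context(mode=context.GRAPH_MODE,
                    device_target="Ascend",
                    enable_mem_reuse=True,
                    enable_loop_sink=True,
                    save_graphs=False)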

mindspore/nn/optim/optimizer.py (+0, -1)

@@ -292,7 +292,6 @@ class Optimizer(Cell):
current_dynamic_lr = self.gather(self.learning_rate[i], self.global_step, 0)
lr += (current_dynamic_lr,)
F.control_depend(lr, self.assignadd(self.global_step, 1))

else:
lr = self.learning_rate
if self.dynamic_lr:


tests/ut/cpp/CMakeLists.txt (+1, -1)

@@ -129,7 +129,7 @@ add_executable(ut_tests ${UT_SRCS} ${MINDSPORE_SRC_LIST} ${UT_SUTB_SRC_LIST})

if (ENABLE_GE)
if(ENABLE_TRAIN)
- target_link_libraries(ut_tests PRIVATE graph ge_client_train)
+ target_link_libraries(ut_tests PRIVATE graph ge_runner)
else()
target_link_libraries(ut_tests PRIVATE graph ge_client)
endif()


tests/ut/python/optimizer/test_optimize_with_parameter_groups.py (+0, -1)

@@ -29,7 +29,6 @@ context.set_context(mode=context.GRAPH_MODE)

class LeNet5(nn.Cell):
""" LeNet5 definition """

def __init__(self):
super(LeNet5, self).__init__()
self.conv1 = nn.Conv2d(1, 6, 5, pad_mode='valid')

