Browse Source

!1321 fix codex and reviewbot

Merge pull request !1321 from rick_sanchez/master
tags/v0.3.0-alpha
mindspore-ci-bot Gitee 5 years ago
parent
commit
f0096d1624
7 changed files with 6 additions and 14 deletions
  1. +0
    -2
      mindspore/ccsrc/gvar/logging_level.cc
  2. +0
    -1
      mindspore/ccsrc/ir/anf_extends.cc
  3. +0
    -1
      mindspore/ccsrc/optimizer/irpass/arithmetic_simplify.h
  4. +0
    -3
      mindspore/ccsrc/optimizer/opt.cc
  5. +1
    -1
      mindspore/ccsrc/pipeline/action.cc
  6. +2
    -3
      mindspore/ccsrc/utils/contract.h
  7. +3
    -3
      mindspore/ccsrc/utils/tensorprint_utils.cc

+ 0
- 2
mindspore/ccsrc/gvar/logging_level.cc View File

@@ -17,8 +17,6 @@
#include "utils/log_adapter.h"

namespace mindspore {

// Default per-submodule log levels, indexed by submodule id.
// NOTE(review): aggregate initialization `{WARNING}` sets only element 0 to
// WARNING; the remaining NUM_SUBMODUES-1 entries are value-initialized to 0
// (C++ aggregate-init rule). Confirm that WARNING == 0, or that these entries
// are overwritten before use, for the comment below to hold for ALL entries.
// set default log level to WARNING for all sub modules
int g_ms_submodule_log_levels[NUM_SUBMODUES] = {WARNING};

} // namespace mindspore

+ 0
- 1
mindspore/ccsrc/ir/anf_extends.cc View File

@@ -99,5 +99,4 @@ std::string CNode::fullname_with_scope() {
// Visitor-pattern double dispatch: each AnfNode subclass forwards itself to the
// matching AnfVisitor::Visit overload. shared_from_base<T>() presumably yields a
// smart pointer of the concrete node type (named after enable_shared_from_this)
// rather than a raw `this` — verify against its declaration.
void CNode::accept(AnfVisitor *v) { v->Visit(shared_from_base<CNode>()); }
void ValueNode::accept(AnfVisitor *v) { v->Visit(shared_from_base<ValueNode>()); }
void Parameter::accept(AnfVisitor *v) { v->Visit(shared_from_base<Parameter>()); }

} // namespace mindspore

+ 0
- 1
mindspore/ccsrc/optimizer/irpass/arithmetic_simplify.h View File

@@ -275,7 +275,6 @@ class AdjustAllReduceMulAdd : public AnfVisitor {
// If has dynamic loss scale.
auto &users_map = fg->manager()->node_users();
auto it = users_map.find(mul_cnode_);

if (it != users_map.end()) {
auto users = it->second;
for (auto &user_pair : users) {


+ 0
- 3
mindspore/ccsrc/optimizer/opt.cc View File

@@ -92,15 +92,12 @@ inline bool isTraversable(const AnfNodePtr &node) {
// A null node is never traversable.
if (node == nullptr) {
return false;
}

// CNodes and Parameters are always traversable.
if (node->isa<CNode>() || node->isa<Parameter>()) {
return true;
}

// Of the value nodes, only embedded FuncGraphs and RefKeys are traversable.
if (IsValueNode<FuncGraph>(node) || IsValueNode<RefKey>(node)) {
return true;
}

// Everything else (all other value-node kinds) is not traversed.
return false;
}



+ 1
- 1
mindspore/ccsrc/pipeline/action.cc View File

@@ -287,7 +287,7 @@ static bool IsCtrlSink() {
return false;
}

char *enable_ctrl_sink = std::getenv("ENABLE_CTRL_SINK");
const char *enable_ctrl_sink = std::getenv("ENABLE_CTRL_SINK");
if (enable_ctrl_sink == nullptr) {
return false;
}


+ 2
- 3
mindspore/ccsrc/utils/contract.h View File

@@ -63,12 +63,11 @@ class Ensures : public EnsuresAccess<T, R> {
Ensures(const Ensures<O, R> &other) : value_(other.get()) {}
~Ensures() = default;

T get() const { return value_; }
const T get() const { return value_; }
T &get() { return value_; }

operator T() const { return value_; }
operator const T() const { return value_; }

private:
T value_;
};



+ 3
- 3
mindspore/ccsrc/utils/tensorprint_utils.cc View File

@@ -86,7 +86,7 @@ bool PrintTensorToString(const char *str_data_ptr, mindspore::tensor::Tensor *co
}

template <typename T>
void PrintScalarToString(const char *str_data_ptr, const string &tensor_type, std::ostringstream *buf) {
void PrintScalarToString(const char *str_data_ptr, const string &tensor_type, std::ostringstream *const buf) {
MS_EXCEPTION_IF_NULL(str_data_ptr);
MS_EXCEPTION_IF_NULL(buf);
const T *data_ptr = reinterpret_cast<const T *>(str_data_ptr);
@@ -95,7 +95,7 @@ void PrintScalarToString(const char *str_data_ptr, const string &tensor_type, st
*buf << *data_ptr << "\n";
}

void PrintScalarToBoolString(const char *str_data_ptr, const string &tensor_type, std::ostringstream *buf) {
void PrintScalarToBoolString(const char *str_data_ptr, const string &tensor_type, std::ostringstream *const buf) {
MS_EXCEPTION_IF_NULL(str_data_ptr);
MS_EXCEPTION_IF_NULL(buf);
const bool *data_ptr = reinterpret_cast<const bool *>(str_data_ptr);
@@ -108,7 +108,7 @@ void PrintScalarToBoolString(const char *str_data_ptr, const string &tensor_type
}
}

void convertDataItem2Scalar(const char *str_data_ptr, const string &tensor_type, std::ostringstream *buf) {
void convertDataItem2Scalar(const char *str_data_ptr, const string &tensor_type, std::ostringstream *const buf) {
MS_EXCEPTION_IF_NULL(str_data_ptr);
MS_EXCEPTION_IF_NULL(buf);
auto type_iter = print_type_map.find(tensor_type);


Loading…
Cancel
Save