Browse Source

d

tags/v1.6.0
huchunmei 4 years ago
parent
commit
350cfaf37d
55 changed files with 395 additions and 0 deletions
  1. +5
    -0
      mindspore/core/ops/gather.h
  2. +5
    -0
      mindspore/core/ops/gather_d.h
  3. +4
    -0
      mindspore/core/ops/gather_nd.h
  4. +5
    -0
      mindspore/core/ops/gelu.h
  5. +4
    -0
      mindspore/core/ops/ger.h
  6. +5
    -0
      mindspore/core/ops/getnext.h
  7. +5
    -0
      mindspore/core/ops/greater.h
  8. +4
    -0
      mindspore/core/ops/greater_equal.h
  9. +4
    -0
      mindspore/core/ops/hshrink.h
  10. +3
    -0
      mindspore/core/ops/hsigmoid.h
  11. +5
    -0
      mindspore/core/ops/identity.h
  12. +4
    -0
      mindspore/core/ops/index_add.h
  13. +4
    -0
      mindspore/core/ops/invert_permutation.h
  14. +5
    -0
      mindspore/core/ops/is_finite.h
  15. +12
    -0
      mindspore/core/ops/l2_normalize.h
  16. +17
    -0
      mindspore/core/ops/layer_norm.h
  17. +8
    -0
      mindspore/core/ops/leaky_relu.h
  18. +4
    -0
      mindspore/core/ops/lerp.h
  19. +4
    -0
      mindspore/core/ops/less.h
  20. +5
    -0
      mindspore/core/ops/less_equal.h
  21. +4
    -0
      mindspore/core/ops/lin_space.h
  22. +4
    -0
      mindspore/core/ops/log.h
  23. +5
    -0
      mindspore/core/ops/log1p.h
  24. +8
    -0
      mindspore/core/ops/log_softmax.h
  25. +5
    -0
      mindspore/core/ops/logical_and.h
  26. +5
    -0
      mindspore/core/ops/logical_not.h
  27. +5
    -0
      mindspore/core/ops/logical_or.h
  28. +5
    -0
      mindspore/core/ops/logical_xor.h
  29. +24
    -0
      mindspore/core/ops/lrn.h
  30. +44
    -0
      mindspore/core/ops/lstm.h
  31. +4
    -0
      mindspore/core/ops/masked_fill.h
  32. +12
    -0
      mindspore/core/ops/mat_mul.h
  33. +28
    -0
      mindspore/core/ops/max_pool.h
  34. +5
    -0
      mindspore/core/ops/maximum.h
  35. +5
    -0
      mindspore/core/ops/minimum.h
  36. +5
    -0
      mindspore/core/ops/mod.h
  37. +4
    -0
      mindspore/core/ops/mul.h
  38. +5
    -0
      mindspore/core/ops/neg.h
  39. +5
    -0
      mindspore/core/ops/neighborexchange.h
  40. +5
    -0
      mindspore/core/ops/not_equal.h
  41. +8
    -0
      mindspore/core/ops/one_hot.h
  42. +4
    -0
      mindspore/core/ops/ones.h
  43. +5
    -0
      mindspore/core/ops/ones_like.h
  44. +9
    -0
      mindspore/core/ops/pack.h
  45. +8
    -0
      mindspore/core/ops/pad.h
  46. +5
    -0
      mindspore/core/ops/pow.h
  47. +5
    -0
      mindspore/core/ops/prelu.h
  48. +21
    -0
      mindspore/core/ops/range.h
  49. +4
    -0
      mindspore/core/ops/rank.h
  50. +5
    -0
      mindspore/core/ops/real_div.h
  51. +5
    -0
      mindspore/core/ops/reciprocal.h
  52. +4
    -0
      mindspore/core/ops/reduce_all.h
  53. +4
    -0
      mindspore/core/ops/reduce_any.h
  54. +5
    -0
      mindspore/core/ops/reduce_max.h
  55. +4
    -0
      mindspore/core/ops/reduce_mean.h

+ 5
- 0
mindspore/core/ops/gather.h View File

@@ -27,11 +27,16 @@
namespace mindspore {
namespace ops {
constexpr auto kNameGather = "Gather";
/// \brief Returns a slice of the input tensor based on the specified indices and axis.
/// Refer to Python API @ref mindspore.ops.Gather for more details.
class MS_CORE_API Gather : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive name and declares the expected
/// input names ("param", "indices", "axis") and output name ("output").
Gather() : PrimitiveC(kNameGather) { InitIOName({"param", "indices", "axis"}, {"output"}); }
/// \brief Destructor.
~Gather() = default;
MS_DECLARE_PARENT(Gather, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.Gather for the inputs.
void Init() {}
};
} // namespace ops


+ 5
- 0
mindspore/core/ops/gather_d.h View File

@@ -27,11 +27,16 @@

namespace mindspore {
namespace ops {
/// \brief Gathers values along an axis specified by dimension.
/// Refer to Python API @ref mindspore.ops.GatherD for more details.
class MS_CORE_API GatherD : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive under the shared kPrimGatherD name
/// and declares input names ("x", "dim", "index") and output name ("output").
GatherD() : PrimitiveC(prim::kPrimGatherD->name()) { InitIOName({"x", "dim", "index"}, {"output"}); }
/// \brief Destructor.
~GatherD() = default;
MS_DECLARE_PARENT(GatherD, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.GatherD for the inputs.
void Init() {}
};
} // namespace ops


+ 4
- 0
mindspore/core/ops/gather_nd.h View File

@@ -25,11 +25,15 @@
namespace mindspore {
namespace ops {
constexpr auto kNameGatherNd = "GatherNd";
/// \brief Gathers slices from a tensor by indices. Refer to Python API @ref mindspore.ops.GatherNd for more details.
class MS_CORE_API GatherNd : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive name and declares input names
/// ("input_x", "indices") and output name ("y").
GatherNd() : PrimitiveC(kNameGatherNd) { InitIOName({"input_x", "indices"}, {"y"}); }
/// \brief Destructor.
~GatherNd() = default;
MS_DECLARE_PARENT(GatherNd, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.GatherNd for the inputs.
void Init() {}
};
AbstractBasePtr GatherNdInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,


+ 5
- 0
mindspore/core/ops/gelu.h View File

@@ -25,11 +25,16 @@
namespace mindspore {
namespace ops {
constexpr auto kNameGeLU = prim::kGeLU;
/// \brief Gaussian Error Linear Units activation function.
/// Refer to Python API @ref mindspore.ops.GeLU for more details.
class MS_CORE_API GeLU : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive name and declares input name ("x")
/// and output name ("output").
GeLU() : PrimitiveC(kNameGeLU) { InitIOName({"x"}, {"output"}); }
/// \brief Destructor.
~GeLU() = default;
MS_DECLARE_PARENT(GeLU, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.GeLU for the inputs.
void Init() {}
};



+ 4
- 0
mindspore/core/ops/ger.h View File

@@ -28,9 +28,13 @@
namespace mindspore {
namespace ops {
constexpr auto kNameGer = "Ger";
/// \brief Ger product of `x1` and `x2`. Calculate the outer product of two one-dimensional arrays.
/// Refer to Python API @ref mindspore.ops.Ger for more details.
class MS_CORE_API Ger : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive name and declares input names
/// ("x", "y") and output name ("output").
Ger() : PrimitiveC(kNameGer) { InitIOName({"x", "y"}, {"output"}); }
/// \brief Destructor.
~Ger() = default;
MS_DECLARE_PARENT(Ger, PrimitiveC);
};


+ 5
- 0
mindspore/core/ops/getnext.h View File

@@ -28,11 +28,16 @@
namespace mindspore {
namespace ops {
constexpr auto kNameGetNext = prim::kGetNext;
/// \brief Returns the next element in the dataset queue.
/// Refer to Python API @ref mindspore.ops.GetNext for more details.
class MS_CORE_API GetNext : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive under the shared kPrimGetNext name.
GetNext() : PrimitiveC(prim::kPrimGetNext->name()) {}
/// \brief Destructor.
~GetNext() = default;
MS_DECLARE_PARENT(GetNext, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.GetNext for the inputs.
void Init() {}
};
AbstractBasePtr GetNextInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,


+ 5
- 0
mindspore/core/ops/greater.h View File

@@ -27,11 +27,16 @@
namespace mindspore {
namespace ops {
constexpr auto kNameGreater = "Greater";
/// \brief Computes the boolean value of \f$x>y\f$ element-wise.
/// Refer to Python API @ref mindspore.ops.Greater for more details.
class MS_CORE_API Greater : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive name and declares input names
/// ("x", "y") and output name ("output").
Greater() : PrimitiveC(kNameGreater) { InitIOName({"x", "y"}, {"output"}); }
/// \brief Destructor.
~Greater() = default;
MS_DECLARE_PARENT(Greater, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.Greater for the inputs.
void Init() {}
};
} // namespace ops


+ 4
- 0
mindspore/core/ops/greater_equal.h View File

@@ -26,9 +26,13 @@
namespace mindspore {
namespace ops {
constexpr auto kNameGreaterEqual = "GreaterEqual";
/// \brief Computes the boolean value of \f$x>=y\f$ element-wise.
/// Refer to Python API @ref mindspore.ops.GreaterEqual for more details.
class MS_CORE_API GreaterEqual : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive under its canonical name.
GreaterEqual() : PrimitiveC(kNameGreaterEqual) {}
/// \brief Destructor.
~GreaterEqual() = default;
MS_DECLARE_PARENT(GreaterEqual, PrimitiveC);
};


+ 4
- 0
mindspore/core/ops/hshrink.h View File

@@ -26,9 +26,13 @@
namespace mindspore {
namespace ops {
constexpr auto kNameHShrink = "HShrink";
/// \brief Applies the hard shrinkage function element-wise.
/// Refer to Python API @ref mindspore.ops.HShrink for more details.
class MS_CORE_API HShrink : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive name and declares input name
/// ("input_x") and output name ("output").
HShrink() : PrimitiveC(kNameHShrink) { InitIOName({"input_x"}, {"output"}); }
/// \brief Destructor.
~HShrink() = default;
MS_DECLARE_PARENT(HShrink, PrimitiveC);
};


+ 3
- 0
mindspore/core/ops/hsigmoid.h View File

@@ -26,9 +26,12 @@
namespace mindspore {
namespace ops {
constexpr auto kNameHSigmoid = "HSigmoid";
/// \brief Hard sigmoid activation function. Refer to Python API @ref mindspore.ops.HSigmoid for more details.
class MS_CORE_API HSigmoid : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive name and declares input name
/// ("input_x") and output name ("output").
HSigmoid() : PrimitiveC(kNameHSigmoid) { InitIOName({"input_x"}, {"output"}); }
/// \brief Destructor.
~HSigmoid() = default;
MS_DECLARE_PARENT(HSigmoid, PrimitiveC);  // MS_DECLARE_PARENT is provided by ops/primitive_c.h
};


+ 5
- 0
mindspore/core/ops/identity.h View File

@@ -25,11 +25,16 @@
namespace mindspore {
namespace ops {
constexpr auto kNameIdentity = "Identity";
/// \brief Returns a Tensor with the same shape and contents as input.
/// Refer to Python API @ref mindspore.ops.Identity for more details.
class MS_CORE_API Identity : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive under its canonical name.
Identity() : PrimitiveC(kNameIdentity) {}
/// \brief Destructor.
~Identity() = default;
MS_DECLARE_PARENT(Identity, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.Identity for the inputs.
void Init() {}
};
} // namespace ops


+ 4
- 0
mindspore/core/ops/index_add.h View File

@@ -28,9 +28,13 @@
namespace mindspore {
namespace ops {
constexpr auto kNameIndexAdd = "IndexAdd";
/// \brief Adds tensor y to specified axis and indices of tensor x.
/// Refer to Python API @ref mindspore.ops.IndexAdd for more details.
class IndexAdd : public PrimitiveC {
public:
/// \brief Constructor.
IndexAdd() : PrimitiveC(kNameIndexAdd) { InitIOName({"input_x", "indices", "input_y"}, {"output"}); }
/// \brief Destructor.
~IndexAdd() = default;
MS_DECLARE_PARENT(IndexAdd, PrimitiveC);
};


+ 4
- 0
mindspore/core/ops/invert_permutation.h View File

@@ -25,9 +25,13 @@
namespace mindspore {
namespace ops {
constexpr auto kNameInvertPermutation = "InvertPermutation";
/// \brief Computes the inverse of an index permutation.
/// Refer to Python API @ref mindspore.ops.InvertPermutation for more details.
class MS_CORE_API InvertPermutation : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive under its canonical name.
InvertPermutation() : PrimitiveC(kNameInvertPermutation) {}
/// \brief Destructor.
~InvertPermutation() = default;
MS_DECLARE_PARENT(InvertPermutation, PrimitiveC);
};


+ 5
- 0
mindspore/core/ops/is_finite.h View File

@@ -24,11 +24,16 @@
namespace mindspore {
namespace ops {
constexpr auto kNameIsFinite = "IsFinite";
/// \brief Determines which elements are finite for each position.
/// Refer to Python API @ref mindspore.ops.IsFinite for more details.
class MS_CORE_API IsFinite : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive under its canonical name.
IsFinite() : PrimitiveC(kNameIsFinite) {}
/// \brief Destructor.
~IsFinite() = default;
MS_DECLARE_PARENT(IsFinite, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.IsFinite for the inputs.
void Init() {}
};
} // namespace ops


+ 12
- 0
mindspore/core/ops/l2_normalize.h View File

@@ -27,15 +27,27 @@
namespace mindspore {
namespace ops {
constexpr auto kNameL2Normalize = "L2Normalize";
/// \brief L2 Normalization Operator. Refer to Python API @ref mindspore.ops.L2Normalize for more details.
class MS_CORE_API L2Normalize : public PrimitiveC {
public:
/// \brief Constructor.
///
/// \param[in] name Primitive name to register under; defaults to kNameL2Normalize.
explicit L2Normalize(const std::string &name = kNameL2Normalize) : PrimitiveC(name) {}
/// \brief Destructor.
~L2Normalize() = default;
MS_DECLARE_PARENT(L2Normalize, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.L2Normalize for the inputs.
void Init(const std::vector<int64_t> &axis, const float epsilon = 1e-4);
/// \brief Set axis.
void set_axis(const std::vector<int64_t> &axis);
/// \brief Set epsilon.
void set_epsilon(const float epsilon);
/// \brief Get axis.
///
/// \return axis.
std::vector<int64_t> get_axis() const;
/// \brief Get epsilon.
///
/// \return epsilon.
float get_epsilon() const;
};
AbstractBasePtr L2NormalizeInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,


+ 17
- 0
mindspore/core/ops/layer_norm.h View File

@@ -27,18 +27,35 @@
namespace mindspore {
namespace ops {
constexpr auto kNameLayerNorm = prim::kLayerNorm;
/// \brief Applies the Layer Normalization to the input tensor.
/// Refer to Python API @ref mindspore.ops.LayerNorm for more details.
class MS_CORE_API LayerNorm : public PrimitiveC {
 public:
  /// \brief Constructor. Registers the primitive under its canonical name.
  LayerNorm() : PrimitiveC(kNameLayerNorm) {}
  /// \brief Constructor that registers the primitive under a caller-supplied name.
  ///
  /// \param[in] k_name Primitive name to register. Taken by const reference to
  /// avoid the needless string copy the previous by-value parameter forced.
  explicit LayerNorm(const std::string &k_name) : PrimitiveC(k_name) {}
  /// \brief Destructor.
  ~LayerNorm() = default;
  MS_DECLARE_PARENT(LayerNorm, PrimitiveC);
  /// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.LayerNorm for the inputs.
  void Init(const int64_t begin_norm_axis = 1, const int64_t begin_params_axis = 1, const float epsilon = 1e-7);
  /// \brief Set begin_norm_axis.
  void set_begin_norm_axis(const int64_t begin_norm_axis);
  /// \brief Set begin_params_axis.
  void set_begin_params_axis(const int64_t begin_params_axis);
  /// \brief Set epsilon.
  void set_epsilon(const float epsilon);
  /// \brief Get begin_norm_axis.
  ///
  /// \return begin_norm_axis.
  int64_t get_begin_norm_axis() const;
  /// \brief Get begin_params_axis.
  ///
  /// \return begin_params_axis.
  int64_t get_begin_params_axis() const;
  /// \brief Get epsilon.
  ///
  /// \return epsilon.
  float get_epsilon() const;
};



+ 8
- 0
mindspore/core/ops/leaky_relu.h View File

@@ -28,13 +28,21 @@
namespace mindspore {
namespace ops {
constexpr auto kNameLeakyRelu = "LeakyRelu";
/// \brief Leaky ReLU activation function. Refer to Python API @ref mindspore.nn.LeakyReLU for more details.
class MS_CORE_API LeakyRelu : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive under its canonical name.
LeakyRelu() : PrimitiveC(kNameLeakyRelu) {}
/// \brief Destructor.
~LeakyRelu() = default;
MS_DECLARE_PARENT(LeakyRelu, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.nn.LeakyReLU for the inputs.
void Init(const float negative_slope);
/// \brief Set negative_slope.
void set_negative_slope(const float negative_slope);
/// \brief Get negative_slope.
///
/// \return negative_slope.
float get_negative_slope() const;
};



+ 4
- 0
mindspore/core/ops/lerp.h View File

@@ -27,9 +27,13 @@
namespace mindspore {
namespace ops {
constexpr auto kNameLerp = "Lerp";
/// \brief Does a linear interpolation of two tensors start and end based on a float or tensor weight.
/// Refer to Python API @ref mindspore.ops.Lerp for more details.
class Lerp : public PrimitiveC {
public:
/// \brief Constructor.
Lerp() : PrimitiveC(kNameLerp) { InitIOName({"start", "end", "weight"}, {"output"}); }
/// \brief Destructor.
~Lerp() = default;
MS_DECLARE_PARENT(Lerp, PrimitiveC);
};


+ 4
- 0
mindspore/core/ops/less.h View File

@@ -27,9 +27,13 @@
namespace mindspore {
namespace ops {
constexpr auto kNameLess = "Less";
/// \brief Computes the boolean value of \f$x<y\f$ element-wise.
/// Refer to Python API @ref mindspore.ops.Less for more details.
class MS_CORE_API Less : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive name and declares input names
/// ("x", "y") and output name ("output").
Less() : PrimitiveC(kNameLess) { InitIOName({"x", "y"}, {"output"}); }
/// \brief Destructor.
~Less() = default;
MS_DECLARE_PARENT(Less, PrimitiveC);
};


+ 5
- 0
mindspore/core/ops/less_equal.h View File

@@ -26,11 +26,16 @@
namespace mindspore {
namespace ops {
constexpr auto kNameLessEqual = "LessEqual";
/// \brief Computes the boolean value of \f$x<=y\f$ element-wise.
/// Refer to Python API @ref mindspore.ops.LessEqual for more details.
class MS_CORE_API LessEqual : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive name and declares input names
/// ("x", "y") and output name ("output").
LessEqual() : PrimitiveC(kNameLessEqual) { InitIOName({"x", "y"}, {"output"}); }
/// \brief Destructor.
~LessEqual() = default;
MS_DECLARE_PARENT(LessEqual, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.LessEqual for the inputs.
void Init() {}
};



+ 4
- 0
mindspore/core/ops/lin_space.h View File

@@ -26,9 +26,13 @@
namespace mindspore {
namespace ops {
constexpr auto kNameLinSpace = "LinSpace";
/// \brief Returns a Tensor whose value is evenly spaced in the interval start and stop (including start and stop).
/// Refer to Python API @ref mindspore.ops.LinSpace for more details.
class MS_CORE_API LinSpace : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive name and declares input names
/// ("start", "stop", "num") and output name ("output").
LinSpace() : PrimitiveC(kNameLinSpace) { InitIOName({"start", "stop", "num"}, {"output"}); }
/// \brief Destructor.
~LinSpace() = default;
MS_DECLARE_PARENT(LinSpace, PrimitiveC);
};


+ 4
- 0
mindspore/core/ops/log.h View File

@@ -26,9 +26,13 @@
namespace mindspore {
namespace ops {
constexpr auto kNameLog = prim::kLog;
/// \brief Returns the natural logarithm of a tensor element-wise.
/// Refer to Python API @ref mindspore.ops.Log for more details.
class MS_CORE_API Log : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive under the shared kPrimLog name
/// and declares input name ("x") and output name ("y").
Log() : PrimitiveC(prim::kPrimLog->name()) { InitIOName({"x"}, {"y"}); }
/// \brief Destructor.
~Log() = default;
MS_DECLARE_PARENT(Log, PrimitiveC);
};


+ 5
- 0
mindspore/core/ops/log1p.h View File

@@ -27,11 +27,16 @@

namespace mindspore {
namespace ops {
/// \brief Returns the natural logarithm of one plus the input tensor element-wise.
/// Refer to Python API @ref mindspore.ops.Log1p for more details.
class MS_CORE_API Log1p : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive under the shared kPrimLog1p name
/// and declares input name ("x") and output name ("y").
Log1p() : PrimitiveC(prim::kPrimLog1p->name()) { InitIOName({"x"}, {"y"}); }
/// \brief Destructor.
~Log1p() = default;
MS_DECLARE_PARENT(Log1p, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.Log1p for the inputs.
void Init() {}
};
} // namespace ops


+ 8
- 0
mindspore/core/ops/log_softmax.h View File

@@ -28,13 +28,21 @@
namespace mindspore {
namespace ops {
constexpr auto kNameLogSoftmax = "LogSoftmax";
/// \brief Log Softmax activation function. Refer to Python API @ref mindspore.ops.LogSoftmax for more details.
class MS_CORE_API LogSoftmax : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive name and declares input name ("x")
/// and output name ("output").
LogSoftmax() : PrimitiveC(kNameLogSoftmax) { InitIOName({"x"}, {"output"}); }
/// \brief Destructor.
~LogSoftmax() = default;
MS_DECLARE_PARENT(LogSoftmax, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.LogSoftmax for the inputs.
void Init(const int64_t axis = -1);
/// \brief Set axis.
void set_axis(const int64_t axis);
/// \brief Get axis.
///
/// \return axis.
int64_t get_axis() const;
};
} // namespace ops


+ 5
- 0
mindspore/core/ops/logical_and.h View File

@@ -27,11 +27,16 @@
namespace mindspore {
namespace ops {
constexpr auto kNameLogicalAnd = "LogicalAnd";
/// \brief Computes the “logical AND” of two tensors element-wise.
/// Refer to Python API @ref mindspore.ops.LogicalAnd for more details.
class MS_CORE_API LogicalAnd : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive name and declares input names
/// ("x", "y") and output name ("output").
LogicalAnd() : PrimitiveC(kNameLogicalAnd) { InitIOName({"x", "y"}, {"output"}); }
/// \brief Destructor.
~LogicalAnd() = default;
MS_DECLARE_PARENT(LogicalAnd, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.LogicalAnd for the inputs.
void Init() {}
};
AbstractBasePtr LogicalAndInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,


+ 5
- 0
mindspore/core/ops/logical_not.h View File

@@ -25,11 +25,16 @@
namespace mindspore {
namespace ops {
constexpr auto kNameLogicalNot = "LogicalNot";
/// \brief Computes the “logical NOT” of a tensor element-wise.
/// Refer to Python API @ref mindspore.ops.LogicalNot for more details.
class MS_CORE_API LogicalNot : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive name and declares input name ("x")
/// and output name ("output").
LogicalNot() : PrimitiveC(kNameLogicalNot) { InitIOName({"x"}, {"output"}); }
/// \brief Destructor.
~LogicalNot() = default;
MS_DECLARE_PARENT(LogicalNot, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.LogicalNot for the inputs.
void Init() {}
};



+ 5
- 0
mindspore/core/ops/logical_or.h View File

@@ -25,11 +25,16 @@
namespace mindspore {
namespace ops {
constexpr auto kNameLogicalOr = "LogicalOr";
/// \brief Computes the “logical OR” of two tensors element-wise.
/// Refer to Python API @ref mindspore.ops.LogicalOr for more details.
class MS_CORE_API LogicalOr : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive name and declares input names
/// ("x", "y") and output name ("output").
LogicalOr() : PrimitiveC(kNameLogicalOr) { InitIOName({"x", "y"}, {"output"}); }
/// \brief Destructor.
~LogicalOr() = default;
MS_DECLARE_PARENT(LogicalOr, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.LogicalOr for the inputs.
void Init() {}
};
AbstractBasePtr LogicalOrInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,


+ 5
- 0
mindspore/core/ops/logical_xor.h View File

@@ -23,11 +23,16 @@
namespace mindspore {
namespace ops {
constexpr auto kNameLogicalXor = "LogicalXor";
/// \brief Computes the truth value of x1 XOR x2, element-wise.
/// Refer to Python API @ref mindspore.numpy.logical_xor for more details.
class MS_CORE_API LogicalXor : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive under its canonical name.
LogicalXor() : PrimitiveC(kNameLogicalXor) {}
/// \brief Destructor.
~LogicalXor() = default;
MS_DECLARE_PARENT(LogicalXor, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.numpy.logical_xor for the inputs.
void Init() {}
};
} // namespace ops


+ 24
- 0
mindspore/core/ops/lrn.h View File

@@ -27,22 +27,46 @@
namespace mindspore {
namespace ops {
constexpr auto kNameLRN = "LRN";
/// \brief Local Response Normalization. Refer to Python API @ref mindspore.ops.LRN for more details.
class MS_CORE_API LRN : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive name and declares input name ("x")
/// and output name ("y").
LRN() : PrimitiveC(kNameLRN) { InitIOName({"x"}, {"y"}); }
/// \brief Destructor.
~LRN() = default;
MS_DECLARE_PARENT(LRN, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.LRN for the inputs.
void Init(const int64_t depth_radius = 5, const float bias = 1.0, const float alpha = 1.0, const float beta = 0.5,
const std::string &norm_region = "ACROSS_CHANNELS");
/// \brief Set depth_radius.
void set_depth_radius(const int64_t depth_radius);
/// \brief Set bias.
void set_bias(const float bias);
/// \brief Set alpha.
void set_alpha(const float alpha);
/// \brief Set beta.
void set_beta(const float beta);
/// \brief Set norm_region.
void set_norm_region(const std::string &norm_region);
/// \brief Get depth_radius.
///
/// \return depth_radius.
int64_t get_depth_radius() const;
/// \brief Get bias.
///
/// \return bias.
float get_bias() const;
/// \brief Get alpha.
///
/// \return alpha.
float get_alpha() const;
/// \brief Get beta.
///
/// \return beta.
float get_beta() const;
/// \brief Get norm_region.
///
/// \return norm_region.
std::string get_norm_region() const;
};
AbstractBasePtr LrnInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,


+ 44
- 0
mindspore/core/ops/lstm.h View File

@@ -31,32 +31,76 @@
namespace mindspore {
namespace ops {
constexpr auto kNameLSTM = "LSTM";
/// \brief Performs the Long Short-Term Memory (LSTM) on the input.
/// Refer to Python API @ref mindspore.ops.LSTM for more details.
class MS_CORE_API LSTM : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive under its canonical name.
LSTM() : PrimitiveC(kNameLSTM) {}
/// \brief Destructor.
~LSTM() = default;
MS_DECLARE_PARENT(LSTM, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.LSTM for the inputs.
void Init(const int64_t input_size, const int64_t hidden_size, const int64_t num_layers, const bool has_bias,
const float dropout, const bool bidirectional = false, const float zoneout_cell = 0.0f,
const float zoneout_hidden = 0.0f);
/// \brief Set input_size.
void set_input_size(const int64_t input_size);
/// \brief Get input_size.
///
/// \return input_size.
int64_t get_input_size() const;
/// \brief Set hidden_size.
void set_hidden_size(const int64_t hidden_size);
/// \brief Get hidden_size.
///
/// \return hidden_size.
int64_t get_hidden_size() const;
/// \brief Set num_layers.
void set_num_layers(const int64_t num_layers);
/// \brief Get num_layers.
///
/// \return num_layers.
int64_t get_num_layers() const;
/// \brief Set has_bias.
void set_has_bias(const bool has_bias);
/// \brief Get has_bias.
///
/// \return has_bias.
bool get_has_bias() const;
/// \brief Set dropout.
void set_dropout(const float dropout);
/// \brief Get dropout.
///
/// \return dropout.
float get_dropout() const;
/// \brief Set bidirectional.
void set_bidirectional(const bool bidirectional);
/// \brief Get bidirectional.
///
/// \return bidirectional.
bool get_bidirectional() const;
/// \brief Set num_directions.
void set_num_directions(const int64_t num_directions);
/// \brief Get num_directions.
///
/// \return num_directions.
int64_t get_num_directions() const;
/// \brief Set zoneout_cell.
void set_zoneout_cell(float zoneout_cell);
/// \brief Get zoneout_cell.
///
/// \return zoneout_cell.
float get_zoneout_cell() const;
/// \brief Set zoneout_hidden.
void set_zoneout_hidden(float zoneout_hidden);
/// \brief Get zoneout_hidden.
///
/// \return zoneout_hidden.
float get_zoneout_hidden() const;
/// \brief Get good_ld.
///
/// \return good_ld.
int64_t get_good_ld(const int64_t dim, const int64_t type_size);
};
AbstractBasePtr LstmInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,


+ 4
- 0
mindspore/core/ops/masked_fill.h View File

@@ -27,9 +27,13 @@
namespace mindspore {
namespace ops {
constexpr auto kNameMaskedFill = "MaskedFill";
/// \brief Fills elements of self tensor with value where mask is True.
/// Refer to Python API @ref mindspore.ops.MaskedFill for more details.
class MaskedFill : public PrimitiveC {
public:
/// \brief Constructor.
MaskedFill() : PrimitiveC(kNameMaskedFill) { InitIOName({"input", "mask", "value"}, {"output"}); }
/// \brief Destructor.
~MaskedFill() = default;
MS_DECLARE_PARENT(MaskedFill, PrimitiveC);
};


+ 12
- 0
mindspore/core/ops/mat_mul.h View File

@@ -28,15 +28,27 @@
namespace mindspore {
namespace ops {
constexpr auto kNameMatMul = "MatMul";
/// \brief Multiplies matrix a and matrix b. Refer to Python API @ref mindspore.ops.MatMul for more details.
class MS_CORE_API MatMul : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive name and declares input names
/// ("x1", "x2") and output name ("output").
MatMul() : PrimitiveC(kNameMatMul) { InitIOName({"x1", "x2"}, {"output"}); }
/// \brief Destructor.
~MatMul() = default;
MS_DECLARE_PARENT(MatMul, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.MatMul for the inputs.
void Init(bool transpose_a = false, bool transpose_b = false);
/// \brief Set transpose_a.
void set_transpose_a(bool transpose_a);
/// \brief Set transpose_b.
void set_transpose_b(bool transpose_b);
/// \brief Get transpose_a.
///
/// \return transpose_a.
bool get_transpose_a() const;
/// \brief Get transpose_b.
///
/// \return transpose_b.
bool get_transpose_b() const;
};
} // namespace ops


+ 28
- 0
mindspore/core/ops/max_pool.h View File

@@ -28,27 +28,55 @@
namespace mindspore {
namespace ops {
constexpr auto kNameMaxPool = "MaxPool";
/// \brief Max pooling operation. Refer to Python API @ref mindspore.ops.MaxPool for more details.
class MS_CORE_API MaxPool : public PrimitiveC {
 public:
  /// \brief Constructor. Registers the primitive name and declares input name ("x")
  /// and output name ("output").
  MaxPool() : PrimitiveC(kNameMaxPool) { InitIOName({"x"}, {"output"}); }
  /// \brief Constructor that registers the primitive under a caller-supplied name.
  ///
  /// \param[in] k_name Primitive name to register. Taken by const reference to
  /// avoid the needless string copy the previous by-value parameter forced.
  explicit MaxPool(const std::string &k_name) : PrimitiveC(k_name) { InitIOName({"x"}, {"output"}); }
  /// \brief Destructor.
  ~MaxPool() = default;
  MS_DECLARE_PARENT(MaxPool, PrimitiveC);
  /// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.MaxPool for the inputs.
  void Init(const std::vector<int64_t> &kernel_size = {1}, const std::vector<int64_t> &stride = {1},
            const PadMode &pad_mode = VALID, const Format &format = NCHW,
            const std::vector<int64_t> &pad = {0, 0, 0, 0}, const RoundMode &round_mode = FLOOR);
  /// \brief Set pad_mode.
  void set_pad_mode(const PadMode &pad_mode);
  /// \brief Set kernel_size.
  void set_kernel_size(const std::vector<int64_t> &kernel_size);
  /// \brief Set strides.
  void set_strides(const std::vector<int64_t> &strides);
  /// \brief Set format.
  void set_format(const Format &format);
  /// \brief Set pad.
  void set_pad(const std::vector<int64_t> &pad);
  /// \brief Set round_mode.
  void set_round_mode(const RoundMode &round_mode);

  /// \brief Get kernel_size.
  ///
  /// \return kernel_size.
  std::vector<int64_t> get_kernel_size() const;
  /// \brief Get strides.
  ///
  /// \return strides.
  std::vector<int64_t> get_strides() const;
  /// \brief Get pad_mode.
  ///
  /// \return pad_mode.
  PadMode get_pad_mode() const;
  /// \brief Get format.
  ///
  /// \return format.
  Format get_format() const;
  /// \brief Get pad.
  ///
  /// \return pad.
  std::vector<int64_t> get_pad() const;
  /// \brief Get round_mode.
  ///
  /// \return round_mode.
  RoundMode get_round_mode() const;
};



+ 5
- 0
mindspore/core/ops/maximum.h View File

@@ -25,11 +25,16 @@
namespace mindspore {
namespace ops {
constexpr auto kNameMaximum = "Maximum";
/// \brief Computes the maximum of input tensors element-wise.
/// Refer to Python API @ref mindspore.ops.Maximum for more details.
class MS_CORE_API Maximum : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive name and declares input names
/// ("x", "y") and output name ("output").
Maximum() : PrimitiveC(kNameMaximum) { InitIOName({"x", "y"}, {"output"}); }
/// \brief Destructor.
~Maximum() = default;
MS_DECLARE_PARENT(Maximum, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.Maximum for the inputs.
void Init() {}
};
AbstractBasePtr MaximumInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,


+ 5
- 0
mindspore/core/ops/minimum.h View File

@@ -27,11 +27,16 @@
namespace mindspore {
namespace ops {
constexpr auto kNameMinimum = "Minimum";
/// \brief Computes the minimum of input tensors element-wise.
/// Refer to Python API @ref mindspore.ops.Minimum for more details.
class MS_CORE_API Minimum : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive name and declares input names
/// ("x", "y") and output name ("output").
Minimum() : PrimitiveC(kNameMinimum) { InitIOName({"x", "y"}, {"output"}); }
/// \brief Destructor.
~Minimum() = default;
MS_DECLARE_PARENT(Minimum, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.Minimum for the inputs.
void Init() {}
};



+ 5
- 0
mindspore/core/ops/mod.h View File

@@ -23,11 +23,16 @@
namespace mindspore {
namespace ops {
constexpr auto kNameMod = "Mod";
/// \brief Computes the remainder of dividing the first input tensor by the second input tensor element-wise.
/// Refer to Python API @ref mindspore.ops.Mod for more details.
class MS_CORE_API Mod : public PrimitiveC {
public:
/// \brief Constructor. Registers the primitive name and declares input names
/// ("x", "y") and output name ("output").
Mod() : PrimitiveC(kNameMod) { InitIOName({"x", "y"}, {"output"}); }
/// \brief Destructor.
~Mod() = default;
MS_DECLARE_PARENT(Mod, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.Mod for the inputs.
void Init() {}
};
} // namespace ops


+ 4
- 0
mindspore/core/ops/mul.h View File

@@ -27,12 +27,16 @@
namespace mindspore {
namespace ops {
constexpr auto kNameMul = prim::kMul;
/// \brief Multiply two tensors element-wise. Refer to Python API @ref mindspore.ops.Mul for more details.
class MS_CORE_API Mul : public PrimitiveC {
 public:
  /// \brief Constructor. Registers the primitive name and declares input names
  /// ("x", "y") and output name ("output").
  Mul() : PrimitiveC(kNameMul) { InitIOName({"x", "y"}, {"output"}); }
  /// \brief Constructor that registers the primitive under a caller-supplied name.
  ///
  /// \param[in] k_name Primitive name to register. Taken by const reference to
  /// avoid the needless string copy the previous by-value parameter forced.
  explicit Mul(const std::string &k_name) : PrimitiveC(k_name) { InitIOName({"x", "y"}, {"output"}); }
  /// \brief Destructor.
  ~Mul() = default;
  MS_DECLARE_PARENT(Mul, PrimitiveC);
  /// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.Mul for the inputs.
  void Init() {}
};
AbstractBasePtr MulInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,


+ 5
- 0
mindspore/core/ops/neg.h View File

@@ -25,11 +25,16 @@
namespace mindspore {
namespace ops {
constexpr auto kNameNeg = prim::kNeg;
/// \brief Returns a tensor with negative values of the input tensor element-wise.
/// Refer to Python API @ref mindspore.ops.Neg for more details.
class MS_CORE_API Neg : public PrimitiveC {
 public:
  /// \brief Constructor. Registers the op name and its input/output names.
  Neg() : PrimitiveC(prim::kPrimNeg->name()) { InitIOName({"x"}, {"y"}); }
  /// \brief Destructor.
  ~Neg() = default;
  MS_DECLARE_PARENT(Neg, PrimitiveC);
  /// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.Neg for the inputs.
  void Init() {}  // No-op: this operator has no attributes to configure.
};



+ 5
- 0
mindspore/core/ops/neighborexchange.h View File

@@ -25,11 +25,16 @@
namespace mindspore {
namespace ops {
constexpr auto kNameNeighborExchange = "NeighborExchange";
/// \brief NeighborExchange sends data from the local rank to ranks in the send_rank_ids.
/// Refer to Python API @ref mindspore.ops.NeighborExchange for more details.
class MS_CORE_API NeighborExchange : public PrimitiveC {
 public:
  /// \brief Constructor. Registers the op name; no I/O names are declared here.
  NeighborExchange() : PrimitiveC(kNameNeighborExchange) {}
  /// \brief Destructor.
  ~NeighborExchange() = default;
  MS_DECLARE_PARENT(NeighborExchange, PrimitiveC);
  /// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.NeighborExchange for the inputs.
  void Init() {}  // No-op: attributes (if any) are handled elsewhere.
};
AbstractBasePtr NeighborExchangeInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,


+ 5
- 0
mindspore/core/ops/not_equal.h View File

@@ -26,11 +26,16 @@
namespace mindspore {
namespace ops {
constexpr auto kNameNotEqual = prim::kNotEqual;
/// \brief Computes the non-equivalence of two tensors element-wise.
/// Refer to Python API @ref mindspore.ops.NotEqual for more details.
class MS_CORE_API NotEqual : public PrimitiveC {
 public:
  /// \brief Constructor. Registers the op name and its input/output names.
  NotEqual() : PrimitiveC(prim::kPrimNotEqual->name()) { InitIOName({"x", "y"}, {"output"}); }
  /// \brief Destructor.
  ~NotEqual() = default;
  MS_DECLARE_PARENT(NotEqual, PrimitiveC);
  /// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.NotEqual for the inputs.
  void Init() {}  // No-op: this operator has no attributes to configure.
};



+ 8
- 0
mindspore/core/ops/one_hot.h View File

@@ -25,15 +25,23 @@

namespace mindspore {
namespace ops {
/// \brief Computes a one-hot tensor. Refer to Python API @ref mindspore.ops.OneHot for more details.
class MS_CORE_API OneHot : public PrimitiveC {
 public:
  /// \brief Constructor. Registers the op name and its input/output names.
  OneHot() : PrimitiveC(prim::kPrimOneHot->name()) {
    InitIOName({"indices", "depth", "on_value", "off_value"}, {"output"});
  }
  /// \brief Destructor.
  ~OneHot() = default;
  MS_DECLARE_PARENT(OneHot, PrimitiveC);
  /// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.OneHot for the inputs.
  ///
  /// \param[in] axis The axis attribute; see the Python API for its semantics.
  void Init(const int64_t axis);
  /// \brief Set axis.
  ///
  /// \param[in] axis Value to store as the axis attribute.
  void set_axis(const int64_t axis);
  /// \brief Get axis.
  ///
  /// \return axis.
  int64_t get_axis() const;
};
} // namespace ops


+ 4
- 0
mindspore/core/ops/ones.h View File

@@ -27,11 +27,15 @@

namespace mindspore {
namespace ops {
/// \brief Creates a tensor filled with value ones. Refer to Python API @ref mindspore.ops.Ones for more details.
class Ones : public PrimitiveC {
public:
/// \brief Constructor.
Ones() : PrimitiveC(prim::kPrimOnes->name()) {}
/// \brief Destructor.
~Ones() = default;
MS_DECLARE_PARENT(Ones, PrimitiveC);
/// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.Ones for the inputs.
void Init() {}
};
} // namespace ops


+ 5
- 0
mindspore/core/ops/ones_like.h View File

@@ -24,11 +24,16 @@

namespace mindspore {
namespace ops {
/// \brief Creates a new tensor. The values of all elements are 1.
/// Refer to Python API @ref mindspore.ops.OnesLike for more details.
class MS_CORE_API OnesLike : public PrimitiveC {
 public:
  /// \brief Constructor. Registers the op name; no I/O names are declared here.
  OnesLike() : PrimitiveC(prim::kPrimOnesLike->name()) {}
  /// \brief Destructor.
  ~OnesLike() = default;
  MS_DECLARE_PARENT(OnesLike, PrimitiveC);
  /// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.OnesLike for the inputs.
  void Init() {}  // No-op: this operator has no attributes to configure.
};
AbstractBasePtr OnesLikeInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,


+ 9
- 0
mindspore/core/ops/pack.h View File

@@ -31,13 +31,22 @@
namespace mindspore {
namespace ops {
constexpr auto kNamePack = "Pack";
/// \brief Stacks a list of tensors in specified axis.
/// Refer to Python API @ref mindspore.ops.Stack for more details.
class MS_CORE_API Pack : public PrimitiveC {
 public:
  /// \brief Constructor. Registers the op name; no I/O names are declared here.
  Pack() : PrimitiveC(kNamePack) {}
  /// \brief Destructor.
  ~Pack() = default;
  MS_DECLARE_PARENT(Pack, PrimitiveC);
  /// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.Stack for the inputs.
  ///
  /// \param[in] axis The axis attribute; defaults to 0.
  void Init(const int64_t &axis = 0);
  /// \brief Set axis.
  ///
  /// \param[in] axis Value to store as the axis attribute.
  void set_axis(const int64_t &axis);
  /// \brief Get axis.
  ///
  /// \return axis.
  int64_t get_axis() const;
};
AbstractBasePtr PackInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,


+ 8
- 0
mindspore/core/ops/pad.h View File

@@ -27,14 +27,22 @@
namespace mindspore {
namespace ops {
constexpr auto kNamePad = "Pad";
/// \brief Pads the input tensor according to the paddings. Refer to Python API @ref mindspore.ops.Pad for more details.
class MS_CORE_API Pad : public PrimitiveC {
 public:
  /// \brief Constructor. Registers the default op name and its input/output names.
  Pad() : PrimitiveC(kNamePad) { InitIOName({"x"}, {"y"}); }
  /// \brief Constructor that registers the primitive under a caller-supplied name.
  ///
  /// \param[in] k_name Name to register the primitive under.
  // Pass by const reference: the argument is only read, so the per-call std::string
  // copy of the by-value form is pure waste (clang-tidy performance-unnecessary-value-param).
  explicit Pad(const std::string &k_name) : PrimitiveC(k_name) { InitIOName({"x"}, {"y"}); }
  /// \brief Destructor.
  ~Pad() = default;
  MS_DECLARE_PARENT(Pad, PrimitiveC);
  /// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.Pad for the inputs.
  ///
  /// \param[in] paddings Per-dimension pad sizes; see the Python API for the layout.
  void Init(const std::vector<std::vector<int64_t>> &paddings);
  /// \brief Set paddings.
  ///
  /// \param[in] paddings Value to store as the paddings attribute.
  void set_paddings(const std::vector<std::vector<int64_t>> &paddings);
  /// \brief Get paddings.
  ///
  /// \return paddings.
  std::vector<std::vector<int64_t>> get_paddings() const;
};
AbstractBasePtr PadInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,


+ 5
- 0
mindspore/core/ops/pow.h View File

@@ -28,11 +28,16 @@
namespace mindspore {
namespace ops {
constexpr auto kNamePow = "Pow";
/// \brief Computes a tensor to the power of the second input.
/// Refer to Python API @ref mindspore.ops.Pow for more details.
class MS_CORE_API Pow : public PrimitiveC {
 public:
  /// \brief Constructor. Registers the op under the given name (default kNamePow)
  /// and declares its input/output names.
  ///
  /// \param[in] k_name Name to register the primitive under.
  explicit Pow(const std::string &k_name = kNamePow) : PrimitiveC(k_name) { InitIOName({"x", "y"}, {"output"}); }
  /// \brief Destructor.
  ~Pow() = default;
  MS_DECLARE_PARENT(Pow, PrimitiveC);
  /// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.Pow for the inputs.
  void Init();  // Defined out of line, unlike the inline no-op Init of sibling ops.
};
AbstractBasePtr PowInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,


+ 5
- 0
mindspore/core/ops/prelu.h View File

@@ -26,12 +26,17 @@
namespace mindspore {
namespace ops {
constexpr auto kNamePReLU = "PReLU";
/// \brief Parametric Rectified Linear Unit activation function.
/// Refer to Python API @ref mindspore.ops.PReLU for more details.
class MS_CORE_API PReLU : public PrimitiveC {
 public:
  /// \brief Constructor. Registers the default op name and its input/output names.
  PReLU() : PrimitiveC(kNamePReLU) { InitIOName({"x"}, {"y"}); }
  /// \brief Constructor that registers the primitive under a caller-supplied name.
  ///
  /// \param[in] k_name Name to register the primitive under.
  // Pass by const reference: the argument is only read, so the per-call std::string
  // copy of the by-value form is pure waste (clang-tidy performance-unnecessary-value-param).
  explicit PReLU(const std::string &k_name) : PrimitiveC(k_name) { InitIOName({"x"}, {"y"}); }
  /// \brief Destructor.
  ~PReLU() = default;
  MS_DECLARE_PARENT(PReLU, PrimitiveC);
  /// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.PReLU for the inputs.
  void Init() {}  // No-op: this operator has no attributes to configure.
};



+ 21
- 0
mindspore/core/ops/range.h View File

@@ -27,19 +27,40 @@
namespace mindspore {
namespace ops {
constexpr auto kNameRange = "Range";
/// \brief Creates a sequence of numbers in range [start, limit) with step size delta.
/// Refer to Python API @ref mindspore.nn.Range for more details.
class MS_CORE_API Range : public PrimitiveC {
 public:
  /// \brief Constructor. Registers the op name; no I/O names are declared here.
  Range() : PrimitiveC(kNameRange) {}
  /// \brief Destructor.
  ~Range() = default;
  MS_DECLARE_PARENT(Range, PrimitiveC);
  /// \brief Init. Refer to the parameters of Python API @ref mindspore.nn.Range for the inputs.
  ///
  /// \param[in] d_type Output element type code.
  /// \param[in] start First value of the sequence.
  /// \param[in] limit Exclusive upper bound of the sequence.
  /// \param[in] delta Step size between consecutive values.
  void Init(const int64_t d_type, const int64_t start, const int64_t limit, const int64_t delta);
  /// \brief Set d_type.
  void set_d_type(const int64_t d_type);
  /// \brief Set start.
  void set_start(const int64_t start);
  /// \brief Set limit.
  void set_limit(const int64_t limit);
  /// \brief Set delta.
  void set_delta(const int64_t delta);
  /// \brief Get d_type.
  ///
  /// \return d_type.
  int64_t get_d_type() const;
  /// \brief Get start.
  ///
  /// \return start.
  int64_t get_start() const;
  /// \brief Get limit.
  ///
  /// \return limit.
  int64_t get_limit() const;
  /// \brief Get delta.
  ///
  /// \return delta.
  int64_t get_delta() const;
};



+ 4
- 0
mindspore/core/ops/rank.h View File

@@ -27,11 +27,15 @@
namespace mindspore {
namespace ops {
constexpr auto kNameRank = "Rank";
/// \brief Returns the rank of a tensor. Refer to Python API @ref mindspore.ops.Rank for more details.
class MS_CORE_API Rank : public PrimitiveC {
 public:
  /// \brief Constructor. Registers the op name; no I/O names are declared here.
  // The previous body declared `auto prim_name = name();` and never used it;
  // the dead local (name() is a getter whose result was discarded) is removed.
  Rank() : PrimitiveC(kNameRank) {}
  /// \brief Destructor.
  ~Rank() = default;
  MS_DECLARE_PARENT(Rank, PrimitiveC);
  /// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.Rank for the inputs.
  void Init() {}  // No-op: this operator has no attributes to configure.
};
AbstractBasePtr RankInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,


+ 5
- 0
mindspore/core/ops/real_div.h View File

@@ -26,11 +26,16 @@
namespace mindspore {
namespace ops {
constexpr auto kNameRealDiv = prim::kRealDiv;
/// \brief Divides the first input tensor by the second input tensor in floating-point type element-wise.
/// Refer to Python API @ref mindspore.ops.RealDiv for more details.
class MS_CORE_API RealDiv : public PrimitiveC {
 public:
  /// \brief Constructor. Registers the op name and its input/output names.
  RealDiv() : PrimitiveC(kNameRealDiv) { InitIOName({"x", "y"}, {"output"}); }
  /// \brief Destructor.
  ~RealDiv() = default;
  MS_DECLARE_PARENT(RealDiv, PrimitiveC);
  /// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.RealDiv for the inputs.
  void Init() {}  // No-op: this operator has no attributes to configure.
};



+ 5
- 0
mindspore/core/ops/reciprocal.h View File

@@ -25,11 +25,16 @@
namespace mindspore {
namespace ops {
constexpr auto kNameReciprocal = prim::kReciprocal;
/// \brief Returns reciprocal of a tensor element-wise.
/// Refer to Python API @ref mindspore.ops.Reciprocal for more details.
class MS_CORE_API Reciprocal : public PrimitiveC {
 public:
  /// \brief Constructor. Registers the op name and its input/output names.
  Reciprocal() : PrimitiveC(prim::kPrimReciprocal->name()) { InitIOName({"x"}, {"y"}); }
  /// \brief Destructor.
  ~Reciprocal() = default;
  MS_DECLARE_PARENT(Reciprocal, PrimitiveC);
  /// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.Reciprocal for the inputs.
  void Init() {}  // No-op: this operator has no attributes to configure.
};



+ 4
- 0
mindspore/core/ops/reduce_all.h View File

@@ -27,9 +27,13 @@
namespace mindspore {
namespace ops {
constexpr auto kNameReduceAll = "ReduceAll";
/// \brief Reduces a dimension of a tensor by the “logicalAND” of all elements in the dimension.
/// Refer to Python API @ref mindspore.ops.ReduceAll for more details.
class MS_CORE_API ReduceAll : public Reduce {
 public:
  /// \brief Constructor. Registers the op name and its input/output names;
  /// attribute handling is inherited from the Reduce base class.
  ReduceAll() : Reduce(kNameReduceAll) { InitIOName({"input_x", "axis"}, {"y"}); }
  /// \brief Destructor.
  ~ReduceAll() = default;
  MS_DECLARE_PARENT(ReduceAll, Reduce);
};


+ 4
- 0
mindspore/core/ops/reduce_any.h View File

@@ -27,9 +27,13 @@
namespace mindspore {
namespace ops {
constexpr auto kNameReduceAny = "ReduceAny";
/// \brief Reduces a dimension of a tensor by the “logical OR” of all elements in the dimension.
/// Refer to Python API @ref mindspore.ops.ReduceAny for more details.
class MS_CORE_API ReduceAny : public Reduce {
 public:
  /// \brief Constructor. Registers the op name and its input/output names;
  /// attribute handling is inherited from the Reduce base class.
  ReduceAny() : Reduce(kNameReduceAny) { InitIOName({"input_x", "axis"}, {"y"}); }
  /// \brief Destructor.
  ~ReduceAny() = default;
  MS_DECLARE_PARENT(ReduceAny, Reduce);
};


+ 5
- 0
mindspore/core/ops/reduce_max.h View File

@@ -27,11 +27,16 @@
namespace mindspore {
namespace ops {
constexpr auto kNameReduceMax = "ReduceMax";
/// \brief Reduces a dimension of a tensor by the maximum value in this dimension.
/// Refer to Python API @ref mindspore.ops.ReduceMax for more details.
class MS_CORE_API ReduceMax : public Reduce {
 public:
  /// \brief Constructor. Registers the op name and its input/output names;
  /// attribute handling is inherited from the Reduce base class.
  ReduceMax() : Reduce(kNameReduceMax) { InitIOName({"input_x", "axis"}, {"y"}); }
  /// \brief Destructor.
  ~ReduceMax() = default;
  MS_DECLARE_PARENT(ReduceMax, Reduce);
  /// \brief Init. Refer to the parameters of Python API @ref mindspore.ops.ReduceMax for the inputs.
  void Init() {}  // No-op override; sibling Reduce subclasses omit this member.
};
} // namespace ops


+ 4
- 0
mindspore/core/ops/reduce_mean.h View File

@@ -27,9 +27,13 @@
namespace mindspore {
namespace ops {
constexpr auto kNameReduceMean = "ReduceMean";
/// \brief Reduces a dimension of a tensor by averaging all elements in the dimension.
/// Refer to Python API @ref mindspore.ops.ReduceMean for more details.
class MS_CORE_API ReduceMean : public Reduce {
 public:
  /// \brief Constructor. Registers the op name and its input/output names;
  /// attribute handling is inherited from the Reduce base class.
  ReduceMean() : Reduce(kNameReduceMean) { InitIOName({"input_x", "axis"}, {"y"}); }
  /// \brief Destructor.
  ~ReduceMean() = default;
  MS_DECLARE_PARENT(ReduceMean, Reduce);
};


Loading…
Cancel
Save