Browse Source

!27002 MindSpore aicpu ops support CpuKernel.

Merge pull request !27002 from linqingke/fix_aicpu
tags/v1.6.0
i-robot Gitee 4 years ago
parent
commit
037235be7f
6 changed files with 58 additions and 37 deletions
  1. +8
    -0
      mindspore/ccsrc/backend/kernel_compiler/aicpu/aicpu_kernel_mod.cc
  2. +2
    -0
      mindspore/ccsrc/backend/kernel_compiler/aicpu/aicpu_util.h
  3. +11
    -2
      mindspore/ccsrc/runtime/device/ascend/kernel_select_ascend.cc
  4. +1
    -0
      mindspore/ccsrc/utils/utils.h
  5. +36
    -3
      mindspore/ops/operations/custom_ops.py
  6. +0
    -32
      mindspore/ops/primitive.py

+ 8
- 0
mindspore/ccsrc/backend/kernel_compiler/aicpu/aicpu_kernel_mod.cc View File

@@ -85,6 +85,10 @@ void AicpuOpKernelMod::CreateCpuKernelInfo(const std::vector<AddressPtr> &inputs
node_so_ = kLibAicpuKernelSoName;
}
}
} else {
if (kCpuKernelBaseOps.find(node_name_) == kCpuKernelBaseOps.end()) {
node_name_ = kCpuRunApi;
}
}
// InputOutputAddr
vector<void *> io_addrs;
@@ -187,6 +191,10 @@ std::vector<TaskInfoPtr> AicpuOpKernelMod::GenTask(const std::vector<AddressPtr>
node_so_ = kLibAicpuKernelSoName;
}
}
} else {
if (kCpuKernelBaseOps.find(node_name_) == kCpuKernelBaseOps.end()) {
node_name_ = kCpuRunApi;
}
}
std::vector<void *> input_data_addrs;
(void)std::transform(std::begin(inputs), std::end(inputs), std::back_inserter(input_data_addrs),


+ 2
- 0
mindspore/ccsrc/backend/kernel_compiler/aicpu/aicpu_util.h View File

@@ -55,6 +55,7 @@ constexpr auto kStackDestroy = "StackDestroy";
constexpr auto kEditDistance = "EditDistance";
constexpr auto kGatherD = "GatherD";
constexpr auto kIdentity = "Identity";
constexpr auto kRandomChoiceWithMask = "RandomChoiceWithMask";
constexpr auto kUpdateCache = "UpdateCache";
constexpr auto kCacheSwapTable = "CacheSwapTable";
constexpr auto kSubAndFilter = "SubAndFilter";
@@ -73,6 +74,7 @@ const std::set<std::string> kCpuKernelOps{kIdentity, kMaskedSelect, kMaske
kSearchSorted, kResizeBilinear, kResizeBilinearGrad, kScatterElements};
const std::set<std::string> kCacheKernelOps{kUpdateCache, kCacheSwapTable, kSubAndFilter,
kPadAndShift, kDropout3D, kDropout2D};
const std::set<std::string> kCpuKernelBaseOps{kGetNext, kInitData, kRandomChoiceWithMask};
const std::set<std::string> kDynamicInputOps{
kPrint, kPack, kMeshgrid, kStackInitOpName, kStackDestroyOpName, kStackPushOpName, kStackPopOpName, kDynamicStitch};
struct AicpuParamHead {


+ 11
- 2
mindspore/ccsrc/runtime/device/ascend/kernel_select_ascend.cc View File

@@ -485,12 +485,21 @@ KernelSelectStatus SelectCustomKernelInfo(const CNodePtr &kernel_node, KernelTyp
*kernel_type = KernelType::TBE_KERNEL;
} else if (kCustomTypeAkg.find(func_type) != kCustomTypeAkg.end()) {
*kernel_type = KernelType::AKG_KERNEL;
} else if (func_type == kCustomTypeAICPU) {
*kernel_type = KernelType::AICPU_KERNEL;
} else {
MS_LOG(EXCEPTION) << "Unsupported func type for Custom op on Ascend, it should be 'tbe', 'ir_builder', "
<< "'tvm_compute' or 'hybrid', but got [" << func_type << "] for Custom op [" << op_name << "]";
}
kernel::OpImplyType imply_type =
*kernel_type == KernelType::TBE_KERNEL ? kernel::OpImplyType::kTBE : kernel::OpImplyType::kAKG;
static const std::map<KernelType, kernel::OpImplyType> kKernelImplyTypeMap{
{KernelType::TBE_KERNEL, kernel::OpImplyType::kTBE},
{KernelType::AKG_KERNEL, kernel::OpImplyType::kAKG},
{KernelType::AICPU_KERNEL, kernel::OpImplyType::kAICPU}};
auto it = kKernelImplyTypeMap.find(*kernel_type);
kernel::OpImplyType imply_type = kernel::OpImplyType::kAKG;
if (it != kKernelImplyTypeMap.end()) {
imply_type = it->second;
}
auto op_info_ptr = mindspore::kernel::OpLib::FindOp(op_name, imply_type);
// Only process Custom op that does not has reg info
if (op_info_ptr != nullptr) {


+ 1
- 0
mindspore/ccsrc/utils/utils.h View File

@@ -503,6 +503,7 @@ constexpr auto kAttrCustAicpu = "cust_aicpu";
constexpr auto kCustomTypeAOT = "aot";
constexpr auto kCustomTypePyfunc = "pyfunc";
constexpr auto kCustomTypeTbe = "tbe";
constexpr auto kCustomTypeAICPU = "aicpu";
const std::set<std::string> kCustomTypeAkg = {"ir_builder", "tvm_compute", "hybrid"};

// primal attr key name


+ 36
- 3
mindspore/ops/operations/custom_ops.py View File

@@ -230,6 +230,31 @@ class Custom(ops.PrimitiveWithInfer):
... res = self.square_with_bias(x, 1.0)
... return res
>>>
>>> # Example, func_type = "aicpu"
>>> resize_bilinear_op_info = CustomRegOp("ResizeBilinear") \
... .fusion_type("OPAQUE") \
... .input(0, "input", "required") \
... .output(1, "output", "required") \
... .attr("align_corners", "required", "bool") \
... .attr("cust_aicpu", "optional", "str", "aicpu_kernels") \
... .dtype_format(DataType.F32_Default, DataType.F32_Default) \
... .dtype_format(DataType.F16_Default, DataType.F32_Default) \
... .target("Ascend") \
... .get_op_info()
>>>
>>> @custom_info_register(resize_bilinear_op_info)
... def resize_bilinear_aicpu():
... return
>>>
>>> class AicpuNet(Cell):
... def __init__(self):
... super(AicpuNet, self).__init__()
... self.resize_bilinear_op = ops.Custom(resize_bilinear_aicpu, out_shape=[1, 1, 9, 9], \
... out_dtype=mstype.float32, func_type="aicpu")
... def construct(self, x):
... res = self.resize_bilinear_op(x, True, "aicpu_kernels")
... return res
>>>
>>> # Example, func_type = "aot"
>>> class AOTSingleOutputNet(Cell):
... def __init__(self, func, out_shapes, out_types, reg=None):
@@ -257,7 +282,7 @@ class Custom(ops.PrimitiveWithInfer):
ops.PrimitiveWithInfer.__init__(self, "Custom")

self.supported_targets = ["Ascend", "GPU", "CPU"]
self.supported_func_type = ["akg", "tbe", "aot", "pyfunc"]
self.supported_func_type = ["akg", "tbe", "aicpu", "aot", "pyfunc"]
self.func = func
self.func_type = func_type
self.func_name = ""
@@ -368,6 +393,8 @@ class Custom(ops.PrimitiveWithInfer):
reg_info = info
if reg_info is None and hasattr(self.func, "reg_info"):
reg_info = getattr(self.func, "reg_info")
if self.func_type == "aicpu" and reg_info is None:
raise ValueError("custom aicpu ops must set reg_info, but current reg_info is None.")
reg_info_list = self._get_expanded_list(reg_info)
for reg_info in reg_info_list:
if not isinstance(reg_info, (str, dict)):
@@ -433,11 +460,17 @@ class Custom(ops.PrimitiveWithInfer):
else:
Custom.registered_func[self.func] = [target]

def _get_op_name(self, reg_info):
    """Return the op name to use in registration info.

    For "aicpu" custom ops the registered ``op_name`` from reg_info is adopted
    as this primitive's unique name (and recorded as the "uniq_name" prim
    attr); for every other func_type the existing unique name is kept.
    """
    if self.func_type != "aicpu":
        return self.uniq_name
    # aicpu ops are selected by their registered kernel name, so take it
    # verbatim from the registration info and expose it as a prim attr.
    self.uniq_name = reg_info["op_name"]
    self.add_prim_attr("uniq_name", self.uniq_name)
    return self.uniq_name

def _reformat_reg_info(self, reg_info, target):
"""Reformat registration information."""
if not isinstance(reg_info, dict):
raise TypeError("reg_info should be of type dict, but got {}".format(type(reg_info)))
reg_info["op_name"] = self.uniq_name
reg_info["op_name"] = self._get_op_name(reg_info)
reg_info["imply_type"] = self._get_imply_type(reg_info, target)
if not isinstance(reg_info.get("fusion_type"), str) or not reg_info["fusion_type"].strip():
reg_info["fusion_type"] = "OPAQUE"
@@ -489,7 +522,7 @@ class Custom(ops.PrimitiveWithInfer):
reg_info["imply_type"].strip():
return reg_info["imply_type"]
# Infer imply_type from func_type
func_type_to_imply_type = {"akg": "AKG", "tbe": "TBE", "aot": target, "pyfunc": target}
func_type_to_imply_type = {"akg": "AKG", "tbe": "TBE", "aicpu": "AiCPU", "aot": target, "pyfunc": target}
return func_type_to_imply_type.get(self.func_type, "AKG")

def _save_attr(self, reg_info):


+ 0
- 32
mindspore/ops/primitive.py View File

@@ -648,38 +648,6 @@ def prim_attr_register(fn):
return deco


def custom_aicpu_register(custom_aicpu_so="mindspore_aicpu_kernels"):
    """Register the custom aicpu attribute on a primitive's __init__.

    Args:
        custom_aicpu_so (str): Name/path of the dynamic library loaded by the
            aicpu ops. Default: "mindspore_aicpu_kernels".

    Returns:
        Function, a decorator for a primitive ``__init__``. The returned
        wrapper re-initializes the primitive with its (possibly substituted)
        class name, sets the "cust_aicpu" prim attr to ``custom_aicpu_so``,
        then runs the original ``__init__``.

    Raises:
        ValueError: If ``custom_aicpu_so`` is not a str (raised when the
            wrapped ``__init__`` is invoked, preserving the original timing).
    """

    def deco(fn):
        def wrapper(self, *args, **kwargs):
            if not isinstance(custom_aicpu_so, str):
                raise ValueError(f"custom_aicpu_so must be a str, but got {custom_aicpu_so}")
            # Honor an explicit substitute_name so the op is registered under
            # the substituted name rather than the Python class name.
            class_name = self.__class__.__name__
            if hasattr(self.__class__, "substitute_name"):
                class_name = self.__class__.substitute_name
            if isinstance(self, PrimitiveWithInfer):
                PrimitiveWithInfer.__init__(self, class_name)
            elif isinstance(self, PrimitiveWithCheck):
                PrimitiveWithCheck.__init__(self, class_name)
            else:
                # Fix: use class_name (not self.__class__.__name__) so the
                # substitute_name handling is consistent across all branches.
                Primitive.__init__(self, class_name)
            attr_name = "cust_aicpu"
            self.add_prim_attr(attr_name, custom_aicpu_so)
            self.init_attrs[attr_name] = custom_aicpu_so
            ret = fn(self, *args, **kwargs)
            return ret

        return wrapper

    return deco


def constexpr(fn=None, get_instance=True, name=None):
"""
Creates a PrimitiveWithInfer operator that can infer the value at compile time. We can use it to define a function


Loading…
Cancel
Save