Browse Source

!13272 add full name to tune result file

From: @laiyongqiang
Reviewed-by: @jjfeing,@zhoufeng54
Signed-off-by: @zhoufeng54
tags/v1.2.0-rc1
mindspore-ci-bot Gitee 4 years ago
parent
commit
a4c6a5cea0
3 changed files with 17 additions and 18 deletions
  1. +12
    -1
      mindspore/_extends/parallel_compile/tbe_compiler/re_construct_json.py
  2. +5
    -15
      mindspore/_extends/parallel_compile/tbe_compiler/tuner.py
  3. +0
    -2
      mindspore/ops/_op_impl/tbe/less_equal.py

+ 12
- 1
mindspore/_extends/parallel_compile/tbe_compiler/re_construct_json.py View File

@@ -63,7 +63,7 @@ def create_inout_desc(ori_json):
out_list = []
for _, item in enumerate(ori_json):
item[0]["data_type"] = item[0]["dtype"] if "dtype" in item[0] else 0
if "ori_format" in item[0] or "ori_shape"in item[0]:
if "ori_format" in item[0] or "ori_shape" in item[0]:
item[0]["L1_addr_offset"] = 0
item[0]["L1_fusion_type"] = -1
item[0]["L1_workspace_size"] = -1
@@ -111,6 +111,7 @@ def create_compute_op(ori_json):
"input_desc": create_inout_desc(ori_json["inputs"]) if "inputs" in ori_json else "null",
"module_name": ori_json["module_name"],
"name": full_name,
"ori_name": [full_name],
"output_desc": create_inout_desc(ori_json["outputs"]) if "outputs" in ori_json else "null",
"output_data_desc": create_inout_desc(ori_json["outputs"]) if "outputs" in ori_json else "null",
"pattern": pattern,
@@ -147,6 +148,15 @@ def single_to_fusion(json_file, tune_mode):
return res


def add_ori_name_to_fusion(json_info):
    """Stamp every non-Data op in a fusion json with the fusion op's full name.

    Mutates ``json_info`` in place: each op dict in
    ``json_info["fusion_op"]["op_list"]`` whose ``"type"`` is not ``"Data"``
    gets an ``"ori_name"`` entry holding a single-element list with the
    fusion op's ``"full_name"``. Returns None.
    """
    fusion_op = json_info["fusion_op"]
    full_name = fusion_op["full_name"]
    for node in fusion_op["op_list"]:
        if node["type"] == "Data":
            continue
        # A fresh list per op, so later mutation of one op's ori_name
        # cannot leak into its siblings.
        node["ori_name"] = [full_name]


def fusion_to_fusion(json_str, tune_mode):
"""
Add l1_size for fusion json
@@ -158,6 +168,7 @@ def fusion_to_fusion(json_str, tune_mode):
json_info = json.loads(json_str)
json_info["fusion_op"]["l1_size"] = -1
json_info["SocInfo"]["autoTilingMode"] = tune_mode
add_ori_name_to_fusion(json_info)
end_file = json_info["fusion_op"]
end_file["SocInfo"] = json_info["SocInfo"]
res = json.dumps(end_file, ensure_ascii=False)


+ 5
- 15
mindspore/_extends/parallel_compile/tbe_compiler/tuner.py View File

@@ -41,6 +41,7 @@ PLATFORM_FLAG = ["ascend310", "ascend910", "Hi3796CV300ES", "ascend710", "ascend

class TbeTuner:
"""tbe tuner for ga tune or rl tune"""

def __init__(self, offline_tune, tune_mode):
self.offline_tune = offline_tune
self.tune_init = False
@@ -286,6 +287,7 @@ class TbeTuner:
converted_json = single_to_fusion(json.dumps(json_info), tune_mode="RL")
op_type = json_info['op_info']['name']
kernel_name = json_info['op_info']['kernel_name']
full_name = json_info['op_info']['full_name']
tune_mode = "RL"
set_current_op_name(kernel_name)
# todo build with build_single_op_from_c
@@ -307,25 +309,13 @@ class TbeTuner:
job_type = RL_ONLINE
graph_id = 0
l1size = 0 # todo need to verify
ret = dispatch_single_tune_task(graph_id, task_id, l1size, base_kernel, kernel_name, op_module_name,
ret = dispatch_single_tune_task(graph_id, task_id, l1size, base_kernel, kernel_name, full_name,
op_module_name + "@" + op_module_name, op_type, op_type, op_args)

self.module_list[op_module_name] = 1
self.fusion_need_sync += 1
return ret, job_type, json.dumps(compile_info)

def get_op_module_names(self, json_info):
    """
    Get the comma-separated module names of all ops in a fusion op json.

    :param json_info: fusion op info dict; reads json_info["fusion_op"]["op_list"]
    :return: the "module_name" values of the listed ops joined by ",";
             ops without a "module_name" key are skipped; "" if none have one
    """
    # ",".join replaces the original concatenate-then-trim ("...,"[:-1])
    # pattern: no trailing separator to strip and no quadratic string growth.
    return ",".join(op["module_name"]
                    for op in json_info["fusion_op"]["op_list"]
                    if "module_name" in op)

def fusion_rl_tune(self, task_id, json_info):
"""
RL tune for fusion op
@@ -336,6 +326,7 @@ class TbeTuner:
if 'fusion_op' not in json_info or not json_info['fusion_op']:
raise ValueError("Json string Errors, key:fusion_op not found.")
kernel_name = json_info["fusion_op"]["fusion_op_name"]
full_name = json_info["fusion_op"]["full_name"]
set_current_op_name(kernel_name)
converted_json = fusion_to_fusion(json.dumps(json_info), tune_mode="RL")
job_type = RL_COMPILE
@@ -355,8 +346,7 @@ class TbeTuner:
job_type = RL_ONLINE
graph_id = 0
l1size = 0
op_model_name = self.get_op_module_names(json_info)
ret = dispatch_fusion_tune_task(graph_id, task_id, l1size, base_kernel, kernel_name, op_model_name,
ret = dispatch_fusion_tune_task(graph_id, task_id, l1size, base_kernel, kernel_name, full_name,
converted_json)
return ret, job_type



+ 0
- 2
mindspore/ops/_op_impl/tbe/less_equal.py View File

@@ -23,8 +23,6 @@ less_equal_op_info = TBERegOp("LessEqual") \
.compute_cost(10) \
.kernel_name("less_equal") \
.partial_flag(True) \
.attr("begin_norm_axis", "required", "int", "all") \
.attr("begin_params_axis", "required", "int", "all") \
.input(0, "x1", False, "required", "all") \
.input(1, "x2", False, "required", "all") \
.output(0, "y", False, "required", "all") \


Loading…
Cancel
Save