Merge pull request !4258 from leopz/fix_doctags/v0.7.0-beta
@@ -68,7 +68,7 @@ using mindspore::abstract::AbstractTuplePtr;
 const char IR_TYPE_ANF[] = "anf_ir";
 const char IR_TYPE_ONNX[] = "onnx_ir";
-const char IR_TYPE_BINARY[] = "binary_ir";
+const char IR_TYPE_MINDIR[] = "mind_ir";

 ExecutorPyPtr ExecutorPy::executor_ = nullptr;
 std::mutex ExecutorPy::instance_lock_;
@@ -222,7 +222,7 @@ py::bytes ExecutorPy::GetFuncGraphProto(const std::string &phase, const std::str
     return proto_str;
   }

-  if (ir_type == IR_TYPE_BINARY) {
+  if (ir_type == IR_TYPE_MINDIR) {
     std::string proto_str = GetBinaryProtoString(fg_ptr);
     if (proto_str.empty()) {
       MS_LOG(EXCEPTION) << "Graph proto is empty.";
@@ -445,15 +445,17 @@ def export(network, *inputs, file_name, mean=127.5, std_dev=127.5, file_format='
         file_name (str): File name of model to export.
         mean (int): Input data mean. Default: 127.5.
         std_dev (int, float): Input data variance. Default: 127.5.
-        file_format (str): MindSpore currently supports 'GEIR', 'ONNX' and 'BINARY' format for exported
+        file_format (str): MindSpore currently supports 'GEIR', 'ONNX' and 'MINDIR' format for exported
             quantization aware model. Default: 'GEIR'.

             - GEIR: Graph Engine Intermediate Representation. An intermediate representation format of
               Ascend model.
-            - BINARY: Binary format for model. An intermediate representation format for models.
+            - MINDIR: MindSpore Native Intermediate Representation for Anf. An intermediate representation format
+              for MindSpore models.
+              Recommended suffix for output file is '.mindir'.
     """
     supported_device = ["Ascend", "GPU"]
-    supported_formats = ['GEIR', 'BINARY']
+    supported_formats = ['GEIR', 'MINDIR']
     mean = validator.check_type("mean", mean, (int, float))
     std_dev = validator.check_type("std_dev", std_dev, (int, float))
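For context, here is a minimal sketch of how the quantization-aware export documented above might be called with the new format. The signature and the mean/std_dev defaults come from this hunk; the import path (mindspore.train.quant) and the TinyNet stand-in are assumptions, and a real call would use a network trained with quantization aware ops.

    import numpy as np
    import mindspore.nn as nn
    from mindspore import Tensor
    # Assumption: the quantization-aware export() changed above is reachable via
    # mindspore.train.quant; only its signature is taken from this diff.
    from mindspore.train.quant import quant

    class TinyNet(nn.Cell):
        """Stand-in network; a real export would use a quantization aware-trained Cell."""
        def __init__(self):
            super(TinyNet, self).__init__()
            self.conv = nn.Conv2d(3, 8, 3)

        def construct(self, x):
            return self.conv(x)

    net = TinyNet()
    dummy_input = Tensor(np.ones([1, 3, 224, 224]).astype(np.float32))
    # mean/std_dev keep their documented defaults; 'MINDIR' replaces the old 'BINARY' value.
    quant.export(net, dummy_input, file_name='tiny_net.mindir',
                 mean=127.5, std_dev=127.5, file_format='MINDIR')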
@@ -453,17 +453,19 @@ def export(net, *inputs, file_name, file_format='GEIR'):
         net (Cell): MindSpore network.
         inputs (Tensor): Inputs of the `net`.
         file_name (str): File name of model to export.
-        file_format (str): MindSpore currently supports 'GEIR', 'ONNX' and 'BINARY' format for exported model.
+        file_format (str): MindSpore currently supports 'GEIR', 'ONNX' and 'MINDIR' format for exported model.

             - GEIR: Graph Engine Intermediate Representation. An intermediate representation format of
               Ascend model.
             - ONNX: Open Neural Network eXchange. An open format built to represent machine learning models.
-            - BINARY: Binary format for model. An intermediate representation format for models.
+            - MINDIR: MindSpore Native Intermediate Representation for Anf. An intermediate representation format
+              for MindSpore models.
+              Recommended suffix for output file is '.mindir'.
     """
     logger.info("exporting model file:%s format:%s.", file_name, file_format)
     check_input_data(*inputs, data_class=Tensor)
-    supported_formats = ['GEIR', 'ONNX', 'BINARY']
+    supported_formats = ['GEIR', 'ONNX', 'MINDIR']
     if file_format not in supported_formats:
         raise ValueError(f'Illegal file format {file_format}, it must be one of {supported_formats}')
     # switch network mode to infer when it is training
@@ -485,10 +487,10 @@ def export(net, *inputs, file_name, file_format='GEIR'):
         with open(file_name, 'wb') as f:
             os.chmod(file_name, stat.S_IWUSR | stat.S_IRUSR)
             f.write(onnx_stream)
-    elif file_format == 'BINARY':  # file_format is 'BINARY'
-        phase_name = 'export.binary'
+    elif file_format == 'MINDIR':  # file_format is 'MINDIR'
+        phase_name = 'export.mindir'
         graph_id, _ = _executor.compile(net, *inputs, phase=phase_name, do_convert=False)
-        onnx_stream = _executor._get_func_graph_proto(graph_id, 'binary_ir')
+        onnx_stream = _executor._get_func_graph_proto(graph_id, 'mind_ir')
         with open(file_name, 'wb') as f:
             os.chmod(file_name, stat.S_IWUSR | stat.S_IRUSR)
             f.write(onnx_stream)
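A quick way to exercise the new 'MINDIR' branch end to end is to export a trivial network and check that the serialized proto was written with the restricted permissions set above. A minimal sketch; the import of export from mindspore.train.serialization and the AddNet definition are assumptions, since the test snippets in this PR only show the export calls themselves.

    import os
    import stat
    import numpy as np
    import mindspore.nn as nn
    from mindspore import Tensor
    # Assumption: export() is the serialization entry point changed in this hunk.
    from mindspore.train.serialization import export

    class AddNet(nn.Cell):
        """Trivial network used only to drive the MINDIR export path."""
        def construct(self, x, y):
            return x + y

    x = Tensor(np.ones(4).astype(np.float32))
    y = Tensor(np.ones(4).astype(np.float32))
    export(AddNet(), x, y, file_name='add.mindir', file_format='MINDIR')

    # The branch above writes the proto and chmods the file to owner read/write only.
    st = os.stat('add.mindir')
    assert st.st_size > 0, 'exported MindIR file is empty'
    assert stat.S_IMODE(st.st_mode) == (stat.S_IWUSR | stat.S_IRUSR)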
@@ -36,7 +36,7 @@ y = np.ones(4).astype(np.float32)
 def export_net():
     add = Net()
     output = add(Tensor(x), Tensor(y))
-    export(add, Tensor(x), Tensor(y), file_name='tensor_add.pb', file_format='BINARY')
+    export(add, Tensor(x), Tensor(y), file_name='tensor_add.pb', file_format='MINDIR')
     print(x)
     print(y)
     print(output.asnumpy())
@@ -62,14 +62,14 @@ def export_add_model():
     net = AddNet()
     x = np.ones(4).astype(np.float32)
     y = np.ones(4).astype(np.float32)
-    export(net, Tensor(x), Tensor(y), file_name='add.pb', file_format='BINARY')
+    export(net, Tensor(x), Tensor(y), file_name='add.pb', file_format='MINDIR')

 def export_bert_model():
     net = BertModel(bert_net_cfg, False)
     input_ids = np.random.randint(0, 1000, size=(2, 32), dtype=np.int32)
     segment_ids = np.zeros((2, 32), dtype=np.int32)
     input_mask = np.zeros((2, 32), dtype=np.int32)
-    export(net, Tensor(input_ids), Tensor(segment_ids), Tensor(input_mask), file_name='bert.pb', file_format='BINARY')
+    export(net, Tensor(input_ids), Tensor(segment_ids), Tensor(input_mask), file_name='bert.pb', file_format='MINDIR')

 if __name__ == '__main__':
     export_add_model()
@@ -322,10 +322,10 @@ def test_export():
 @non_graph_engine
-def test_binary_export():
+def test_mindir_export():
     net = MYNET()
     input_data = Tensor(np.random.randint(0, 255, [1, 3, 224, 224]).astype(np.float32))
-    export(net, input_data, file_name="./me_binary_export.pb", file_format="BINARY")
+    export(net, input_data, file_name="./me_binary_export.mindir", file_format="MINDIR")

 class PrintNet(nn.Cell):