You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; can include dashes ('-'); and can be up to 35 characters long.

export.py 2.1 kB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849
  1. # Copyright 2020 Huawei Technologies Co., Ltd
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. # ============================================================================
  15. """
  16. ##############export checkpoint file into air, onnx, mindir models#################
  17. python export.py
  18. """
  19. import numpy as np
  20. from mindspore import Tensor, load_checkpoint, load_param_into_net, export, context
  21. from utils.config import config
  22. from src.textcnn import TextCNN
  23. from src.dataset import MovieReview, SST2, Subjectivity
  24. context.set_context(mode=context.GRAPH_MODE, device_target=config.device_target)
  25. if config.device_target == "Ascend":
  26. context.set_context(device_id=config.device_id)
  27. if __name__ == '__main__':
  28. if config.dataset == 'MR':
  29. instance = MovieReview(root_dir=config.data_path, maxlen=config.word_len, split=0.9)
  30. elif config.dataset == 'SUBJ':
  31. instance = Subjectivity(root_dir=config.data_path, maxlen=config.word_len, split=0.9)
  32. elif config.dataset == 'SST2':
  33. instance = SST2(root_dir=config.data_path, maxlen=config.word_len, split=0.9)
  34. else:
  35. raise ValueError("dataset is not support.")
  36. net = TextCNN(vocab_len=instance.get_dict_len(), word_len=config.word_len,
  37. num_classes=config.num_classes, vec_length=config.vec_length)
  38. param_dict = load_checkpoint(config.ckpt_file)
  39. load_param_into_net(net, param_dict)
  40. input_arr = Tensor(np.ones([config.batch_size, config.word_len], np.int32))
  41. export(net, input_arr, file_name=config.file_name, file_format=config.file_format)