You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

test_compile_cache.py 6.7 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162
  1. # Copyright 2021 Huawei Technologies Co., Ltd
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. # ============================================================================
import os
import re
import shutil
import subprocess
import pytest
import numpy as np

# Captures the text between a '{' and the next '}' (non-greedy, DOTALL);
# used to pull the printed output groups out of the run logs. Each network
# script is expected to print two such groups: output values, then shape.
match_output = re.compile(r'[{](.*?)[}]', re.S)
# Matches an integer or decimal number, e.g. '12' or '3.14'.
match_num = re.compile(r'\d+\.?\d*', re.S)
  23. def run_twice_with_same_network(file_name, cache_path, log_file_name_first, log_file_name_second):
  24. # Clear compile cache folder and log files
  25. if os.path.exists(cache_path):
  26. shutil.rmtree(cache_path)
  27. if os.path.exists(log_file_name_first):
  28. os.remove(log_file_name_first)
  29. if os.path.exists(log_file_name_second):
  30. os.remove(log_file_name_second)
  31. assert not os.path.exists(cache_path)
  32. assert not os.path.exists(log_file_name_first)
  33. assert not os.path.exists(log_file_name_second)
  34. # First run without compile cache
  35. cmd_first = f"GLOG_v=2 python " + file_name + " '" + cache_path + "' > " + log_file_name_first + " 2>&1"
  36. subprocess.check_output(cmd_first, shell=True)
  37. assert os.path.exists(log_file_name_first)
  38. assert os.path.exists(cache_path)
  39. with open(log_file_name_first, "r") as f_first:
  40. data_first = f_first.read()
  41. assert "Check the consistency of dependency files hash failed. Execute all the compilation actions." in data_first
  42. # Take out the result of the first run
  43. match_output_first = re.findall(match_output, data_first)
  44. assert len(match_output_first) == 2
  45. nums_first = re.findall(match_num, match_output_first[0])
  46. array_first = np.array([float(x) for x in nums_first])
  47. shape_first = re.findall(match_num, match_output_first[1])
  48. array_shape_first = np.array([int(x) for x in shape_first])
  49. # Second run with compile cache
  50. cmd_second = cmd_first = f"GLOG_v=2 python " + file_name + " '" + cache_path + "' > " + log_file_name_second +\
  51. " 2>&1"
  52. subprocess.check_output(cmd_second, shell=True)
  53. assert os.path.exists(log_file_name_second)
  54. with open(log_file_name_second, "r") as f_second:
  55. data_second = f_second.read()
  56. assert "Use the compilation cache and execute the backend actions only. Be aware of correctness risks." in \
  57. data_second
  58. # Take out the result of the second run
  59. match_output_second = re.findall(match_output, data_second)
  60. assert len(match_output_second) == 2
  61. nums_second = re.findall(match_num, match_output_second[0])
  62. array_second = np.array([float(x) for x in nums_second])
  63. shape_second = re.findall(match_num, match_output_second[1])
  64. array_shape_second = np.array([int(x) for x in shape_second])
  65. assert np.allclose(array_first, array_second, 0.0001, 0.0001)
  66. assert (array_shape_first == array_shape_second).all()
  67. # Clean files
  68. os.remove(log_file_name_first)
  69. os.remove(log_file_name_second)
  70. shutil.rmtree(cache_path)
  71. def run_twice_with_different_networks(file_name_first, file_name_second, cache_path, log_file_name_first,
  72. log_file_name_second):
  73. # Clear compile cache folder
  74. if os.path.exists(cache_path):
  75. shutil.rmtree(cache_path)
  76. assert not os.path.exists(cache_path)
  77. # First run without compile cache
  78. cmd_first = f"GLOG_v=2 python " + file_name_first + " '" + cache_path + "' > " + log_file_name_first + " 2>&1"
  79. subprocess.check_output(cmd_first, shell=True)
  80. assert os.path.exists(log_file_name_first)
  81. assert os.path.exists(cache_path)
  82. with open(log_file_name_first, "r") as f_first:
  83. data_first = f_first.read()
  84. assert "Check the consistency of dependency files hash failed. Execute all the compilation actions." in data_first
  85. # Second run with compile cache
  86. cmd_second = f"GLOG_v=2 python " + file_name_second + " '" + cache_path + "' > " + log_file_name_second + " 2>&1"
  87. subprocess.check_output(cmd_second, shell=True)
  88. assert os.path.exists(log_file_name_second)
  89. with open(log_file_name_second, "r") as f_second:
  90. data_second = f_second.read()
  91. assert "Check the consistency of dependency files hash failed. Execute all the compilation actions." in data_second
  92. # Clean log files
  93. os.remove(log_file_name_first)
  94. os.remove(log_file_name_second)
  95. shutil.rmtree(cache_path)
  96. @pytest.mark.level0
  97. @pytest.mark.platform_x86_ascend_training
  98. @pytest.mark.platform_arm_ascend_training
  99. @pytest.mark.env_onecard
  100. def test_compile_cache_load_weights():
  101. """
  102. Feature: Compile cache.
  103. Description: Test whether the compile cache can load the value of parameters successfully.
  104. Expectation: success.
  105. """
  106. run_twice_with_same_network("run_network_with_weights.py", "./weight", "weight_first.txt", "weight_second.txt")
  107. @pytest.mark.level0
  108. @pytest.mark.platform_x86_ascend_training
  109. @pytest.mark.platform_arm_ascend_training
  110. @pytest.mark.env_onecard
  111. def test_compile_cache_lenet():
  112. """
  113. Feature: Compile cache.
  114. Description: Test whether the regular compile cache function can run successfully.
  115. Expectation: success.
  116. """
  117. run_twice_with_same_network("run_lenet.py", "./lenet", "lenet_first.txt", "lenet_second.txt")
  118. @pytest.mark.level0
  119. @pytest.mark.platform_x86_ascend_training
  120. @pytest.mark.platform_arm_ascend_training
  121. @pytest.mark.env_onecard
  122. def test_compile_cache_net_with_control_flow():
  123. """
  124. Feature: Compile cache.
  125. Description: Test whether the compile cache can load ref type parameter correctly.
  126. Expectation: success.
  127. """
  128. run_twice_with_same_network("run_network_with_control_flow.py", "./control_flow", "control_net_first.txt",
  129. "control_net_second.txt")
  130. @pytest.mark.level0
  131. @pytest.mark.platform_x86_ascend_training
  132. @pytest.mark.platform_arm_ascend_training
  133. @pytest.mark.env_onecard
  134. def test_compile_cache_auto_detect():
  135. """
  136. Feature: Compile cache.
  137. Description: Test whether the compile cache auto-detection function can run successfully.
  138. Expectation: success.
  139. """
  140. run_twice_with_different_networks("run_lenet.py", "run_network_with_weights.py", "./lenet_auto_detect",
  141. "auto_detect_first.txt", "auto_detect_second.txt")