Browse Source

avoid updating the brief cache too often

tags/v1.0.0
Li Hongzhang 5 years ago
parent
commit
bb5fb9b1e1
1 changed files with 8 additions and 4 deletions
  1. +8
    -4
      mindinsight/datavisual/data_transform/data_manager.py

+ 8
- 4
mindinsight/datavisual/data_transform/data_manager.py View File

@@ -862,11 +862,10 @@ class DataManager:
"""
logger.info("Start to load data")
DataManager.check_reload_interval(reload_interval)
thread = threading.Thread(target=self._load_data_in_thread_wrapper,
thread = threading.Thread(target=self._load_data_in_thread,
name='start_load_data_thread',
args=(reload_interval,),
daemon=True)
thread.daemon = True
thread.start()
return thread

@@ -884,7 +883,7 @@ class DataManager:
if reload_interval < 0:
raise ParamValueError("The value of reload interval should be >= 0.")

def _load_data_in_thread_wrapper(self, reload_interval):
def _load_data_in_thread(self, reload_interval):
"""Wrapper for load data in thread."""
if self._load_data_lock.locked():
return
@@ -911,8 +910,13 @@ class DataManager:
max_processes_cnt=settings.MAX_PROCESSES_COUNT) as computing_resource_mgr:
with computing_resource_mgr.get_executor() as executor:
self._brief_cache.update_cache(executor)
brief_cache_update = time.time()
for _ in self._detail_cache.update_cache(executor):
self._brief_cache.update_cache(executor)
update_interval = time.time() - brief_cache_update
logger.debug('Loading one round of detail cache taking %ss.', update_interval)
if update_interval > 3: # Use 3 seconds as threshold to avoid updating too often
self._brief_cache.update_cache(executor)
brief_cache_update += update_interval
executor.wait_all_tasks_finish()
with self._status_mutex:
if not self._brief_cache.has_content() and not self._detail_cache.has_content():


Loading…
Cancel
Save