
summary_watcher.py

# Copyright 2020-2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Summary watcher module."""
import json
import os
import re
import datetime
from pathlib import Path

from mindinsight.datavisual.common.log import logger
from mindinsight.datavisual.common.validation import Validation
from mindinsight.datavisual.utils.tools import Counter
from mindinsight.datavisual.utils.utils import contains_null_byte
from mindinsight.datavisual.common.exceptions import MaxCountExceededError
from mindinsight.utils.exceptions import FileSystemPermissionError

LINEAGE_SUMMARY_SUFFIX = '_lineage'
EXPLAIN_SUMMARY_SUFFIX = '_explain'
DUMP_FILE_PREFIX = 'dump_'


class SummaryWatcher:
    """SummaryWatcher class."""

    SUMMARY_FILENAME_REGEX = r'summary\.(?P<timestamp>\d+)'
    PB_FILENAME_REGEX = r'\.pb$'
    PROFILER_DIRECTORY_REGEX = r'^profiler'
    CLUSTER_PROFILER_DIRECTORY_REGEX = r'^cluster_profiler$'
    MAX_SUMMARY_DIR_COUNT = 999

    # Scan at most 20000 files/directories (approximately 1 second)
    # if overall is False in SummaryWatcher.list_summary_directories,
    # to avoid long-time blocking.
    MAX_SCAN_COUNT = 20000
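    # Illustrative note: a file whose name contains, e.g., 'summary.1600000000'
    # (a hypothetical filename) matches SUMMARY_FILENAME_REGEX, and the captured
    # timestamp 1600000000 is later interpreted as the creation time of that file.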
    def __init__(self):
        self._analyzers = []

    def register_folder_analyzer(self, analyzer):
        """Register folder analyzer."""
        self._analyzers.append(analyzer)
    def list_summary_directories(self, summary_base_dir, overall=True, list_explain=False):
        """
        List summary directories within base directory.

        Args:
            summary_base_dir (str): Path of summary base directory.
            overall (bool): Limit the total number of entries scanned if overall is False.
            list_explain (bool): Indicates whether to list only the mindexplain folder.
                Default is False, which means the mindexplain folder is not listed.

        Returns:
            list, list of summary directory info, each of which includes the following attributes.

                - relative_path (str): Relative path of summary directory, referring to settings.SUMMARY_BASE_DIR,
                  starting with "./".
                - create_time (datetime): Creation time of summary file.
                - update_time (datetime): Modification time of summary file.
                - profiler (dict): Profiler info, including profiler subdirectory path, profiler creation time and
                  profiler modification time.

        Examples:
            >>> from mindinsight.datavisual.data_transform.summary_watcher import SummaryWatcher
            >>> summary_watcher = SummaryWatcher()
            >>> directories = summary_watcher.list_summary_directories('/summary/base/dir')
        """
        if contains_null_byte(summary_base_dir=summary_base_dir):
            return []

        relative_path = os.path.join('.', '')
        if not self._is_valid_summary_directory(summary_base_dir, relative_path):
            return []

        summary_dict = {}
        counter = Counter(max_count=None if overall else self.MAX_SCAN_COUNT)

        try:
            entries = os.scandir(summary_base_dir)
        except PermissionError:
            logger.error('Path of summary base directory is not accessible.')
            raise FileSystemPermissionError('Path of summary base directory is not accessible.')

        # Sort entries in ascending order of modification time.
        entries = [entry for entry in entries if not entry.is_symlink()]
        entries = sorted(entries, key=lambda x: x.stat().st_mtime)
        for entry in entries:
            if len(summary_dict) == self.MAX_SUMMARY_DIR_COUNT:
                break
            try:
                counter.add()
            except MaxCountExceededError:
                logger.info('Stopped further scanning because overall is False and the '
                            'number of scanned files exceeds the upper limit.')
                break
            if entry.is_symlink():
                pass
            elif entry.is_file():
                self._update_summary_dict(summary_dict, summary_base_dir, relative_path, entry, list_explain)
            elif entry.is_dir():
                self._update_summary_dict(summary_dict, summary_base_dir, relative_path, entry, list_explain)
                entry_path = os.path.realpath(os.path.join(summary_base_dir, entry.name))
                self._scan_subdir_entries(summary_dict, summary_base_dir, entry_path, entry, counter, list_explain)

        directories = []
        for key, value in summary_dict.items():
            directory = {
                'relative_path': key,
                **value
            }
            directories.append(directory)

        # Sort by update time in descending order and relative path in ascending order.
        directories.sort(key=lambda x: (-int(x['update_time'].timestamp()), x['relative_path']))

        return directories
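    # A returned directory item is a dict shaped roughly as follows (values are illustrative):
    #
    #     {'relative_path': './job-01',
    #      'create_time': datetime.datetime(...), 'update_time': datetime.datetime(...),
    #      'summary_files': 1, 'lineage_files': 0, 'explain_files': 0, 'graph_files': 0,
    #      'profiler': None, 'dump_dir': None}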
    def _scan_subdir_entries(self, summary_dict, summary_base_dir, entry_path, entry, counter, list_explain):
        """
        Scan subdir entries.

        Args:
            summary_dict (dict): Temporary data structure to hold summary directory info.
            summary_base_dir (str): Path of summary base directory.
            entry_path (str): Path of the directory entry.
            entry (DirEntry): Directory entry whose subdirectory is scanned.
            counter (Counter): An instance of Counter.
            list_explain (bool): Indicates whether to list only the mindexplain folder.
        """
        try:
            subdir_entries = os.scandir(entry_path)
        except PermissionError:
            logger.warning('Path of %s under summary base directory is not accessible.', entry.name)
            return

        # Sort entries in ascending order of modification time.
        subdir_entries = [subdir_entry for subdir_entry in subdir_entries if not subdir_entry.is_symlink()]
        subdir_entries = sorted(subdir_entries, key=lambda x: x.stat().st_mtime)
        for subdir_entry in subdir_entries:
            if len(summary_dict) == self.MAX_SUMMARY_DIR_COUNT:
                break
            try:
                counter.add()
            except MaxCountExceededError:
                logger.info('Stopped further scanning because overall is False and the '
                            'number of scanned files exceeds the upper limit.')
                break
            subdir_relative_path = os.path.join('.', entry.name)
            if subdir_entry.is_symlink():
                pass
            self._update_summary_dict(summary_dict, summary_base_dir, subdir_relative_path, subdir_entry, list_explain)

        relative_path = './'
        self._check_by_analyzers(entry, summary_base_dir, relative_path, summary_dict)
    def _is_valid_summary_directory(self, summary_base_dir, relative_path):
        """
        Check if the given summary directory is valid.

        Args:
            summary_base_dir (str): Path of summary base directory.
            relative_path (str): Relative path of summary directory, referring to summary base directory,
                starting with "./".

        Returns:
            bool, indicates if summary directory is valid.
        """
        summary_base_dir = os.path.realpath(summary_base_dir)
        summary_directory = os.path.realpath(os.path.join(summary_base_dir, relative_path))
        if not os.path.exists(summary_directory):
            logger.info('Path of summary directory does not exist.')
            return False

        if not os.path.isdir(summary_directory):
            logger.warning('Path of summary directory is not a valid directory.')
            return False

        try:
            Path(summary_directory).relative_to(Path(summary_base_dir))
        except ValueError:
            logger.warning('Relative path %s is not a subdirectory of summary_base_dir', relative_path)
            return False

        return True
    def _update_summary_dict(self, summary_dict, summary_base_dir, relative_path, entry, list_explain):
        """
        Update summary_dict with ctime and mtime.

        Args:
            summary_dict (dict): Temporary data structure to hold summary directory info.
            summary_base_dir (str): Path of summary base directory.
            relative_path (str): Relative path of summary directory, referring to summary base directory,
                starting with "./".
            entry (DirEntry): Directory entry instance needed to check with regular expression.
            list_explain (bool): Indicates whether to list only the mindexplain folder.
        """
        try:
            ctime, mtime = self._get_stat_time(entry)
        except FileNotFoundError:
            logger.warning('File %s not found', entry.name)
            return

        if entry.is_file():
            summary_pattern = re.search(self.SUMMARY_FILENAME_REGEX, entry.name)
            pb_pattern = re.search(self.PB_FILENAME_REGEX, entry.name)
            if not self._is_valid_pattern_result(summary_pattern, pb_pattern, list_explain, entry):
                return

            timestamp = None
            if summary_pattern is not None:
                timestamp = int(summary_pattern.groupdict().get('timestamp'))
                try:
                    # Extract creation time from the filename.
                    ctime = datetime.datetime.fromtimestamp(timestamp).astimezone()
                except OverflowError:
                    return

            if relative_path not in summary_dict:
                summary_dict[relative_path] = _new_entry(ctime, mtime)
                job_dict = _get_explain_job_info(summary_base_dir, relative_path, timestamp)
                summary_dict[relative_path].update(job_dict)

            if summary_dict[relative_path]['create_time'] < ctime:
                summary_dict[relative_path].update({'create_time': ctime, 'update_time': mtime})
                job_dict = _get_explain_job_info(summary_base_dir, relative_path, timestamp)
                summary_dict[relative_path].update(job_dict)

            # Classify the file: .pb files count as graph files, lineage and explain
            # summaries are counted separately, everything else is a regular summary.
            if not summary_pattern:
                summary_dict[relative_path]['graph_files'] += 1
            elif entry.name.endswith(LINEAGE_SUMMARY_SUFFIX):
                summary_dict[relative_path]['lineage_files'] += 1
            elif entry.name.endswith(EXPLAIN_SUMMARY_SUFFIX):
                summary_dict[relative_path]['explain_files'] += 1
            else:
                summary_dict[relative_path]['summary_files'] += 1

            self._check_by_analyzers(entry, summary_base_dir, relative_path, summary_dict)
        elif entry.is_dir():
            self._check_by_analyzers(entry, summary_base_dir, relative_path, summary_dict)
            if list_explain:
                return

            cluster_profiler_type, is_cluster_profiler = \
                self._find_cluster_profiler_dir(entry, summary_base_dir, relative_path)
            profiler_type, is_profiler = self._find_profiler_dir(entry, summary_base_dir, relative_path)
            if is_cluster_profiler or is_profiler:
                if is_cluster_profiler:
                    profiler_type = cluster_profiler_type

                profiler = {
                    'directory': os.path.join('.', entry.name),
                    'create_time': ctime,
                    'update_time': mtime,
                    'profiler_type': profiler_type
                }

                if relative_path in summary_dict:
                    summary_dict[relative_path]['profiler'] = profiler
                else:
                    summary_dict[relative_path] = _new_entry(ctime, mtime, profiler)
    def _check_by_analyzers(self, entry, summary_base_dir, relative_path, summary_dict):
        """Check by all analyzers."""
        try:
            ctime, mtime = self._get_stat_time(entry)
        except FileNotFoundError:
            logger.warning('File %s not found', entry.name)
            return

        for analyzer in self._analyzers:
            register_info = analyzer.analyze(entry, summary_base_dir, relative_path)
            if register_info:
                if relative_path not in summary_dict:
                    summary_dict[relative_path] = _new_entry(ctime, mtime)
                summary_dict[relative_path].update(register_info)

    def _get_stat_time(self, entry):
        """Get ctime and mtime."""
        stat = entry.stat()
        ctime = datetime.datetime.fromtimestamp(stat.st_ctime).astimezone()
        mtime = datetime.datetime.fromtimestamp(stat.st_mtime).astimezone()
        return ctime, mtime
    def _find_profiler_dir(self, entry, summary_base_dir, relative_path):
        """Find profiler dir by the given relative path."""
        profiler_pattern = re.search(self.PROFILER_DIRECTORY_REGEX, entry.name)
        full_dir_path = os.path.join(summary_base_dir, relative_path, entry.name)
        is_valid_profiler_dir, profiler_type = self._is_valid_profiler_directory(full_dir_path)
        if profiler_pattern is None or not is_valid_profiler_dir:
            return profiler_type, False

        return profiler_type, True

    def _find_cluster_profiler_dir(self, entry, summary_base_dir, relative_path):
        """Find cluster profiler dir by the given relative path."""
        cluster_profiler_pattern = re.search(self.CLUSTER_PROFILER_DIRECTORY_REGEX, entry.name)
        full_dir_path = os.path.join(summary_base_dir, relative_path, entry.name)
        is_valid_cluster_profiler_dir, profiler_type = self._is_valid_cluster_profiler_directory(full_dir_path)
        if cluster_profiler_pattern is None or not is_valid_cluster_profiler_dir:
            return profiler_type, False

        return profiler_type, True

    def _is_valid_pattern_result(self, summary_pattern, pb_pattern, list_explain, entry):
        """Check whether the pattern result is valid."""
        if summary_pattern is None and pb_pattern is None:
            return False
        if list_explain and not entry.name.endswith(EXPLAIN_SUMMARY_SUFFIX):
            return False
        if not list_explain and entry.name.endswith(EXPLAIN_SUMMARY_SUFFIX):
            return False

        return True
    def is_summary_directory(self, summary_base_dir, relative_path):
        """
        Check if the given summary directory is valid.

        Args:
            summary_base_dir (str): Path of summary base directory.
            relative_path (str): Relative path of summary directory, referring to summary base directory,
                starting with "./".

        Returns:
            bool, indicates if the given summary directory is valid.

        Examples:
            >>> from mindinsight.datavisual.data_transform.summary_watcher import SummaryWatcher
            >>> summary_watcher = SummaryWatcher()
            >>> summaries = summary_watcher.is_summary_directory('/summary/base/dir', './job-01')
        """
        if contains_null_byte(summary_base_dir=summary_base_dir, relative_path=relative_path):
            return False

        if not self._is_valid_summary_directory(summary_base_dir, relative_path):
            return False

        summary_directory = os.path.realpath(os.path.join(summary_base_dir, relative_path))
        try:
            entries = os.scandir(summary_directory)
        except PermissionError:
            logger.error('Path of summary directory is not accessible.')
            raise FileSystemPermissionError('Path of summary directory is not accessible.')

        for entry in entries:
            if entry.is_symlink():
                continue

            summary_pattern = re.search(self.SUMMARY_FILENAME_REGEX, entry.name)
            if summary_pattern is not None and entry.is_file():
                return True

            pb_pattern = re.search(self.PB_FILENAME_REGEX, entry.name)
            if pb_pattern is not None and entry.is_file():
                return True

            if entry.is_dir():
                profiler_pattern = re.search(self.PROFILER_DIRECTORY_REGEX, entry.name)
                cluster_profiler_pattern = re.search(self.CLUSTER_PROFILER_DIRECTORY_REGEX, entry.name)
                if profiler_pattern is not None or cluster_profiler_pattern is not None:
                    full_path = os.path.realpath(os.path.join(summary_directory, entry.name))
                    if self._is_valid_profiler_directory(full_path)[0] or \
                            self._is_valid_cluster_profiler_directory(full_path)[0]:
                        return True
                if os.path.exists(os.path.join(summary_directory, entry.name, '.metadata')):
                    return True

        return False
    def _is_valid_profiler_directory(self, directory):
        """Determine whether the given directory is a valid profiler directory."""
        profiler_type = ""
        try:
            from mindinsight.profiler.common.util import analyse_device_list_from_profiler_dir
            device_list, profiler_type = analyse_device_list_from_profiler_dir(directory)
        except ImportError:
            device_list = []

        return bool(device_list), profiler_type

    def _is_valid_cluster_profiler_directory(self, directory):
        """Determine whether it is a valid cluster profiler directory."""
        cluster_profiler_type = 'cluster'
        entries = os.scandir(directory)
        for entry in entries:
            if entry.is_symlink():
                continue
            if entry.is_dir():
                full_path = os.path.join(directory, entry.name, 'profiler')
                is_profile, profiler_type = self._is_valid_profiler_directory(full_path)
                if is_profile:
                    return is_profile, cluster_profiler_type + '_' + profiler_type

        return False, cluster_profiler_type
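    # A cluster profiler directory is expected to contain one subdirectory per host,
    # each holding its own 'profiler' folder, e.g. (hypothetical layout):
    #
    #     cluster_profiler/
    #         host-0/profiler/...
    #         host-1/profiler/...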
    def list_summary_directories_by_pagination(self, summary_base_dir, offset=0, limit=10):
        """
        List summary directories within base directory.

        Args:
            summary_base_dir (str): Path of summary base directory.
            offset (int): An offset for the page. For example, an offset of 0 means the current page is
                the first page. Default value is 0.
            limit (int): The maximum number of data items per page. Default value is 10.

        Returns:
            tuple[total, directories], total indicates the overall number of summary directories and directories
                is a list of summary directory info including the following attributes.

                - relative_path (str): Relative path of summary directory, referring to settings.SUMMARY_BASE_DIR,
                  starting with "./".
                - create_time (datetime): Creation time of summary file.
                - update_time (datetime): Modification time of summary file.

        Raises:
            ParamValueError, if offset < 0 or limit is out of valid value range.
            ParamTypeError, if offset or limit is not a valid integer.

        Examples:
            >>> from mindinsight.datavisual.data_transform.summary_watcher import SummaryWatcher
            >>> summary_watcher = SummaryWatcher()
            >>> total, directories = summary_watcher.list_summary_directories_by_pagination(
            ...     '/summary/base/dir', offset=0, limit=10)
        """
        offset = Validation.check_offset(offset=offset)
        limit = Validation.check_limit(limit, min_value=1, max_value=999)
        directories = self.list_summary_directories(summary_base_dir, overall=False)
        return len(directories), directories[offset * limit:(offset + 1) * limit]
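    # Pagination example (illustrative): with offset=2 and limit=10, the slice
    # directories[20:30] is returned, i.e. the 21st through 30th directories.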
    def list_summaries(self, summary_base_dir, relative_path='./'):
        """
        List info of summary files within the given summary directory.

        Args:
            summary_base_dir (str): Path of summary base directory.
            relative_path (str): Relative path of summary directory, referring to summary base directory,
                starting with "./".

        Returns:
            list, list of summary file info, each of which includes the following attributes.

                - file_name (str): Summary file name.
                - create_time (datetime): Creation time of summary file.
                - update_time (datetime): Modification time of summary file.

        Examples:
            >>> from mindinsight.datavisual.data_transform.summary_watcher import SummaryWatcher
            >>> summary_watcher = SummaryWatcher()
            >>> summaries = summary_watcher.list_summaries('/summary/base/dir', './job-01')
        """
        if contains_null_byte(summary_base_dir=summary_base_dir, relative_path=relative_path):
            return []

        if not self._is_valid_summary_directory(summary_base_dir, relative_path):
            return []

        summaries = []
        summary_directory = os.path.realpath(os.path.join(summary_base_dir, relative_path))
        try:
            entries = os.scandir(summary_directory)
        except PermissionError:
            logger.error('Path of summary directory is not accessible.')
            raise FileSystemPermissionError('Path of summary directory is not accessible.')

        for entry in entries:
            if entry.is_symlink() or not entry.is_file():
                continue

            pattern = re.search(self.SUMMARY_FILENAME_REGEX, entry.name)
            if pattern is None:
                continue

            timestamp = int(pattern.groupdict().get('timestamp'))
            try:
                # Extract creation time from the filename.
                ctime = datetime.datetime.fromtimestamp(timestamp).astimezone()
            except OverflowError:
                continue

            try:
                stat = entry.stat()
            except FileNotFoundError:
                logger.warning('File %s not found.', entry.name)
                continue

            mtime = datetime.datetime.fromtimestamp(stat.st_mtime).astimezone()

            summaries.append({
                'file_name': entry.name,
                'create_time': ctime,
                'update_time': mtime,
            })

        # Sort by update time in descending order and filename in ascending order.
        summaries.sort(key=lambda x: (-int(x['update_time'].timestamp()), x['file_name']))

        return summaries
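    # A returned summary item is a dict shaped roughly as follows (values are illustrative):
    #
    #     {'file_name': 'job.summary.1600000000',
    #      'create_time': datetime.datetime(...), 'update_time': datetime.datetime(...)}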
    def list_explain_directories(self, summary_base_dir, offset=0, limit=None):
        """
        List explain directories within base directory.

        Args:
            summary_base_dir (str): Path of summary base directory.
            offset (int): An offset for the page. For example, an offset of 0 means the current page is
                the first page. Default value is 0.
            limit (int): The maximum number of data items per page. Default value is None, which means
                pagination is not applied.

        Returns:
            tuple[total, directories], total indicates the overall number of explain directories and directories
                is a list of summary directory info including the following attributes.

                - relative_path (str): Relative path of summary directory, referring to settings.SUMMARY_BASE_DIR,
                  starting with "./".
                - create_time (datetime): Creation time of summary file.
                - update_time (datetime): Modification time of summary file.

        Raises:
            ParamValueError, if offset < 0 or limit is out of valid value range.
            ParamTypeError, if offset or limit is not a valid integer.

        Examples:
            >>> from mindinsight.datavisual.data_transform.summary_watcher import SummaryWatcher
            >>> summary_watcher = SummaryWatcher()
            >>> total, directories = summary_watcher.list_explain_directories('/summary/base/dir', offset=0, limit=10)
        """
        offset = Validation.check_offset(offset=offset)
        limit = Validation.check_limit(limit, min_value=1, max_value=999, default_value=None)
        directories = self.list_summary_directories(summary_base_dir, overall=False, list_explain=True)
        if limit is None:
            return len(directories), directories
        return len(directories), directories[offset * limit:(offset + 1) * limit]

def _new_entry(ctime, mtime, profiler=None):
    """Create a new entry."""
    return {
        'create_time': ctime,
        'update_time': mtime,
        'summary_files': 0,
        'lineage_files': 0,
        'explain_files': 0,
        'graph_files': 0,
        'profiler': profiler,
        'dump_dir': None
    }


def _get_explain_job_info(summary_base_dir, relative_path, timestamp):
    """Get explain job info."""
    if timestamp is None:
        job_dict = {"saliency_map": False, "hierarchical_occlusion": False}
        return job_dict

    json_path = os.path.join(summary_base_dir, relative_path.lstrip("./"), f"_explain_{timestamp}",
                             "manifest.json")
    if os.path.exists(json_path):
        with open(json_path, "r") as f:
            job_dict = json.load(f)
        return job_dict

    # Set default value to keep compatibility with the previous version.
    job_dict = {"saliency_map": True, "hierarchical_occlusion": False}
    return job_dict
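
# Minimal usage sketch (illustrative; the base directory path and job name are hypothetical):
#
#     watcher = SummaryWatcher()
#     if watcher.is_summary_directory('/summary/base/dir', './job-01'):
#         summaries = watcher.list_summaries('/summary/base/dir', './job-01')
#     total, page = watcher.list_summary_directories_by_pagination(
#         '/summary/base/dir', offset=0, limit=10)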