
test_graph_kernels.py
  1. """Tests of graph kernels.
  2. """
  3. import pytest
  4. import multiprocessing
  5. import numpy as np
  6. def chooseDataset(ds_name):
  7. """Choose dataset according to name.
  8. """
  9. from gklearn.utils import Dataset
  10. dataset = Dataset()
  11. # no node labels (and no edge labels).
  12. if ds_name == 'Alkane':
  13. dataset.load_predefined_dataset(ds_name)
  14. dataset.trim_dataset(edge_required=False)
  15. irrelevant_labels = {'node_attrs': ['x', 'y', 'z'], 'edge_labels': ['bond_stereo']}
  16. dataset.remove_labels(**irrelevant_labels)
  17. dataset.cut_graphs(range(1, 10))
  18. # node symbolic labels.
  19. elif ds_name == 'Acyclic':
  20. dataset.load_predefined_dataset(ds_name)
  21. dataset.trim_dataset(edge_required=False)
  22. irrelevant_labels = {'node_attrs': ['x', 'y', 'z'], 'edge_labels': ['bond_stereo']}
  23. dataset.remove_labels(**irrelevant_labels)
  24. # node non-symbolic labels.
  25. elif ds_name == 'Letter-med':
  26. dataset.load_predefined_dataset(ds_name)
  27. dataset.trim_dataset(edge_required=False)
  28. # node symbolic and non-symbolic labels (and edge symbolic labels).
  29. elif ds_name == 'AIDS':
  30. dataset.load_predefined_dataset(ds_name)
  31. dataset.trim_dataset(edge_required=False)
  32. # edge non-symbolic labels (no node labels).
  33. elif ds_name == 'Fingerprint_edge':
  34. dataset.load_predefined_dataset('Fingerprint')
  35. dataset.trim_dataset(edge_required=True)
  36. irrelevant_labels = {'edge_attrs': ['orient', 'angle']}
  37. dataset.remove_labels(**irrelevant_labels)
  38. # edge non-symbolic labels (and node non-symbolic labels).
  39. elif ds_name == 'Fingerprint':
  40. dataset.load_predefined_dataset(ds_name)
  41. dataset.trim_dataset(edge_required=True)
  42. # edge symbolic and non-symbolic labels (and node symbolic and non-symbolic labels).
  43. elif ds_name == 'Cuneiform':
  44. dataset.load_predefined_dataset(ds_name)
  45. dataset.trim_dataset(edge_required=True)
  46. dataset.cut_graphs(range(0, 3))
  47. return dataset
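
# A minimal illustration (mirroring how the tests below call this helper): the
# returned Dataset exposes the graph list and the label names that the kernel
# constructors expect, e.g.
#
#     dataset = chooseDataset('Acyclic')
#     graphs, node_labels = dataset.graphs, dataset.node_labels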


def test_list_graph_kernels():
    """Test that the list of available graph kernels matches GRAPH_KERNELS.
    """
    from gklearn.kernels import GRAPH_KERNELS, list_of_graph_kernels
    assert list_of_graph_kernels() == [i for i in GRAPH_KERNELS]


@pytest.mark.parametrize('ds_name', ['Alkane', 'AIDS'])
@pytest.mark.parametrize('weight,compute_method', [(0.01, 'geo'), (1, 'exp')])
@pytest.mark.parametrize('parallel', ['imap_unordered', None])
def test_CommonWalk(ds_name, parallel, weight, compute_method):
    """Test common walk kernel.
    """
    from gklearn.kernels import CommonWalk
    import networkx as nx

    dataset = chooseDataset(ds_name)
    dataset.load_graphs([g for g in dataset.graphs if nx.number_of_nodes(g) > 1])

    try:
        graph_kernel = CommonWalk(node_labels=dataset.node_labels,
                                  edge_labels=dataset.edge_labels,
                                  ds_infos=dataset.get_dataset_infos(keys=['directed']),
                                  weight=weight,
                                  compute_method=compute_method)
        gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
    except Exception as exception:
        assert False, exception


@pytest.mark.parametrize('ds_name', ['Alkane', 'AIDS'])
@pytest.mark.parametrize('remove_totters', [False])  # [True, False])
@pytest.mark.parametrize('parallel', ['imap_unordered', None])
def test_Marginalized(ds_name, parallel, remove_totters):
    """Test marginalized kernel.
    """
    from gklearn.kernels import Marginalized

    dataset = chooseDataset(ds_name)

    try:
        graph_kernel = Marginalized(node_labels=dataset.node_labels,
                                    edge_labels=dataset.edge_labels,
                                    ds_infos=dataset.get_dataset_infos(keys=['directed']),
                                    p_quit=0.5,
                                    n_iteration=2,
                                    remove_totters=remove_totters)
        gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
    except Exception as exception:
        assert False, exception


@pytest.mark.parametrize('ds_name', ['Acyclic'])
@pytest.mark.parametrize('parallel', ['imap_unordered', None])
def test_SylvesterEquation(ds_name, parallel):
    """Test Sylvester equation kernel.
    """
    from gklearn.kernels import SylvesterEquation

    dataset = chooseDataset(ds_name)

    try:
        graph_kernel = SylvesterEquation(
            ds_infos=dataset.get_dataset_infos(keys=['directed']),
            weight=1e-3,
            p=None,
            q=None,
            edge_weight=None)
        gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
    except Exception as exception:
        assert False, exception


@pytest.mark.parametrize('ds_name', ['Acyclic', 'AIDS'])
@pytest.mark.parametrize('parallel', ['imap_unordered', None])
def test_ConjugateGradient(ds_name, parallel):
    """Test conjugate gradient kernel.
    """
    from gklearn.kernels import ConjugateGradient
    from gklearn.utils.kernels import deltakernel, gaussiankernel, kernelproduct
    import functools

    dataset = chooseDataset(ds_name)

    mixkernel = functools.partial(kernelproduct, deltakernel, gaussiankernel)
    sub_kernels = {'symb': deltakernel, 'nsymb': gaussiankernel, 'mix': mixkernel}

    try:
        graph_kernel = ConjugateGradient(
            node_labels=dataset.node_labels,
            node_attrs=dataset.node_attrs,
            edge_labels=dataset.edge_labels,
            edge_attrs=dataset.edge_attrs,
            ds_infos=dataset.get_dataset_infos(keys=['directed']),
            weight=1e-3,
            p=None,
            q=None,
            edge_weight=None,
            node_kernels=sub_kernels,
            edge_kernels=sub_kernels)
        gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
    except Exception as exception:
        assert False, exception


@pytest.mark.parametrize('ds_name', ['Acyclic', 'AIDS'])
@pytest.mark.parametrize('parallel', ['imap_unordered', None])
def test_FixedPoint(ds_name, parallel):
    """Test fixed point kernel.
    """
    from gklearn.kernels import FixedPoint
    from gklearn.utils.kernels import deltakernel, gaussiankernel, kernelproduct
    import functools

    dataset = chooseDataset(ds_name)

    mixkernel = functools.partial(kernelproduct, deltakernel, gaussiankernel)
    sub_kernels = {'symb': deltakernel, 'nsymb': gaussiankernel, 'mix': mixkernel}

    try:
        graph_kernel = FixedPoint(
            node_labels=dataset.node_labels,
            node_attrs=dataset.node_attrs,
            edge_labels=dataset.edge_labels,
            edge_attrs=dataset.edge_attrs,
            ds_infos=dataset.get_dataset_infos(keys=['directed']),
            weight=1e-3,
            p=None,
            q=None,
            edge_weight=None,
            node_kernels=sub_kernels,
            edge_kernels=sub_kernels)
        gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
    except Exception as exception:
        assert False, exception


@pytest.mark.parametrize('ds_name', ['Acyclic'])
@pytest.mark.parametrize('sub_kernel', ['exp', 'geo'])
@pytest.mark.parametrize('parallel', ['imap_unordered', None])
def test_SpectralDecomposition(ds_name, sub_kernel, parallel):
    """Test spectral decomposition kernel.
    """
    from gklearn.kernels import SpectralDecomposition

    dataset = chooseDataset(ds_name)

    try:
        graph_kernel = SpectralDecomposition(
            ds_infos=dataset.get_dataset_infos(keys=['directed']),
            weight=1e-3,
            p=None,
            q=None,
            edge_weight=None,
            sub_kernel=sub_kernel)
        gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
    except Exception as exception:
        assert False, exception


# @pytest.mark.parametrize(
#     'compute_method,ds_name,sub_kernel',
#     [
#         ('sylvester', 'Alkane', None),
#         ('conjugate', 'Alkane', None),
#         ('conjugate', 'AIDS', None),
#         ('fp', 'Alkane', None),
#         ('fp', 'AIDS', None),
#         ('spectral', 'Alkane', 'exp'),
#         ('spectral', 'Alkane', 'geo'),
#     ]
# )
# @pytest.mark.parametrize('parallel', ['imap_unordered', None])
# def test_RandomWalk(ds_name, compute_method, sub_kernel, parallel):
#     """Test random walk kernel.
#     """
#     from gklearn.kernels import RandomWalk
#     from gklearn.utils.kernels import deltakernel, gaussiankernel, kernelproduct
#     import functools
#
#     dataset = chooseDataset(ds_name)
#     mixkernel = functools.partial(kernelproduct, deltakernel, gaussiankernel)
#     sub_kernels = {'symb': deltakernel, 'nsymb': gaussiankernel, 'mix': mixkernel}
#     # try:
#     graph_kernel = RandomWalk(node_labels=dataset.node_labels,
#                               node_attrs=dataset.node_attrs,
#                               edge_labels=dataset.edge_labels,
#                               edge_attrs=dataset.edge_attrs,
#                               ds_infos=dataset.get_dataset_infos(keys=['directed']),
#                               compute_method=compute_method,
#                               weight=1e-3,
#                               p=None,
#                               q=None,
#                               edge_weight=None,
#                               node_kernels=sub_kernels,
#                               edge_kernels=sub_kernels,
#                               sub_kernel=sub_kernel)
#     gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
#         parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
#     kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
#         parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
#     kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
#         parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
#     except Exception as exception:
#         assert False, exception


@pytest.mark.parametrize('ds_name', ['Alkane', 'Acyclic', 'Letter-med', 'AIDS', 'Fingerprint'])
@pytest.mark.parametrize('parallel', ['imap_unordered', None])
def test_ShortestPath(ds_name, parallel):
    """Test shortest path kernel.
    """
    from gklearn.kernels import ShortestPath
    from gklearn.utils.kernels import deltakernel, gaussiankernel, kernelproduct
    import functools

    dataset = chooseDataset(ds_name)

    mixkernel = functools.partial(kernelproduct, deltakernel, gaussiankernel)
    sub_kernels = {'symb': deltakernel, 'nsymb': gaussiankernel, 'mix': mixkernel}

    try:
        graph_kernel = ShortestPath(node_labels=dataset.node_labels,
                                    node_attrs=dataset.node_attrs,
                                    ds_infos=dataset.get_dataset_infos(keys=['directed']),
                                    fcsp=True,
                                    node_kernels=sub_kernels)
        gram_matrix1, run_time = graph_kernel.compute(dataset.graphs,
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)

        graph_kernel = ShortestPath(node_labels=dataset.node_labels,
                                    node_attrs=dataset.node_attrs,
                                    ds_infos=dataset.get_dataset_infos(keys=['directed']),
                                    fcsp=False,
                                    node_kernels=sub_kernels)
        gram_matrix2, run_time = graph_kernel.compute(dataset.graphs,
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
    except Exception as exception:
        assert False, exception

    assert np.array_equal(gram_matrix1, gram_matrix2)


# @pytest.mark.parametrize('ds_name', ['Alkane', 'Acyclic', 'Letter-med', 'AIDS', 'Fingerprint'])
@pytest.mark.parametrize('ds_name', ['Alkane', 'Acyclic', 'Letter-med', 'AIDS', 'Fingerprint', 'Fingerprint_edge', 'Cuneiform'])
@pytest.mark.parametrize('parallel', ['imap_unordered', None])
def test_StructuralSP(ds_name, parallel):
    """Test structural shortest path kernel.
    """
    from gklearn.kernels import StructuralSP
    from gklearn.utils.kernels import deltakernel, gaussiankernel, kernelproduct
    import functools

    dataset = chooseDataset(ds_name)

    mixkernel = functools.partial(kernelproduct, deltakernel, gaussiankernel)
    sub_kernels = {'symb': deltakernel, 'nsymb': gaussiankernel, 'mix': mixkernel}

    try:
        graph_kernel = StructuralSP(node_labels=dataset.node_labels,
                                    edge_labels=dataset.edge_labels,
                                    node_attrs=dataset.node_attrs,
                                    edge_attrs=dataset.edge_attrs,
                                    ds_infos=dataset.get_dataset_infos(keys=['directed']),
                                    fcsp=True,
                                    node_kernels=sub_kernels,
                                    edge_kernels=sub_kernels)
        gram_matrix1, run_time = graph_kernel.compute(dataset.graphs,
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True, normalize=False)
        kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)

        graph_kernel = StructuralSP(node_labels=dataset.node_labels,
                                    edge_labels=dataset.edge_labels,
                                    node_attrs=dataset.node_attrs,
                                    edge_attrs=dataset.edge_attrs,
                                    ds_infos=dataset.get_dataset_infos(keys=['directed']),
                                    fcsp=False,
                                    node_kernels=sub_kernels,
                                    edge_kernels=sub_kernels)
        gram_matrix2, run_time = graph_kernel.compute(dataset.graphs,
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True, normalize=False)
        kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
    except Exception as exception:
        assert False, exception

    assert np.array_equal(gram_matrix1, gram_matrix2)


@pytest.mark.parametrize('ds_name', ['Alkane', 'AIDS'])
@pytest.mark.parametrize('parallel', ['imap_unordered', None])
# @pytest.mark.parametrize('k_func', ['MinMax', 'tanimoto', None])
@pytest.mark.parametrize('k_func', ['MinMax', 'tanimoto'])
@pytest.mark.parametrize('compute_method', ['trie', 'naive'])
def test_PathUpToH(ds_name, parallel, k_func, compute_method):
    """Test path kernel up to length $h$.
    """
    from gklearn.kernels import PathUpToH

    dataset = chooseDataset(ds_name)

    try:
        graph_kernel = PathUpToH(node_labels=dataset.node_labels,
                                 edge_labels=dataset.edge_labels,
                                 ds_infos=dataset.get_dataset_infos(keys=['directed']),
                                 depth=2, k_func=k_func, compute_method=compute_method)
        gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
    except Exception as exception:
        assert False, exception


@pytest.mark.parametrize('ds_name', ['Alkane', 'AIDS'])
@pytest.mark.parametrize('parallel', ['imap_unordered', None])
def test_Treelet(ds_name, parallel):
    """Test treelet kernel.
    """
    from gklearn.kernels import Treelet
    from gklearn.utils.kernels import polynomialkernel
    import functools

    dataset = chooseDataset(ds_name)

    pkernel = functools.partial(polynomialkernel, d=2, c=1e5)

    try:
        graph_kernel = Treelet(node_labels=dataset.node_labels,
                               edge_labels=dataset.edge_labels,
                               ds_infos=dataset.get_dataset_infos(keys=['directed']),
                               sub_kernel=pkernel)
        gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
    except Exception as exception:
        assert False, exception


@pytest.mark.parametrize('ds_name', ['Acyclic'])
# @pytest.mark.parametrize('base_kernel', ['subtree', 'sp', 'edge'])
# @pytest.mark.parametrize('base_kernel', ['subtree'])
@pytest.mark.parametrize('parallel', ['imap_unordered', None])
def test_WLSubtree(ds_name, parallel):
    """Test Weisfeiler-Lehman subtree kernel.
    """
    from gklearn.kernels import WLSubtree

    dataset = chooseDataset(ds_name)

    try:
        graph_kernel = WLSubtree(node_labels=dataset.node_labels,
                                 edge_labels=dataset.edge_labels,
                                 ds_infos=dataset.get_dataset_infos(keys=['directed']),
                                 height=2)
        gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
        kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
            parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
    except Exception as exception:
        assert False, exception


if __name__ == "__main__":
    test_list_graph_kernels()
    # test_spkernel('Alkane', 'imap_unordered')
    # test_ShortestPath('Alkane', 'imap_unordered')
    # test_StructuralSP('Fingerprint_edge', 'imap_unordered')
    # test_StructuralSP('Alkane', None)
    # test_StructuralSP('Cuneiform', None)
    # test_WLSubtree('Acyclic', 'imap_unordered')
    # test_RandomWalk('Acyclic', 'sylvester', None, 'imap_unordered')
    # test_RandomWalk('Acyclic', 'conjugate', None, 'imap_unordered')
    # test_RandomWalk('Acyclic', 'fp', None, None)
    # test_RandomWalk('Acyclic', 'spectral', 'exp', 'imap_unordered')
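
For reference, the following is a minimal standalone sketch distilled from the tests above, assuming the predefined 'Acyclic' dataset is available through gklearn as the tests assume. It loads the dataset and computes a Gram matrix with the Weisfeiler-Lehman subtree kernel; the parameter values mirror test_WLSubtree and are illustrative only.

import multiprocessing

from gklearn.utils import Dataset
from gklearn.kernels import WLSubtree

# Load one of the predefined datasets used by the tests and drop graphs
# that lack the required structure.
dataset = Dataset()
dataset.load_predefined_dataset('Acyclic')
dataset.trim_dataset(edge_required=False)

# Configure the kernel from the dataset's label names, then compute the
# Gram matrix over all graphs in parallel.
graph_kernel = WLSubtree(node_labels=dataset.node_labels,
                         edge_labels=dataset.edge_labels,
                         ds_infos=dataset.get_dataset_infos(keys=['directed']),
                         height=2)
gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
    parallel='imap_unordered', n_jobs=multiprocessing.cpu_count(), verbose=True)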
