@@ -0,0 +1,64 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on Mon Sep 21 10:34:26 2020
+
+@author: ljia
+"""
+from utils import Graph_Kernel_List, Dataset_List, compute_graph_kernel
+from gklearn.utils.graphdataset import load_predefined_dataset
+import logging
+
+
+# def get_graphs(ds_name):
+#     from gklearn.utils.graph_synthesizer import GraphSynthesizer
+#     gsyzer = GraphSynthesizer()
+#     graphs = gsyzer.unified_graphs(num_graphs=100, num_nodes=num_nodes, num_edges=int(num_nodes*2), num_node_labels=0, num_edge_labels=0, seed=None, directed=False)
+#     return graphs
+
+
+def xp_runtimes_of_all_7cores():
+
+    # Run and save.
+    import pickle
+    import os
+    save_dir = 'outputs/runtimes_of_all_7cores/'
+    if not os.path.exists(save_dir):
+        os.makedirs(save_dir)
+
+    run_times = {}
+
+    for kernel_name in Graph_Kernel_List:
+        print()
+        print('Kernel:', kernel_name)
+
+        run_times[kernel_name] = []
+        for ds_name in Dataset_List:
+            print()
+            print('Dataset:', ds_name)
+
+            # Get graphs.
+            graphs, _ = load_predefined_dataset(ds_name)
+
+            # Compute Gram matrix.
+            try:
+                gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name, n_jobs=28)
+                run_times[kernel_name].append(run_time)
+            except Exception as exp:
+                run_times[kernel_name].append('error')
+                print('An exception occurred when running this experiment:')
+                LOG_FILENAME = save_dir + 'error.txt'
+                logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
+                logging.exception('')
+                print(repr(exp))
+
+            pickle.dump(run_time, open(save_dir + 'run_time.' + kernel_name + '.' + ds_name + '.pkl', 'wb'))
+
+    # Save all.
+    pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb'))
+
+    return
+
+
+if __name__ == '__main__':
+    xp_runtimes_of_all_7cores()
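A minimal sketch (not part of the diff) of how the pickles written by the script above could be inspected afterwards; the path and the kernel-name-to-run-time dictionary layout are taken from the save calls in the script.

import pickle

# run_times.pkl maps each kernel name to a list of per-dataset run times,
# with the string 'error' in place of runs that raised an exception.
with open('outputs/runtimes_of_all_7cores/run_times.pkl', 'rb') as f:
    run_times = pickle.load(f)

for kernel_name, times in run_times.items():
    print(kernel_name, times)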
@@ -6,6 +6,7 @@ Created on Mon Sep 21 10:34:26 2020
 @author: ljia
 """
 from utils import Graph_Kernel_List, compute_graph_kernel
+import logging
 
 
 def generate_graphs():
@@ -39,10 +40,19 @@ def xp_synthesied_graphs_dataset_size():
             print('Number of graphs:', num_graphs)
             sub_graphs = [g.copy() for g in graphs[0:num_graphs]]
 
-            gram_matrix, run_time = compute_graph_kernel(sub_graphs, kernel_name)
-            run_times[kernel_name].append(run_time)
+            try:
+                gram_matrix, run_time = compute_graph_kernel(sub_graphs, kernel_name, n_jobs=1)
+                run_times[kernel_name].append(run_time)
+            except Exception as exp:
+                run_times[kernel_name].append('error')
+                print('An exception occurred when running this experiment:')
+                LOG_FILENAME = save_dir + 'error.txt'
+                logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
+                logging.exception('')
+                print(repr(exp))
 
-            pickle.dump(run_times, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_graphs) + '.pkl', 'wb'))
+            pickle.dump(run_time, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_graphs) + '.pkl', 'wb'))
+
 
     # Save all.
     pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb'))
@@ -6,6 +6,7 @@ Created on Mon Sep 21 10:34:26 2020
 @author: ljia
 """
 from utils import Graph_Kernel_List, compute_graph_kernel
+import logging
 
 
 def generate_graphs(degree):
@@ -39,10 +40,18 @@ def xp_synthesied_graphs_degrees():
             graphs = generate_graphs(degree)
 
             # Compute Gram matrix.
-            gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name)
-            run_times[kernel_name].append(run_time)
+            try:
+                gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name, n_jobs=1)
+                run_times[kernel_name].append(run_time)
+            except Exception as exp:
+                run_times[kernel_name].append('error')
+                print('An exception occurred when running this experiment:')
+                LOG_FILENAME = save_dir + 'error.txt'
+                logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
+                logging.exception('')
+                print(repr(exp))
 
-            pickle.dump(run_times, open(save_dir + 'run_time.' + kernel_name + '.' + str(degree) + '.pkl', 'wb'))
+            pickle.dump(run_time, open(save_dir + 'run_time.' + kernel_name + '.' + str(degree) + '.pkl', 'wb'))
 
     # Save all.
     pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb'))
@@ -6,6 +6,7 @@ Created on Mon Sep 21 10:34:26 2020
 @author: ljia
 """
 from utils import Graph_Kernel_List_ESym, compute_graph_kernel
+import logging
 
 
 def generate_graphs(num_el_alp):
@@ -39,10 +40,18 @@ def xp_synthesied_graphs_num_edge_label_alphabet():
             graphs = generate_graphs(num_el_alp)
 
             # Compute Gram matrix.
-            gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name)
-            run_times[kernel_name].append(run_time)
+            try:
+                gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name, n_jobs=1)
+                run_times[kernel_name].append(run_time)
+            except Exception as exp:
+                run_times[kernel_name].append('error')
+                print('An exception occurred when running this experiment:')
+                LOG_FILENAME = save_dir + 'error.txt'
+                logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
+                logging.exception('')
+                print(repr(exp))
 
-            pickle.dump(run_times, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_el_alp) + '.pkl', 'wb'))
+            pickle.dump(run_time, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_el_alp) + '.pkl', 'wb'))
 
     # Save all.
    pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb'))
@@ -6,6 +6,7 @@ Created on Mon Sep 21 10:34:26 2020
 @author: ljia
 """
 from utils import Graph_Kernel_List_VSym, compute_graph_kernel
+import logging
 
 
 def generate_graphs(num_nl_alp):
@@ -39,10 +40,18 @@ def xp_synthesied_graphs_num_node_label_alphabet():
             graphs = generate_graphs(num_nl_alp)
 
             # Compute Gram matrix.
-            gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name)
-            run_times[kernel_name].append(run_time)
+            try:
+                gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name, n_jobs=1)
+                run_times[kernel_name].append(run_time)
+            except Exception as exp:
+                run_times[kernel_name].append('error')
+                print('An exception occurred when running this experiment:')
+                LOG_FILENAME = save_dir + 'error.txt'
+                logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
+                logging.exception('')
+                print(repr(exp))
 
-            pickle.dump(run_times, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_nl_alp) + '.pkl', 'wb'))
+            pickle.dump(run_time, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_nl_alp) + '.pkl', 'wb'))
 
     # Save all.
     pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb'))
@@ -6,6 +6,7 @@ Created on Mon Sep 21 10:34:26 2020
 @author: ljia
 """
 from utils import Graph_Kernel_List, compute_graph_kernel
+import logging
 
 
 def generate_graphs(num_nodes):
@@ -39,10 +40,18 @@ def xp_synthesied_graphs_num_nodes():
             graphs = generate_graphs(num_nodes)
 
             # Compute Gram matrix.
-            gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name)
-            run_times[kernel_name].append(run_time)
+            try:
+                gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name, n_jobs=1)
+                run_times[kernel_name].append(run_time)
+            except Exception as exp:
+                run_times[kernel_name].append('error')
+                print('An exception occurred when running this experiment:')
+                LOG_FILENAME = save_dir + 'error.txt'
+                logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
+                logging.exception('')
+                print(repr(exp))
 
-            pickle.dump(run_times, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_nodes) + '.pkl', 'wb'))
+            pickle.dump(run_time, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_nodes) + '.pkl', 'wb'))
 
     # Save all.
     pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb'))
@@ -5,6 +5,9 @@ Created on Tue Sep 22 11:33:28 2020
 @author: ljia
 """
+import multiprocessing
+
+
 
 
 Graph_Kernel_List = ['PathUpToH', 'WLSubtree', 'SylvesterEquation', 'Marginalized', 'ShortestPath', 'Treelet', 'ConjugateGradient', 'FixedPoint', 'SpectralDecomposition', 'StructuralSP', 'CommonWalk']
 # Graph_Kernel_List = ['CommonWalk', 'Marginalized', 'SylvesterEquation', 'ConjugateGradient', 'FixedPoint', 'SpectralDecomposition', 'ShortestPath', 'StructuralSP', 'PathUpToH', 'Treelet', 'WLSubtree']
@@ -21,8 +24,10 @@ Graph_Kernel_List_VCon = ['ShortestPath', 'ConjugateGradient', 'FixedPoint', 'St
 Graph_Kernel_List_ECon = ['ConjugateGradient', 'FixedPoint', 'StructuralSP']
 
 
-def compute_graph_kernel(graphs, kernel_name):
-    import multiprocessing
+Dataset_List = ['Alkane', 'Acyclic', 'MAO', 'PAH', 'MUTAG', 'Letter-med', 'ENZYMES', 'AIDS', 'NCI1', 'NCI109', 'DD']
+
+
+def compute_graph_kernel(graphs, kernel_name, n_jobs=multiprocessing.cpu_count()):
 
     if kernel_name == 'CommonWalk':
         from gklearn.kernels.commonWalkKernel import commonwalkkernel
@@ -99,7 +104,7 @@ def compute_graph_kernel(graphs, kernel_name):
         params = {'base_kernel': 'subtree', 'height': 5}
 
 
     # params['parallel'] = None
-    params['n_jobs'] = multiprocessing.cpu_count()
+    params['n_jobs'] = n_jobs
    params['verbose'] = True
    results = estimator(graphs, **params)
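A minimal usage sketch of the refactored helper, assuming the imports used in the experiment scripts above; the dataset name 'MUTAG' and kernel name 'ShortestPath' are taken from Dataset_List and Graph_Kernel_List, and the explicit n_jobs value here is only illustrative.

from utils import compute_graph_kernel
from gklearn.utils.graphdataset import load_predefined_dataset

# Load a predefined dataset (targets are ignored here) and compute its Gram matrix
# with a pinned number of worker processes instead of the cpu_count() default.
graphs, _ = load_predefined_dataset('MUTAG')
gram_matrix, run_time = compute_graph_kernel(graphs, 'ShortestPath', n_jobs=4)
print('Gram matrix computed in', run_time, 'seconds')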