main.py
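
# Example workflow for the `learnware` package: it builds a toy learnware market
# from SVM models trained on the sklearn digits dataset, then demonstrates
# uploading/deleting learnwares, semantic search, statistical (RKME) search,
# and learnware reuse.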
import os
import fire
import copy
import joblib
import zipfile
import numpy as np
from sklearn import svm
from sklearn.datasets import load_digits
from sklearn.model_selection import train_test_split
from shutil import copyfile, rmtree

import learnware
from learnware.market import instantiate_learnware_market, BaseUserInfo
from learnware.reuse import JobSelectorReuser, AveragingReuser
from learnware.specification import generate_rkme_table_spec, RKMETableSpecification
curr_root = os.path.dirname(os.path.abspath(__file__))
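
# Semantic specification shared by the toy learnwares and the user queries:
# tabular data, classification task, scikit-learn models, education scenario.
# "Name" and "Description" are filled in per learnware before uploading.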
user_semantic = {
    "Data": {"Values": ["Table"], "Type": "Class"},
    "Task": {
        "Values": ["Classification"],
        "Type": "Class",
    },
    "Library": {"Values": ["Scikit-learn"], "Type": "Class"},
    "Scenario": {"Values": ["Education"], "Type": "Tag"},
    "Description": {"Values": "", "Type": "String"},
    "Name": {"Values": "", "Type": "String"},
}
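
# Wrapping the workflow in a class lets python-fire expose each method below as
# a command-line subcommand (see the __main__ block at the bottom of the file).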
class LearnwareMarketWorkflow:
    def _init_learnware_market(self):
        """Initialize the learnware market."""
        learnware.init()
        np.random.seed(2023)
        easy_market = instantiate_learnware_market(market_id="sklearn_digits", name="easy", rebuild=True)
        return easy_market
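
    # Build `learnware_num` toy learnwares: each trains a linear SVM on a random
    # 70% split of the digits data, generates an RKME table specification from
    # the training features, copies the example __init__.py / learnware.yaml,
    # and packs everything into a zip archive under learnware_pool/.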
    def prepare_learnware_randomly(self, learnware_num=5):
        self.zip_path_list = []
        X, y = load_digits(return_X_y=True)

        for i in range(learnware_num):
            dir_path = os.path.join(curr_root, "learnware_pool", "svm_%d" % (i))
            os.makedirs(dir_path, exist_ok=True)

            print("Preparing Learnware: %d" % (i))
            data_X, _, data_y, _ = train_test_split(X, y, test_size=0.3, shuffle=True)
            clf = svm.SVC(kernel="linear", probability=True)
            clf.fit(data_X, data_y)
            joblib.dump(clf, os.path.join(dir_path, "svm.pkl"))

            spec = generate_rkme_table_spec(X=data_X, gamma=0.1, cuda_idx=0)
            spec.save(os.path.join(dir_path, "svm.json"))

            init_file = os.path.join(dir_path, "__init__.py")
            copyfile(
                os.path.join(curr_root, "learnware_example/example_init.py"), init_file
            )  # cp example_init.py init_file

            yaml_file = os.path.join(dir_path, "learnware.yaml")
            copyfile(os.path.join(curr_root, "learnware_example/example.yaml"), yaml_file)  # cp example.yaml yaml_file

            zip_file = dir_path + ".zip"
            # zip -q -r -j zip_file dir_path
            with zipfile.ZipFile(zip_file, "w") as zip_obj:
                for foldername, subfolders, filenames in os.walk(dir_path):
                    for filename in filenames:
                        file_path = os.path.join(foldername, filename)
                        zip_info = zipfile.ZipInfo(filename)
                        zip_info.compress_type = zipfile.ZIP_STORED
                        with open(file_path, "rb") as file:
                            zip_obj.writestr(zip_info, file.read())

            rmtree(dir_path)  # rm -r dir_path
            self.zip_path_list.append(zip_file)
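
    # Upload the prepared zip archives into a fresh market, print the available
    # learnware ids, and optionally delete them all again when delete=True.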
    def test_upload_delete_learnware(self, learnware_num=5, delete=False):
        easy_market = self._init_learnware_market()
        self.prepare_learnware_randomly(learnware_num)

        print("Total Item:", len(easy_market))
        for idx, zip_path in enumerate(self.zip_path_list):
            semantic_spec = copy.deepcopy(user_semantic)
            semantic_spec["Name"]["Values"] = "learnware_%d" % (idx)
            semantic_spec["Description"]["Values"] = "test_learnware_number_%d" % (idx)
            easy_market.add_learnware(zip_path, semantic_spec)

        print("Total Item:", len(easy_market))
        curr_inds = easy_market.get_learnware_ids()
        print("Available ids After Uploading Learnwares:", curr_inds)

        if delete:
            for learnware_id in curr_inds:
                easy_market.delete_learnware(learnware_id)
            curr_inds = easy_market.get_learnware_ids()
            print("Available ids After Deleting Learnwares:", curr_inds)

        return easy_market
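
    # Search the market using only the semantic specification (the name and
    # description of the last uploaded learnware); no statistical spec is given.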
    def test_search_semantics(self, learnware_num=5):
        easy_market = self.test_upload_delete_learnware(learnware_num, delete=False)
        print("Total Item:", len(easy_market))

        test_folder = os.path.join(curr_root, "test_semantics")

        # unzip -o -q zip_path -d unzip_dir
        if os.path.exists(test_folder):
            rmtree(test_folder)
        os.makedirs(test_folder, exist_ok=True)
        with zipfile.ZipFile(self.zip_path_list[0], "r") as zip_obj:
            zip_obj.extractall(path=test_folder)

        semantic_spec = copy.deepcopy(user_semantic)
        semantic_spec["Name"]["Values"] = f"learnware_{learnware_num - 1}"
        semantic_spec["Description"]["Values"] = f"test_learnware_number_{learnware_num - 1}"

        user_info = BaseUserInfo(semantic_spec=semantic_spec)
        _, single_learnware_list, _, _ = easy_market.search_learnware(user_info)

        print("User info:", user_info.get_semantic_spec())
        print("Search result:")
        for learnware in single_learnware_list:
            print("Choose learnware:", learnware.id, learnware.get_specification().get_semantic_spec())

        rmtree(test_folder)  # rm -r test_folder
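
    # Statistical search: for each prepared learnware, load its saved RKME table
    # specification and use it as the user's statistical requirement, then print
    # the per-learnware scores and the recommended mixture of learnwares.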
    def test_stat_search(self, learnware_num=5):
        easy_market = self.test_upload_delete_learnware(learnware_num, delete=False)
        print("Total Item:", len(easy_market))

        test_folder = os.path.join(curr_root, "test_stat")
        for idx, zip_path in enumerate(self.zip_path_list):
            unzip_dir = os.path.join(test_folder, f"{idx}")

            # unzip -o -q zip_path -d unzip_dir
            if os.path.exists(unzip_dir):
                rmtree(unzip_dir)
            os.makedirs(unzip_dir, exist_ok=True)
            with zipfile.ZipFile(zip_path, "r") as zip_obj:
                zip_obj.extractall(path=unzip_dir)

            user_spec = RKMETableSpecification()
            user_spec.load(os.path.join(unzip_dir, "svm.json"))
            user_info = BaseUserInfo(semantic_spec=user_semantic, stat_info={"RKMETableSpecification": user_spec})
            (
                sorted_score_list,
                single_learnware_list,
                mixture_score,
                mixture_learnware_list,
            ) = easy_market.search_learnware(user_info)

            print(f"search result of user{idx}:")
            for score, learnware in zip(sorted_score_list, single_learnware_list):
                print(f"score: {score}, learnware_id: {learnware.id}")
            print(f"mixture_score: {mixture_score}\n")
            mixture_id = " ".join([learnware.id for learnware in mixture_learnware_list])
            print(f"mixture_learnware: {mixture_id}\n")

        rmtree(test_folder)  # rm -r test_folder
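
    # Learnware reuse: build a held-out "user" dataset from digits, search for a
    # mixture of learnwares matching its RKME specification, then compare two
    # reuse strategies (job selector vs. averaging ensemble) by accuracy.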
    def test_learnware_reuse(self, learnware_num=5):
        easy_market = self.test_upload_delete_learnware(learnware_num, delete=False)
        print("Total Item:", len(easy_market))

        X, y = load_digits(return_X_y=True)
        _, data_X, _, data_y = train_test_split(X, y, test_size=0.3, shuffle=True)

        stat_spec = generate_rkme_table_spec(X=data_X, gamma=0.1, cuda_idx=0)
        user_info = BaseUserInfo(semantic_spec=user_semantic, stat_info={"RKMETableSpecification": stat_spec})
        _, _, _, mixture_learnware_list = easy_market.search_learnware(user_info)
        # print("Mixture Learnware:", mixture_learnware_list)

        # Based on the user information, the learnware market returns a list of learnwares (mixture_learnware_list).
        # Use the job selector reuser to reuse the searched learnwares for prediction.
        reuse_job_selector = JobSelectorReuser(learnware_list=mixture_learnware_list)
        job_selector_predict_y = reuse_job_selector.predict(user_data=data_X)

        # Use the averaging ensemble reuser to reuse the searched learnwares for prediction.
        reuse_ensemble = AveragingReuser(learnware_list=mixture_learnware_list)
        ensemble_predict_y = reuse_ensemble.predict(user_data=data_X)

        print("Job Selector Acc:", np.sum(np.argmax(job_selector_predict_y, axis=1) == data_y) / len(data_y))
        print("Averaging Ensemble Acc:", np.sum(np.argmax(ensemble_predict_y, axis=1) == data_y) / len(data_y))

if __name__ == "__main__":
    fire.Fire(LearnwareMarketWorkflow)
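
# Each public method above becomes a python-fire subcommand, e.g.:
#   python main.py test_upload_delete_learnware --learnware_num=5 --delete=True
#   python main.py test_stat_search
#   python main.py test_learnware_reuse --learnware_num=10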