You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number; they can include dashes ('-') and can be up to 35 characters long.

CnnTextClassification.cs 10 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260
  1. using System;
  2. using System.Collections;
  3. using System.Collections.Generic;
  4. using System.Diagnostics;
  5. using System.IO;
  6. using System.Linq;
  7. using System.Text;
  8. using NumSharp;
  9. using Tensorflow;
  10. using Tensorflow.Keras.Engine;
  11. using Tensorflow.Sessions;
  12. using TensorFlowNET.Examples.Text.cnn_models;
  13. using TensorFlowNET.Examples.TextClassification;
  14. using TensorFlowNET.Examples.Utility;
  15. using static Tensorflow.Python;
  16. namespace TensorFlowNET.Examples
  17. {
  18. /// <summary>
  19. /// https://github.com/dongjun-Lee/text-classification-models-tf
  20. /// </summary>
  21. public class CnnTextClassification : IExample
  22. {
  23. public bool Enabled { get; set; } = true;
  24. public string Name => "CNN Text Classification";
  25. public int? DataLimit = null;
  26. public bool IsImportingGraph { get; set; } = false;
  27. private string dataDir = "word_cnn";
  28. private string dataFileName = "dbpedia_csv.tar.gz";
  29. private const string TRAIN_PATH = "text_classification/dbpedia_csv/train.csv";
  30. private const string TEST_PATH = "text_classification/dbpedia_csv/test.csv";
  31. private const int NUM_CLASS = 14;
  32. private const int BATCH_SIZE = 64;
  33. private const int NUM_EPOCHS = 10;
  34. private const int WORD_MAX_LEN = 100;
  35. private const int CHAR_MAX_LEN = 1014;
  36. protected float loss_value = 0;
  37. int vocabulary_size = 50000;
  38. public bool Run()
  39. {
  40. PrepareData();
  41. var graph = tf.Graph().as_default();
  42. return with(tf.Session(graph), sess =>
  43. {
  44. if (IsImportingGraph)
  45. return RunWithImportedGraph(sess, graph);
  46. else
  47. return RunWithBuiltGraph(sess, graph);
  48. });
  49. }
  50. protected virtual bool RunWithImportedGraph(Session sess, Graph graph)
  51. {
  52. var stopwatch = Stopwatch.StartNew();
  53. Console.WriteLine("Building dataset...");
  54. int[][] x = null;
  55. int[] y = null;
  56. int alphabet_size = 0;
  57. var word_dict = DataHelpers.build_word_dict(TRAIN_PATH);
  58. // vocabulary_size = len(word_dict);
  59. (x, y) = DataHelpers.build_word_dataset(TRAIN_PATH, word_dict, WORD_MAX_LEN);
  60. Console.WriteLine("\tDONE ");
  61. var (train_x, valid_x, train_y, valid_y) = train_test_split(x, y, test_size: 0.15f);
  62. Console.WriteLine("Training set size: " + train_x.len);
  63. Console.WriteLine("Test set size: " + valid_x.len);
  64. Console.WriteLine("Import graph...");
  65. var meta_file = "word_cnn.meta";
  66. tf.train.import_meta_graph(Path.Join("graph", meta_file));
  67. Console.WriteLine("\tDONE " + stopwatch.Elapsed);
  68. sess.run(tf.global_variables_initializer());
  69. var saver = tf.train.Saver(tf.global_variables());
  70. var train_batches = batch_iter(train_x, train_y, BATCH_SIZE, NUM_EPOCHS);
  71. var num_batches_per_epoch = (len(train_x) - 1) / BATCH_SIZE + 1;
  72. double max_accuracy = 0;
  73. Tensor is_training = graph.OperationByName("is_training");
  74. Tensor model_x = graph.OperationByName("x");
  75. Tensor model_y = graph.OperationByName("y");
  76. Tensor loss = graph.OperationByName("loss/Mean");
  77. Operation optimizer = graph.OperationByName("loss/Adam");
  78. Tensor global_step = graph.OperationByName("Variable");
  79. Tensor accuracy = graph.OperationByName("accuracy/accuracy");
  80. stopwatch = Stopwatch.StartNew();
  81. int i = 0;
  82. foreach (var (x_batch, y_batch, total) in train_batches)
  83. {
  84. i++;
  85. var train_feed_dict = new FeedDict
  86. {
  87. [model_x] = x_batch,
  88. [model_y] = y_batch,
  89. [is_training] = true,
  90. };
  91. var result = sess.run(new ITensorOrOperation[] { optimizer, global_step, loss }, train_feed_dict);
  92. loss_value = result[2];
  93. var step = (int)result[1];
  94. if (step % 10 == 0)
  95. {
  96. var estimate = TimeSpan.FromSeconds((stopwatch.Elapsed.TotalSeconds / i) * total);
  97. Console.WriteLine($"Training on batch {i}/{total} loss: {loss_value}. Estimated training time: {estimate}");
  98. }
  99. if (step % 100 == 0)
  100. {
  101. // Test accuracy with validation data for each epoch.
  102. var valid_batches = batch_iter(valid_x, valid_y, BATCH_SIZE, 1);
  103. var (sum_accuracy, cnt) = (0.0f, 0);
  104. foreach (var (valid_x_batch, valid_y_batch, total_validation_batches) in valid_batches)
  105. {
  106. var valid_feed_dict = new FeedDict
  107. {
  108. [model_x] = valid_x_batch,
  109. [model_y] = valid_y_batch,
  110. [is_training] = false
  111. };
  112. var result1 = sess.run(accuracy, valid_feed_dict);
  113. float accuracy_value = result1;
  114. sum_accuracy += accuracy_value;
  115. cnt += 1;
  116. }
  117. var valid_accuracy = sum_accuracy / cnt;
  118. print($"\nValidation Accuracy = {valid_accuracy}\n");
  119. // Save model
  120. if (valid_accuracy > max_accuracy)
  121. {
  122. max_accuracy = valid_accuracy;
  123. saver.save(sess, $"{dataDir}/word_cnn.ckpt", global_step: step);
  124. print("Model is saved.\n");
  125. }
  126. }
  127. }
  128. return false;
  129. }
  130. protected virtual bool RunWithBuiltGraph(Session session, Graph graph)
  131. {
  132. Console.WriteLine("Building dataset...");
  133. var (x, y, alphabet_size) = DataHelpers.build_char_dataset("train", "word_cnn", CHAR_MAX_LEN, DataLimit);
  134. var (train_x, valid_x, train_y, valid_y) = train_test_split(x, y, test_size: 0.15f);
  135. ITextClassificationModel model = null;
  136. // todo train the model
  137. return false;
  138. }
  139. // TODO: this originally is an SKLearn utility function. it randomizes train and test which we don't do here
  140. private (NDArray, NDArray, NDArray, NDArray) train_test_split(NDArray x, NDArray y, float test_size = 0.3f)
  141. {
  142. Console.WriteLine("Splitting in Training and Testing data...");
  143. int len = x.shape[0];
  144. //int classes = y.Data<int>().Distinct().Count();
  145. //int samples = len / classes;
  146. int train_size = (int)Math.Round(len * (1 - test_size));
  147. var train_x = x[new Slice(stop: train_size), new Slice()];
  148. var valid_x = x[new Slice(start: train_size), new Slice()];
  149. var train_y = y[new Slice(stop: train_size)];
  150. var valid_y = y[new Slice(start: train_size)];
  151. Console.WriteLine("\tDONE");
  152. return (train_x, valid_x, train_y, valid_y);
  153. }
  154. private static void FillWithShuffledLabels(int[][] x, int[] y, int[][] shuffled_x, int[] shuffled_y, Random random, Dictionary<int, HashSet<int>> labels)
  155. {
  156. int i = 0;
  157. var label_keys = labels.Keys.ToArray();
  158. while (i < shuffled_x.Length)
  159. {
  160. var key = label_keys[random.Next(label_keys.Length)];
  161. var set = labels[key];
  162. var index = set.First();
  163. if (set.Count == 0)
  164. {
  165. labels.Remove(key); // remove the set as it is empty
  166. label_keys = labels.Keys.ToArray();
  167. }
  168. shuffled_x[i] = x[index];
  169. shuffled_y[i] = y[index];
  170. i++;
  171. }
  172. }
  173. private IEnumerable<(NDArray, NDArray, int)> batch_iter(NDArray inputs, NDArray outputs, int batch_size, int num_epochs)
  174. {
  175. var num_batches_per_epoch = (len(inputs) - 1) / batch_size + 1;
  176. var total_batches = num_batches_per_epoch * num_epochs;
  177. foreach (var epoch in range(num_epochs))
  178. {
  179. foreach (var batch_num in range(num_batches_per_epoch))
  180. {
  181. var start_index = batch_num * batch_size;
  182. var end_index = Math.Min((batch_num + 1) * batch_size, len(inputs));
  183. if (end_index <= start_index)
  184. break;
  185. yield return (inputs[new Slice(start_index, end_index)], outputs[new Slice(start_index, end_index)], total_batches);
  186. }
  187. }
  188. }
  189. public void PrepareData()
  190. {
  191. // full dataset https://github.com/le-scientifique/torchDatasets/raw/master/dbpedia_csv.tar.gz
  192. var url = "https://raw.githubusercontent.com/SciSharp/TensorFlow.NET/master/data/dbpedia_subset.zip";
  193. Web.Download(url, dataDir, "dbpedia_subset.zip");
  194. Compress.UnZip(Path.Combine(dataDir, "dbpedia_subset.zip"), Path.Combine(dataDir, "dbpedia_csv"));
  195. if (IsImportingGraph)
  196. {
  197. // download graph meta data
  198. var meta_file = "word_cnn.meta";
  199. var meta_path = Path.Combine("graph", meta_file);
  200. if (File.GetLastWriteTime(meta_path) < new DateTime(2019, 05, 11))
  201. {
  202. // delete old cached file which contains errors
  203. Console.WriteLine("Discarding cached file: " + meta_path);
  204. File.Delete(meta_path);
  205. }
  206. url = "https://raw.githubusercontent.com/SciSharp/TensorFlow.NET/master/graph/" + meta_file;
  207. Web.Download(url, "graph", meta_file);
  208. }
  209. }
  210. public Graph ImportGraph()
  211. {
  212. throw new NotImplementedException();
  213. }
  214. public Graph BuildGraph()
  215. {
  216. throw new NotImplementedException();
  217. }
  218. public bool Train()
  219. {
  220. throw new NotImplementedException();
  221. }
  222. public bool Predict()
  223. {
  224. throw new NotImplementedException();
  225. }
  226. }
  227. }