You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; they can include dashes ('-') and can be up to 35 characters long.

TextClassificationTrain.cs 13 kB

6 years ago
6 years ago
6 years ago
6 years ago
6 years ago
6 years ago
6 years ago
6 years ago
6 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289
  1. using System;
  2. using System.Collections;
  3. using System.Collections.Generic;
  4. using System.Diagnostics;
  5. using System.IO;
  6. using System.Linq;
  7. using System.Text;
  8. using NumSharp;
  9. using Tensorflow;
  10. using Tensorflow.Keras.Engine;
  11. using Tensorflow.Sessions;
  12. using TensorFlowNET.Examples.Text.cnn_models;
  13. using TensorFlowNET.Examples.TextClassification;
  14. using TensorFlowNET.Examples.Utility;
  15. using static Tensorflow.Python;
  16. namespace TensorFlowNET.Examples.CnnTextClassification
  17. {
  18. /// <summary>
  19. /// https://github.com/dongjun-Lee/text-classification-models-tf
  20. /// </summary>
  21. public class TextClassificationTrain : IExample
  22. {
  23. public int Priority => 100;
  24. public bool Enabled { get; set; } = false;
  25. public string Name => "Text Classification";
  26. public int? DataLimit = null;
  27. public bool ImportGraph { get; set; } = true;
  28. public bool UseSubset = true; // <----- set this true to use a limited subset of dbpedia
  29. private string dataDir = "text_classification";
  30. private string dataFileName = "dbpedia_csv.tar.gz";
  31. public string model_name = "vd_cnn"; // word_cnn | char_cnn | vd_cnn | word_rnn | att_rnn | rcnn
  32. private const string TRAIN_PATH = "text_classification/dbpedia_csv/train.csv";
  33. private const string SUBSET_PATH = "text_classification/dbpedia_csv/dbpedia_6400.csv";
  34. private const string TEST_PATH = "text_classification/dbpedia_csv/test.csv";
  35. private const int CHAR_MAX_LEN = 1014;
  36. private const int WORD_MAX_LEN = 1014;
  37. private const int NUM_CLASS = 14;
  38. private const int BATCH_SIZE = 64;
  39. private const int NUM_EPOCHS = 10;
  40. protected float loss_value = 0;
  41. public bool Run()
  42. {
  43. PrepareData();
  44. var graph = tf.Graph().as_default();
  45. return with(tf.Session(graph), sess =>
  46. {
  47. if (ImportGraph)
  48. return RunWithImportedGraph(sess, graph);
  49. else
  50. return RunWithBuiltGraph(sess, graph);
  51. });
  52. }
  53. protected virtual bool RunWithImportedGraph(Session sess, Graph graph)
  54. {
  55. var stopwatch = Stopwatch.StartNew();
  56. Console.WriteLine("Building dataset...");
  57. var path = UseSubset ? SUBSET_PATH : TRAIN_PATH;
  58. var (x, y, alphabet_size) = DataHelpers.build_char_dataset(path, model_name, CHAR_MAX_LEN, DataLimit = null, shuffle:!UseSubset);
  59. Console.WriteLine("\tDONE ");
  60. var (train_x, valid_x, train_y, valid_y) = train_test_split(x, y, test_size: 0.15f);
  61. Console.WriteLine("Training set size: " + train_x.len);
  62. Console.WriteLine("Test set size: " + valid_x.len);
  63. Console.WriteLine("Import graph...");
  64. var meta_file = model_name + ".meta";
  65. tf.train.import_meta_graph(Path.Join("graph", meta_file));
  66. Console.WriteLine("\tDONE " + stopwatch.Elapsed);
  67. sess.run(tf.global_variables_initializer());
  68. var train_batches = batch_iter(train_x, train_y, BATCH_SIZE, NUM_EPOCHS);
  69. var num_batches_per_epoch = (len(train_x) - 1) / BATCH_SIZE + 1;
  70. double max_accuracy = 0;
  71. Tensor is_training = graph.get_tensor_by_name("is_training:0");
  72. Tensor model_x = graph.get_tensor_by_name("x:0");
  73. Tensor model_y = graph.get_tensor_by_name("y:0");
  74. Tensor loss = graph.get_tensor_by_name("loss/value:0");
  75. Tensor optimizer = graph.get_tensor_by_name("loss/optimizer:0");
  76. Tensor global_step = graph.get_tensor_by_name("global_step:0");
  77. Tensor accuracy = graph.get_tensor_by_name("accuracy/value:0");
  78. stopwatch = Stopwatch.StartNew();
  79. int i = 0;
  80. foreach (var (x_batch, y_batch, total) in train_batches)
  81. {
  82. i++;
  83. var train_feed_dict = new FeedDict
  84. {
  85. [model_x] = x_batch,
  86. [model_y] = y_batch,
  87. [is_training] = true,
  88. };
  89. //Console.WriteLine("x: " + x_batch.ToString() + "\n");
  90. //Console.WriteLine("y: " + y_batch.ToString());
  91. // original python:
  92. //_, step, loss = sess.run([model.optimizer, model.global_step, model.loss], feed_dict = train_feed_dict)
  93. var result = sess.run(new ITensorOrOperation[] { optimizer, global_step, loss }, train_feed_dict);
  94. loss_value = result[2];
  95. var step = (int)result[1];
  96. if (step % 10 == 0 || step < 10)
  97. {
  98. var estimate = TimeSpan.FromSeconds((stopwatch.Elapsed.TotalSeconds / i) * total);
  99. Console.WriteLine($"Training on batch {i}/{total}. Estimated training time: {estimate}");
  100. Console.WriteLine($"Step {step} loss: {loss_value}");
  101. }
  102. if (step % 100 == 0)
  103. {
  104. // # Test accuracy with validation data for each epoch.
  105. var valid_batches = batch_iter(valid_x, valid_y, BATCH_SIZE, 1);
  106. var (sum_accuracy, cnt) = (0.0f, 0);
  107. foreach (var (valid_x_batch, valid_y_batch, total_validation_batches) in valid_batches)
  108. {
  109. var valid_feed_dict = new FeedDict
  110. {
  111. [model_x] = valid_x_batch,
  112. [model_y] = valid_y_batch,
  113. [is_training] = false
  114. };
  115. var result1 = sess.run(accuracy, valid_feed_dict);
  116. float accuracy_value = result1;
  117. sum_accuracy += accuracy_value;
  118. cnt += 1;
  119. }
  120. var valid_accuracy = sum_accuracy / cnt;
  121. print($"\nValidation Accuracy = {valid_accuracy}\n");
  122. // # Save model
  123. // if valid_accuracy > max_accuracy:
  124. // max_accuracy = valid_accuracy
  125. // saver.save(sess, "{0}/{1}.ckpt".format(args.model, args.model), global_step = step)
  126. // print("Model is saved.\n")
  127. }
  128. }
  129. return false;
  130. }
  131. protected virtual bool RunWithBuiltGraph(Session session, Graph graph)
  132. {
  133. Console.WriteLine("Building dataset...");
  134. var (x, y, alphabet_size) = DataHelpers.build_char_dataset("train", model_name, CHAR_MAX_LEN, DataLimit);
  135. var (train_x, valid_x, train_y, valid_y) = train_test_split(x, y, test_size: 0.15f);
  136. ITextClassificationModel model = null;
  137. switch (model_name) // word_cnn | char_cnn | vd_cnn | word_rnn | att_rnn | rcnn
  138. {
  139. case "word_cnn":
  140. case "char_cnn":
  141. case "word_rnn":
  142. case "att_rnn":
  143. case "rcnn":
  144. throw new NotImplementedException();
  145. break;
  146. case "vd_cnn":
  147. model = new VdCnn(alphabet_size, CHAR_MAX_LEN, NUM_CLASS);
  148. break;
  149. }
  150. // todo train the model
  151. return false;
  152. }
  153. // TODO: this originally is an SKLearn utility function. it randomizes train and test which we don't do here
  154. private (NDArray, NDArray, NDArray, NDArray) train_test_split(NDArray x, NDArray y, float test_size = 0.3f)
  155. {
  156. Console.WriteLine("Splitting in Training and Testing data...");
  157. int len = x.shape[0];
  158. //int classes = y.Data<int>().Distinct().Count();
  159. //int samples = len / classes;
  160. int train_size = (int)Math.Round(len * (1 - test_size));
  161. var train_x = x[new Slice(stop: train_size), new Slice()];
  162. var valid_x = x[new Slice(start: train_size + 1), new Slice()];
  163. var train_y = y[new Slice(stop: train_size)];
  164. var valid_y = y[new Slice(start: train_size + 1)];
  165. Console.WriteLine("\tDONE");
  166. return (train_x, valid_x, train_y, valid_y);
  167. }
  168. //private (int[][], int[][], int[], int[]) train_test_split(int[][] x, int[] y, float test_size = 0.3f)
  169. //{
  170. // Console.WriteLine("Splitting in Training and Testing data...");
  171. // var stopwatch = Stopwatch.StartNew();
  172. // int len = x.Length;
  173. // int train_size = int.Parse((len * (1 - test_size)).ToString());
  174. // var random = new Random(17);
  175. // // we collect indices of labels
  176. // var labels = new Dictionary<int, HashSet<int>>();
  177. // var shuffled_indices = random.Shuffle<int>(range(len).ToArray());
  178. // foreach (var i in shuffled_indices)
  179. // {
  180. // var label = y[i];
  181. // if (!labels.ContainsKey(i))
  182. // labels[label] = new HashSet<int>();
  183. // labels[label].Add(i);
  184. // }
  185. // var train_x = new int[train_size][];
  186. // var valid_x = new int[len - train_size][];
  187. // var train_y = new int[train_size];
  188. // var valid_y = new int[len - train_size];
  189. // FillWithShuffledLabels(x, y, train_x, train_y, random, labels);
  190. // FillWithShuffledLabels(x, y, valid_x, valid_y, random, labels);
  191. // Console.WriteLine("\tDONE " + stopwatch.Elapsed);
  192. // return (train_x, valid_x, train_y, valid_y);
  193. //}
  194. private static void FillWithShuffledLabels(int[][] x, int[] y, int[][] shuffled_x, int[] shuffled_y, Random random, Dictionary<int, HashSet<int>> labels)
  195. {
  196. int i = 0;
  197. var label_keys = labels.Keys.ToArray();
  198. while (i < shuffled_x.Length)
  199. {
  200. var key = label_keys[random.Next(label_keys.Length)];
  201. var set = labels[key];
  202. var index = set.First();
  203. if (set.Count == 0)
  204. {
  205. labels.Remove(key); // remove the set as it is empty
  206. label_keys = labels.Keys.ToArray();
  207. }
  208. shuffled_x[i] = x[index];
  209. shuffled_y[i] = y[index];
  210. i++;
  211. }
  212. }
  213. private IEnumerable<(NDArray, NDArray, int)> batch_iter(NDArray inputs, NDArray outputs, int batch_size, int num_epochs)
  214. {
  215. var num_batches_per_epoch = (len(inputs) - 1) / batch_size + 1;
  216. var total_batches = num_batches_per_epoch * num_epochs;
  217. foreach (var epoch in range(num_epochs))
  218. {
  219. foreach (var batch_num in range(num_batches_per_epoch))
  220. {
  221. var start_index = batch_num * batch_size;
  222. var end_index = Math.Min((batch_num + 1) * batch_size, len(inputs));
  223. if (end_index <= start_index)
  224. break;
  225. yield return (inputs[new Slice(start_index, end_index)], outputs[new Slice(start_index, end_index)], total_batches);
  226. }
  227. }
  228. }
  229. public void PrepareData()
  230. {
  231. if (UseSubset)
  232. {
  233. var url = "https://raw.githubusercontent.com/SciSharp/TensorFlow.NET/master/data/dbpedia_subset.zip";
  234. Web.Download(url, dataDir, "dbpedia_subset.zip");
  235. Compress.UnZip(Path.Combine(dataDir, "dbpedia_subset.zip"), Path.Combine(dataDir, "dbpedia_csv"));
  236. }
  237. else
  238. {
  239. string url = "https://github.com/le-scientifique/torchDatasets/raw/master/dbpedia_csv.tar.gz";
  240. Web.Download(url, dataDir, dataFileName);
  241. Compress.ExtractTGZ(Path.Join(dataDir, dataFileName), dataDir);
  242. }
  243. if (ImportGraph)
  244. {
  245. // download graph meta data
  246. var meta_file = model_name + ".meta";
  247. var meta_path = Path.Combine("graph", meta_file);
  248. if (File.GetLastWriteTime(meta_path) < new DateTime(2019, 05, 11))
  249. {
  250. // delete old cached file which contains errors
  251. Console.WriteLine("Discarding cached file: " + meta_path);
  252. File.Delete(meta_path);
  253. }
  254. var url = "https://raw.githubusercontent.com/SciSharp/TensorFlow.NET/master/graph/" + meta_file;
  255. Web.Download(url, "graph", meta_file);
  256. }
  257. }
  258. }
  259. }

tensorflow框架的.NET版本,提供了丰富的特性和API,可以借此很方便地在.NET平台下搭建深度学习训练与推理流程。