
CnnTextClassification.cs

using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text;
using Newtonsoft.Json;
using NumSharp;
using Tensorflow;
using Tensorflow.Sessions;
using TensorFlowNET.Examples.Utility;
using static Tensorflow.Python;

namespace TensorFlowNET.Examples
{
    /// <summary>
    /// https://github.com/dongjun-Lee/text-classification-models-tf
    /// </summary>
    public class CnnTextClassification : IExample
    {
        public bool Enabled { get; set; } = true;
        public string Name => "CNN Text Classification";
        public int? DataLimit = null;
        public bool IsImportingGraph { get; set; } = true;

        private const string dataDir = "word_cnn";
        private string dataFileName = "dbpedia_csv.tar.gz";
        private const string TRAIN_PATH = "word_cnn/dbpedia_csv/train.csv";
        private const string TEST_PATH = "word_cnn/dbpedia_csv/test.csv";

        private const int NUM_CLASS = 14;
        private const int BATCH_SIZE = 64;
        private const int NUM_EPOCHS = 10;
        private const int WORD_MAX_LEN = 100;
        private const int CHAR_MAX_LEN = 1014;

        protected float loss_value = 0;
        int vocabulary_size = 50000;
        NDArray train_x, valid_x, train_y, valid_y;

        public bool Run()
        {
            PrepareData();
            return Train();
        }

        // TODO: this mirrors scikit-learn's train_test_split, except that it does not
        // shuffle the rows before splitting.
        private (NDArray, NDArray, NDArray, NDArray) train_test_split(NDArray x, NDArray y, float test_size = 0.3f)
        {
            Console.WriteLine("Splitting in Training and Testing data...");
            int len = x.shape[0];
            //int classes = y.Data<int>().Distinct().Count();
            //int samples = len / classes;
            int train_size = (int)Math.Round(len * (1 - test_size));
            train_x = x[new Slice(stop: train_size), new Slice()];
            valid_x = x[new Slice(start: train_size), new Slice()];
            train_y = y[new Slice(stop: train_size)];
            valid_y = y[new Slice(start: train_size)];
            Console.WriteLine("\tDONE");
            // Optionally override the split with pre-computed arrays from disk:
            //train_x = np.Load<int[,]>(Path.Join("word_cnn", "train_x.npy"));
            //valid_x = np.Load<int[,]>(Path.Join("word_cnn", "valid_x.npy"));
            //train_y = np.Load<int[]>(Path.Join("word_cnn", "train_y.npy"));
            //valid_y = np.Load<int[]>(Path.Join("word_cnn", "valid_y.npy"));
            return (train_x, valid_x, train_y, valid_y);
        }
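
        /// <summary>
        /// Draws samples one label at a time in random label order, so the shuffled
        /// arrays interleave the classes instead of keeping them in file order.
        /// </summary>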
        private static void FillWithShuffledLabels(int[][] x, int[] y, int[][] shuffled_x, int[] shuffled_y, Random random, Dictionary<int, HashSet<int>> labels)
        {
            int i = 0;
            var label_keys = labels.Keys.ToArray();
            while (i < shuffled_x.Length)
            {
                var key = label_keys[random.Next(label_keys.Length)];
                var set = labels[key];
                var index = set.First();
                set.Remove(index); // consume this sample so it cannot be drawn again
                if (set.Count == 0)
                {
                    labels.Remove(key); // remove the set as it is empty
                    label_keys = labels.Keys.ToArray();
                }
                shuffled_x[i] = x[index];
                shuffled_y[i] = y[index];
                i++;
            }
        }
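
        /// <summary>
        /// Yields (inputs, outputs, total_batches) mini-batches for the requested
        /// number of epochs; total_batches lets callers estimate remaining time.
        /// </summary>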
        private IEnumerable<(NDArray, NDArray, int)> batch_iter(NDArray inputs, NDArray outputs, int batch_size, int num_epochs)
        {
            var num_batches_per_epoch = (len(inputs) - 1) / batch_size + 1;
            var total_batches = num_batches_per_epoch * num_epochs;
            foreach (var epoch in range(num_epochs))
            {
                foreach (var batch_num in range(num_batches_per_epoch))
                {
                    var start_index = batch_num * batch_size;
                    var end_index = Math.Min((batch_num + 1) * batch_size, len(inputs));
                    if (end_index <= start_index)
                        break;
                    yield return (inputs[new Slice(start_index, end_index)], outputs[new Slice(start_index, end_index)], total_batches);
                }
            }
        }
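
        /// <summary>
        /// Downloads the DBpedia subset, builds the word dictionary and the padded
        /// word-id dataset, then splits it into training and validation sets.
        /// </summary>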
        public void PrepareData()
        {
            // full dataset https://github.com/le-scientifique/torchDatasets/raw/master/dbpedia_csv.tar.gz
            var url = "https://raw.githubusercontent.com/SciSharp/TensorFlow.NET/master/data/dbpedia_subset.zip";
            Web.Download(url, dataDir, "dbpedia_subset.zip");
            Compress.UnZip(Path.Combine(dataDir, "dbpedia_subset.zip"), Path.Combine(dataDir, "dbpedia_csv"));

            Console.WriteLine("Building dataset...");
            var word_dict = DataHelpers.build_word_dict(TRAIN_PATH);
            //vocabulary_size = len(word_dict);
            var (x, y) = DataHelpers.build_word_dataset(TRAIN_PATH, word_dict, WORD_MAX_LEN);
            Console.WriteLine("\tDONE");

            var (train_x, valid_x, train_y, valid_y) = train_test_split(x, y, test_size: 0.15f);
            Console.WriteLine("Training set size: " + train_x.len);
            Console.WriteLine("Validation set size: " + valid_x.len);
        }
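
        /// <summary>
        /// Downloads the pre-built meta graph (word_cnn.meta) and imports it into a
        /// fresh default graph, discarding a stale cached copy if one exists.
        /// </summary>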
        public Graph ImportGraph()
        {
            var graph = tf.Graph().as_default();

            // download graph meta data
            var meta_file = "word_cnn.meta";
            var meta_path = Path.Combine("graph", meta_file);
            if (File.GetLastWriteTime(meta_path) < new DateTime(2019, 05, 11))
            {
                // delete old cached file which contains errors
                Console.WriteLine("Discarding cached file: " + meta_path);
                if (File.Exists(meta_path))
                    File.Delete(meta_path);
            }
            var url = "https://raw.githubusercontent.com/SciSharp/TensorFlow.NET/master/graph/" + meta_file;
            Web.Download(url, "graph", meta_file);

            Console.WriteLine("Import graph...");
            tf.train.import_meta_graph(Path.Join("graph", meta_file));
            Console.WriteLine("\tDONE");

            return graph;
        }
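
        /// <summary>
        /// Builds the word-level CNN: an embedding layer, parallel convolutions with
        /// filter sizes 3/4/5 followed by max-over-time pooling, dropout, and a dense
        /// output layer trained with Adam on a sparse softmax cross-entropy loss.
        /// </summary>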
        public Graph BuildGraph()
        {
            var graph = tf.Graph().as_default();

            var embedding_size = 128;
            var learning_rate = 0.001f;
            var filter_sizes = new[] { 3, 4, 5 };
            var num_filters = 100;
            var document_max_len = 100;

            var x = tf.placeholder(tf.int32, new TensorShape(-1, document_max_len), name: "x");
            var y = tf.placeholder(tf.int32, new TensorShape(-1), name: "y");
            var is_training = tf.placeholder(tf.@bool, new TensorShape(), name: "is_training");
            var global_step = tf.Variable(0, trainable: false);
            var keep_prob = tf.where(is_training, 0.5f, 1.0f);

            Tensor x_emb = null;
            with(tf.name_scope("embedding"), scope =>
            {
                var init_embeddings = tf.random_uniform(new int[] { vocabulary_size, embedding_size });
                var embeddings = tf.get_variable("embeddings", initializer: init_embeddings);
                x_emb = tf.nn.embedding_lookup(embeddings, x);
                x_emb = tf.expand_dims(x_emb, -1);
            });

            var pooled_outputs = new List<Tensor>();
            foreach (var filter_size in filter_sizes)
            {
                var conv = tf.layers.conv2d(
                    x_emb,
                    filters: num_filters,
                    kernel_size: new int[] { filter_size, embedding_size },
                    strides: new int[] { 1, 1 },
                    padding: "VALID",
                    activation: tf.nn.relu());
                // max-over-time pooling: collapse the length dimension to 1
                var pool = tf.layers.max_pooling2d(
                    conv,
                    pool_size: new[] { document_max_len - filter_size + 1, 1 },
                    strides: new[] { 1, 1 },
                    padding: "VALID");
                pooled_outputs.Add(pool);
            }

            var h_pool = tf.concat(pooled_outputs, 3);
            var h_pool_flat = tf.reshape(h_pool, new TensorShape(-1, num_filters * filter_sizes.Length));

            Tensor h_drop = null;
            with(tf.name_scope("dropout"), delegate
            {
                h_drop = tf.nn.dropout(h_pool_flat, keep_prob);
            });

            Tensor logits = null;
            Tensor predictions = null;
            with(tf.name_scope("output"), delegate
            {
                logits = tf.layers.dense(h_drop, NUM_CLASS);
                predictions = tf.argmax(logits, -1, output_type: tf.int32);
            });

            with(tf.name_scope("loss"), delegate
            {
                var sscel = tf.nn.sparse_softmax_cross_entropy_with_logits(logits: logits, labels: y);
                var loss = tf.reduce_mean(sscel);
                var adam = tf.train.AdamOptimizer(learning_rate);
                var optimizer = adam.minimize(loss, global_step: global_step);
            });

            with(tf.name_scope("accuracy"), delegate
            {
                var correct_predictions = tf.equal(predictions, y);
                var accuracy = tf.reduce_mean(tf.cast(correct_predictions, TF_DataType.TF_FLOAT), name: "accuracy");
            });

            return graph;
        }
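
        /// <summary>
        /// Runs the training loop: feeds mini-batches, logs the loss every 10 steps,
        /// evaluates validation accuracy every 100 steps, and checkpoints the model
        /// whenever validation accuracy improves.
        /// </summary>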
        private bool Train(Session sess, Graph graph)
        {
            sess.run(tf.global_variables_initializer());
            var saver = tf.train.Saver(tf.global_variables());

            var train_batches = batch_iter(train_x, train_y, BATCH_SIZE, NUM_EPOCHS);
            var num_batches_per_epoch = (len(train_x) - 1) / BATCH_SIZE + 1;
            double max_accuracy = 0;

            Tensor is_training = graph.OperationByName("is_training");
            Tensor model_x = graph.OperationByName("x");
            Tensor model_y = graph.OperationByName("y");
            Tensor loss = graph.OperationByName("loss/Mean");
            Operation optimizer = graph.OperationByName("loss/Adam");
            Tensor global_step = graph.OperationByName("Variable");
            Tensor accuracy = graph.OperationByName("accuracy/accuracy");

            var stopwatch = Stopwatch.StartNew();
            int i = 0;
            foreach (var (x_batch, y_batch, total) in train_batches)
            {
                i++;
                var train_feed_dict = new FeedDict
                {
                    [model_x] = x_batch,
                    [model_y] = y_batch,
                    [is_training] = true,
                };
                var result = sess.run(new ITensorOrOperation[] { optimizer, global_step, loss }, train_feed_dict);
                loss_value = result[2];
                var step = (int)result[1];
                if (step % 10 == 0)
                {
                    var estimate = TimeSpan.FromSeconds((stopwatch.Elapsed.TotalSeconds / i) * total);
                    Console.WriteLine($"Training on batch {i}/{total} loss: {loss_value}. Estimated training time: {estimate}");
                }
                if (step % 100 == 0)
                {
                    // Evaluate accuracy on the validation data every 100 steps.
                    var valid_batches = batch_iter(valid_x, valid_y, BATCH_SIZE, 1);
                    var (sum_accuracy, cnt) = (0.0f, 0);
                    foreach (var (valid_x_batch, valid_y_batch, total_validation_batches) in valid_batches)
                    {
                        var valid_feed_dict = new FeedDict
                        {
                            [model_x] = valid_x_batch,
                            [model_y] = valid_y_batch,
                            [is_training] = false
                        };
                        var result1 = sess.run(accuracy, valid_feed_dict);
                        float accuracy_value = result1;
                        sum_accuracy += accuracy_value;
                        cnt += 1;
                    }
                    var valid_accuracy = sum_accuracy / cnt;
                    print($"\nValidation Accuracy = {valid_accuracy}\n");

                    // Save the model whenever validation accuracy improves.
                    if (valid_accuracy > max_accuracy)
                    {
                        max_accuracy = valid_accuracy;
                        saver.save(sess, $"{dataDir}/word_cnn.ckpt", global_step: step);
                        print("Model is saved.\n");
                    }
                }
            }

            return max_accuracy > 0.8;
        }
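
        /// <summary>
        /// Imports or builds the graph depending on IsImportingGraph, then trains it
        /// in a new session.
        /// </summary>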
        public bool Train()
        {
            var graph = IsImportingGraph ? ImportGraph() : BuildGraph();
            // string json = JsonConvert.SerializeObject(graph, Formatting.Indented);
            return with(tf.Session(graph), sess => Train(sess, graph));
        }

        public bool Predict()
        {
            throw new NotImplementedException();
        }
    }
}