You can not select more than 25 topics Topics must start with a chinese character,a letter or number, can include dashes ('-') and can be up to 35 characters long.

NaiveBayesClassifier.cs 8.7 kB

6 years ago
6 years ago
6 years ago
6 years ago
6 years ago
6 years ago
6 years ago
6 years ago
6 years ago
6 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196
  1. using System;
  2. using System.Collections.Generic;
  3. using System.Text;
  4. using Tensorflow;
  5. using NumSharp;
  6. using System.Linq;
  7. using static Tensorflow.Python;
  8. namespace TensorFlowNET.Examples
  9. {
  10. /// <summary>
  11. /// https://github.com/nicolov/naive_bayes_tensorflow
  12. /// </summary>
  13. public class NaiveBayesClassifier : IExample
  14. {
  15. public bool Enabled { get; set; } = true;
  16. public string Name => "Naive Bayes Classifier";
  17. public bool IsImportingGraph { get; set; } = false;
  18. public NDArray X, y;
  19. public Normal dist { get; set; }
  20. public bool Run()
  21. {
  22. PrepareData();
  23. fit(X, y);
  24. // Create a regular grid and classify each point
  25. float x_min = X.amin(0).Data<float>(0) - 0.5f;
  26. float y_min = X.amin(0).Data<float>(1) - 0.5f;
  27. float x_max = X.amax(0).Data<float>(0) + 0.5f;
  28. float y_max = X.amax(0).Data<float>(1) + 0.5f;
  29. var (xx, yy) = np.meshgrid(np.linspace(x_min, x_max, 30), np.linspace(y_min, y_max, 30));
  30. with(tf.Session(), sess =>
  31. {
  32. var samples = np.vstack<float>(xx.ravel(), yy.ravel());
  33. samples = np.transpose(samples);
  34. var array = np.Load<double[,]>("H:\\PythonApplication1\\PythonApplication1\\data.npy");
  35. samples = np.array(array).astype(np.float32);
  36. var Z = sess.run(predict(samples));
  37. });
  38. return true;
  39. }
  40. public void fit(NDArray X, NDArray y)
  41. {
  42. var unique_y = y.unique<int>();
  43. var dic = new Dictionary<int, List<List<float>>>();
  44. // Init uy in dic
  45. foreach (int uy in unique_y.Data<int>())
  46. {
  47. dic.Add(uy, new List<List<float>>());
  48. }
  49. // Separate training points by class
  50. // Shape : nb_classes * nb_samples * nb_features
  51. int maxCount = 0;
  52. for (int i = 0; i < y.size; i++)
  53. {
  54. var curClass = y[i];
  55. var l = dic[curClass];
  56. var pair = new List<float>();
  57. pair.Add(X[i,0]);
  58. pair.Add(X[i, 1]);
  59. l.Add(pair);
  60. if (l.Count > maxCount)
  61. {
  62. maxCount = l.Count;
  63. }
  64. dic[curClass] = l;
  65. }
  66. float[,,] points = new float[dic.Count, maxCount, X.shape[1]];
  67. foreach (KeyValuePair<int, List<List<float>>> kv in dic)
  68. {
  69. int j = (int) kv.Key;
  70. for (int i = 0; i < maxCount; i++)
  71. {
  72. for (int k = 0; k < X.shape[1]; k++)
  73. {
  74. points[j, i, k] = kv.Value[i][k];
  75. }
  76. }
  77. }
  78. var points_by_class = np.array(points);
  79. // estimate mean and variance for each class / feature
  80. // shape : nb_classes * nb_features
  81. var cons = tf.constant(points_by_class);
  82. var tup = tf.nn.moments(cons, new int[]{1});
  83. var mean = tup.Item1;
  84. var variance = tup.Item2;
  85. // Create a 3x2 univariate normal distribution with the
  86. // Known mean and variance
  87. var dist = tf.distributions.Normal(mean, tf.sqrt(variance));
  88. this.dist = dist;
  89. }
  90. public Tensor predict(NDArray X)
  91. {
  92. if (dist == null)
  93. {
  94. throw new ArgumentNullException("cant not find the model (normal distribution)!");
  95. }
  96. int nb_classes = (int) dist.scale().shape[0];
  97. int nb_features = (int)dist.scale().shape[1];
  98. // Conditional probabilities log P(x|c) with shape
  99. // (nb_samples, nb_classes)
  100. var t1= ops.convert_to_tensor(X, TF_DataType.TF_FLOAT);
  101. var t2 = ops.convert_to_tensor(new int[] { 1, nb_classes });
  102. Tensor tile = tf.tile(t1, t2);
  103. var t3 = ops.convert_to_tensor(new int[] { -1, nb_classes, nb_features });
  104. Tensor r = tf.reshape(tile, t3);
  105. var cond_probs = tf.reduce_sum(dist.log_prob(r), 2);
  106. // uniform priors
  107. float[] tem = new float[nb_classes];
  108. for (int i = 0; i < tem.Length; i++)
  109. {
  110. tem[i] = 1.0f / nb_classes;
  111. }
  112. var priors = np.log(np.array<float>(tem));
  113. // posterior log probability, log P(c) + log P(x|c)
  114. var joint_likelihood = tf.add(ops.convert_to_tensor(priors, TF_DataType.TF_FLOAT), cond_probs);
  115. // normalize to get (log)-probabilities
  116. var norm_factor = tf.reduce_logsumexp(joint_likelihood, new int[] { 1 }, keepdims: true);
  117. var log_prob = joint_likelihood - norm_factor;
  118. // exp to get the actual probabilities
  119. return tf.exp(log_prob);
  120. }
  121. public void PrepareData()
  122. {
  123. #region Training data
  124. X = np.array(new float[,] {
  125. {5.1f, 3.5f}, {4.9f, 3.0f}, {4.7f, 3.2f}, {4.6f, 3.1f}, {5.0f, 3.6f}, {5.4f, 3.9f},
  126. {4.6f, 3.4f}, {5.0f, 3.4f}, {4.4f, 2.9f}, {4.9f, 3.1f}, {5.4f, 3.7f}, {4.8f, 3.4f},
  127. {4.8f, 3.0f}, {4.3f, 3.0f}, {5.8f, 4.0f}, {5.7f, 4.4f}, {5.4f, 3.9f}, {5.1f, 3.5f},
  128. {5.7f, 3.8f}, {5.1f, 3.8f}, {5.4f, 3.4f}, {5.1f, 3.7f}, {5.1f, 3.3f}, {4.8f, 3.4f},
  129. {5.0f, 3.0f}, {5.0f, 3.4f}, {5.2f, 3.5f}, {5.2f, 3.4f}, {4.7f, 3.2f}, {4.8f, 3.1f},
  130. {5.4f, 3.4f}, {5.2f, 4.1f}, {5.5f, 4.2f}, {4.9f, 3.1f}, {5.0f, 3.2f}, {5.5f, 3.5f},
  131. {4.9f, 3.6f}, {4.4f, 3.0f}, {5.1f, 3.4f}, {5.0f, 3.5f}, {4.5f, 2.3f}, {4.4f, 3.2f},
  132. {5.0f, 3.5f}, {5.1f, 3.8f}, {4.8f, 3.0f}, {5.1f, 3.8f}, {4.6f, 3.2f}, {5.3f, 3.7f},
  133. {5.0f, 3.3f}, {7.0f, 3.2f}, {6.4f, 3.2f}, {6.9f, 3.1f}, {5.5f, 2.3f}, {6.5f, 2.8f},
  134. {5.7f, 2.8f}, {6.3f, 3.3f}, {4.9f, 2.4f}, {6.6f, 2.9f}, {5.2f, 2.7f}, {5.0f, 2.0f},
  135. {5.9f, 3.0f}, {6.0f, 2.2f}, {6.1f, 2.9f}, {5.6f, 2.9f}, {6.7f, 3.1f}, {5.6f, 3.0f},
  136. {5.8f, 2.7f}, {6.2f, 2.2f}, {5.6f, 2.5f}, {5.9f, 3.0f}, {6.1f, 2.8f}, {6.3f, 2.5f},
  137. {6.1f, 2.8f}, {6.4f, 2.9f}, {6.6f, 3.0f}, {6.8f, 2.8f}, {6.7f, 3.0f}, {6.0f, 2.9f},
  138. {5.7f, 2.6f}, {5.5f, 2.4f}, {5.5f, 2.4f}, {5.8f, 2.7f}, {6.0f, 2.7f}, {5.4f, 3.0f},
  139. {6.0f, 3.4f}, {6.7f, 3.1f}, {6.3f, 2.3f}, {5.6f, 3.0f}, {5.5f, 2.5f}, {5.5f, 2.6f},
  140. {6.1f, 3.0f}, {5.8f, 2.6f}, {5.0f, 2.3f}, {5.6f, 2.7f}, {5.7f, 3.0f}, {5.7f, 2.9f},
  141. {6.2f, 2.9f}, {5.1f, 2.5f}, {5.7f, 2.8f}, {6.3f, 3.3f}, {5.8f, 2.7f}, {7.1f, 3.0f},
  142. {6.3f, 2.9f}, {6.5f, 3.0f}, {7.6f, 3.0f}, {4.9f, 2.5f}, {7.3f, 2.9f}, {6.7f, 2.5f},
  143. {7.2f, 3.6f}, {6.5f, 3.2f}, {6.4f, 2.7f}, {6.8f, 3.0f}, {5.7f, 2.5f}, {5.8f, 2.8f},
  144. {6.4f, 3.2f}, {6.5f, 3.0f}, {7.7f, 3.8f}, {7.7f, 2.6f}, {6.0f, 2.2f}, {6.9f, 3.2f},
  145. {5.6f, 2.8f}, {7.7f, 2.8f}, {6.3f, 2.7f}, {6.7f, 3.3f}, {7.2f, 3.2f}, {6.2f, 2.8f},
  146. {6.1f, 3.0f}, {6.4f, 2.8f}, {7.2f, 3.0f}, {7.4f, 2.8f}, {7.9f, 3.8f}, {6.4f, 2.8f},
  147. {6.3f, 2.8f}, {6.1f, 2.6f}, {7.7f, 3.0f}, {6.3f, 3.4f}, {6.4f, 3.1f}, {6.0f, 3.0f},
  148. {6.9f, 3.1f}, {6.7f, 3.1f}, {6.9f, 3.1f}, {5.8f, 2.7f}, {6.8f, 3.2f}, {6.7f, 3.3f},
  149. {6.7f, 3.0f}, {6.3f, 2.5f}, {6.5f, 3.0f}, {6.2f, 3.4f}, {5.9f, 3.0f}, {5.8f, 3.0f}});
  150. y = np.array(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  151. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  152. 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  153. 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  154. 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
  155. 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
  156. 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2);
  157. #endregion
  158. }
  159. public Graph ImportGraph()
  160. {
  161. throw new NotImplementedException();
  162. }
  163. public Graph BuildGraph()
  164. {
  165. throw new NotImplementedException();
  166. }
  167. public bool Train()
  168. {
  169. throw new NotImplementedException();
  170. }
  171. public bool Predict()
  172. {
  173. throw new NotImplementedException();
  174. }
  175. }
  176. }