
ternary_dense.py 3.7 kB

#! /usr/bin/python
# -*- coding: utf-8 -*-

import tensorlayer as tl
from tensorlayer import logging
from tensorlayer.layers.core import Module
from tensorlayer.layers.utils import compute_alpha, ternary_operation

__all__ = [
    'TernaryDense',
]


class TernaryDense(Module):
    """The :class:`TernaryDense` class is a ternary fully connected layer whose weights are constrained to -1, 0 or 1 during inference.

    # TODO The TernaryDense only supports the TensorFlow backend.

    Note that the bias vector is not ternarized.

    Parameters
    ----------
    n_units : int
        The number of units of this layer.
    act : activation function
        The activation function of this layer, usually set to ``tf.act.sign`` or apply :class:`SignLayer` after :class:`BatchNormLayer`.
    use_gemm : boolean
        If True, use gemm instead of ``tf.matmul`` for inference. (TODO).
    W_init : initializer
        The initializer for the weight matrix.
    b_init : initializer or None
        The initializer for the bias vector. If None, the bias is skipped.
    in_channels : int
        The number of channels of the previous layer.
        If None, it will be automatically detected when the layer is forwarded for the first time.
    name : None or str
        A unique layer name.

    """

    def __init__(
        self,
        n_units=100,
        act=None,
        use_gemm=False,
        W_init=tl.initializers.truncated_normal(stddev=0.05),
        b_init=tl.initializers.constant(value=0.0),
        in_channels=None,
        name=None,  # 'ternary_dense',
    ):
        super().__init__(name, act=act)
        self.n_units = n_units
        self.use_gemm = use_gemm
        self.W_init = W_init
        self.b_init = b_init
        self.in_channels = in_channels

        # If the input width is known, build the weights immediately.
        if self.in_channels is not None:
            self.build((None, self.in_channels))
            self._built = True

        logging.info(
            "TernaryDense %s: %d %s" %
            (self.name, n_units, self.act.__name__ if self.act is not None else 'No Activation')
        )

    def __repr__(self):
        actstr = self.act.__name__ if self.act is not None else 'No Activation'
        s = ('{classname}(n_units={n_units}, ' + actstr)
        if self.in_channels is not None:
            s += ', in_channels=\'{in_channels}\''
        if self.name is not None:
            s += ', name=\'{name}\''
        s += ')'
        return s.format(classname=self.__class__.__name__, **self.__dict__)

    def build(self, inputs_shape):
        if len(inputs_shape) != 2:
            raise Exception("The input dimension must be rank 2, please reshape or flatten it")
        if self.in_channels is None:
            self.in_channels = inputs_shape[1]
        if self.use_gemm:
            raise Exception("TODO. The current version uses tf.matmul for inference.")

        n_in = inputs_shape[-1]
        self.W = self._get_weights(var_name="weights", shape=(n_in, self.n_units), init=self.W_init)
        if self.b_init is not None:
            self.b = self._get_weights(var_name="biases", shape=(self.n_units, ), init=self.b_init)

    def forward(self, inputs):
        # Build lazily on the first forward pass if in_channels was not given.
        if not self._forward_state:
            if not self._built:
                self.build(tl.get_tensor_shape(inputs))
                self._built = True
            self._forward_state = True

        alpha = compute_alpha(self.W)    # per-layer scaling factor
        W_ = ternary_operation(self.W)   # weights quantized to {-1, 0, 1}
        W_ = tl.ops.multiply(alpha, W_)  # rescale the ternary weights

        outputs = tl.ops.matmul(inputs, W_)
        if self.b_init is not None:
            outputs = tl.ops.bias_add(outputs, self.b, name='bias_add')
        if self.act:
            outputs = self.act(outputs)
        return outputs
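The heavy lifting in forward() is done by compute_alpha and ternary_operation from tensorlayer.layers.utils. The self-contained NumPy sketch below illustrates the usual threshold-based ternarization scheme (in the style of Ternary Weight Networks); the 0.7 threshold factor and the exact formulas are assumptions for illustration and may differ from the library's internals.

import numpy as np

def ternary_sketch(W, thresh_factor=0.7):
    # delta: threshold proportional to the mean absolute weight (assumed factor 0.7)
    delta = thresh_factor * np.mean(np.abs(W))
    # W_t: weights mapped to {-1, 0, +1} by thresholding
    W_t = np.where(W > delta, 1.0, np.where(W < -delta, -1.0, 0.0))
    # alpha: mean absolute value of the weights that survived the threshold
    mask = np.abs(W) > delta
    alpha = np.abs(W[mask]).mean() if mask.any() else 0.0
    return alpha, W_t

W = np.random.randn(784, 100).astype('float32')
alpha, W_t = ternary_sketch(W)
W_approx = alpha * W_t           # what forward() effectively multiplies the inputs by
print(sorted(np.unique(W_t)))    # [-1.0, 0.0, 1.0]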

TensorLayer 3.0 is a deep learning library that supports multiple deep learning frameworks as computational backends. Planned backends include TensorFlow, PyTorch, MindSpore, and Paddle.
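A minimal usage sketch of the layer above, assuming the default TensorFlow backend, that TernaryDense is importable from tensorlayer.layers as in earlier TensorLayer releases, and that calling the module dispatches to forward(); shapes and names are illustrative only.

import numpy as np
import tensorflow as tf
from tensorlayer.layers import TernaryDense

# A batch of 8 flattened 28x28 inputs (illustrative shapes).
x = tf.convert_to_tensor(np.random.randn(8, 784).astype('float32'))

# in_channels is given, so the weights are built in __init__.
layer = TernaryDense(n_units=100, in_channels=784, name='tdense_example')

y = layer(x)      # matmul against alpha-scaled ternary weights, then bias_add
print(y.shape)    # expected: (8, 100)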