You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number; can include dashes ('-'); and can be up to 35 characters long.

deprecated.py 11 kB

4 years ago
4 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439
  1. #! /usr/bin/python
  2. # -*- coding: utf-8 -*-
  3. __all__ = []
  4. class NonExistingLayerError(Exception):
  5. pass
  6. # activation.py
  7. __all__ += [
  8. 'PReluLayer',
  9. 'PRelu6Layer',
  10. 'PTRelu6Layer',
  11. ]
  12. __log__ = '\n Hint: 1) downgrade TL from version 3.x to 2.x. 2) check the documentation of TF version 2.x and TL version 3.x'
  13. def PReluLayer(*args, **kwargs):
  14. raise NonExistingLayerError("PReluLayer(net, name='a') --> PRelu(name='a')(net))" + __log__)
  15. def PRelu6Layer(*args, **kwargs):
  16. raise NonExistingLayerError("PRelu6Layer(net, name='a') --> PRelu6(name='a')(net))" + __log__)
  17. def PTRelu6Layer(*args, **kwargs):
  18. raise NonExistingLayerError("PTRelu6Layer(net, name='a') --> PTRelu(name='a')(net))" + __log__)
  19. # convolution/atrous_conv.py
  20. __all__ += [
  21. 'AtrousConv1dLayer',
  22. 'AtrousConv2dLayer',
  23. 'AtrousDeConv2dLayer',
  24. ]
  25. def AtrousConv1dLayer(*args, **kwargs):
  26. raise NonExistingLayerError("use `tl.layers.Conv1d` with dilation instead" + __log__)
  27. def AtrousConv2dLayer(*args, **kwargs):
  28. raise NonExistingLayerError("use `tl.layers.Conv2d` with dilation instead" + __log__)
  29. def AtrousDeConv2dLayer(*args, **kwargs):
  30. # raise NonExistingLayerError("AtrousDeConv2dLayer(net, name='a') --> AtrousDeConv2d(name='a')(net)")
  31. raise NonExistingLayerError("use `tl.layers.DeConv2d` with dilation instead" + __log__)
  32. # dense/base_dense.py
  33. __all__ += [
  34. 'DenseLayer',
  35. ]
  36. def DenseLayer(*args, **kwargs):
  37. raise NonExistingLayerError("DenseLayer(net, name='a') --> Dense(name='a')(net)" + __log__)
  38. # dense/binary_dense.py
  39. __all__ += [
  40. 'BinaryDenseLayer',
  41. ]
  42. def BinaryDenseLayer(*args, **kwargs):
  43. raise NonExistingLayerError("BinaryDenseLayer(net, name='a') --> BinaryDense(name='a')(net)" + __log__)
  44. # dense/dorefa_dense.py
  45. __all__ += [
  46. 'DorefaDenseLayer',
  47. ]
  48. def DorefaDenseLayer(*args, **kwargs):
  49. raise NonExistingLayerError("DorefaDenseLayer(net, name='a') --> DorefaDense(name='a')(net)" + __log__)
  50. # dense/dropconnect.py
  51. __all__ += [
  52. 'DropconnectDenseLayer',
  53. ]
  54. def DropconnectDenseLayer(*args, **kwargs):
  55. raise NonExistingLayerError("DropconnectDenseLayer(net, name='a') --> DropconnectDense(name='a')(net)" + __log__)
  56. # dense/quan_dense_bn.py
  57. __all__ += [
  58. 'QuanDenseLayerWithBN',
  59. ]
  60. def QuanDenseLayerWithBN(*args, **kwargs):
  61. raise NonExistingLayerError("QuanDenseLayerWithBN(net, name='a') --> QuanDenseWithBN(name='a')(net)" + __log__)
  62. # dense/ternary_dense.py
  63. __all__ += [
  64. 'TernaryDenseLayer',
  65. ]
  66. def TernaryDenseLayer(*args, **kwargs):
  67. raise NonExistingLayerError("TernaryDenseLayer(net, name='a') --> TernaryDense(name='a')(net)" + __log__)
  68. # dropout.py
  69. __all__ += [
  70. 'DropoutLayer',
  71. ]
  72. def DropoutLayer(*args, **kwargs):
  73. raise NonExistingLayerError(
  74. "DropoutLayer(net, is_train=True, name='a') --> Dropout(name='a')(net, is_train=True)" + __log__
  75. )
  76. # extend.py
  77. __all__ += [
  78. 'ExpandDimsLayer',
  79. 'TileLayer',
  80. ]
  81. def ExpandDimsLayer(*args, **kwargs):
  82. raise NonExistingLayerError("ExpandDimsLayer(net, name='a') --> ExpandDims(name='a')(net)" + __log__)
  83. def TileLayer(*args, **kwargs):
  84. raise NonExistingLayerError("TileLayer(net, name='a') --> Tile(name='a')(net)" + __log__)
  85. # image_resampling.py
  86. __all__ += [
  87. 'UpSampling2dLayer',
  88. 'DownSampling2dLayer',
  89. ]
  90. def UpSampling2dLayer(*args, **kwargs):
  91. raise NonExistingLayerError("UpSampling2dLayer(net, name='a') --> UpSampling2d(name='a')(net)" + __log__)
  92. def DownSampling2dLayer(*args, **kwargs):
  93. raise NonExistingLayerError("DownSampling2dLayer(net, name='a') --> DownSampling2d(name='a')(net)" + __log__)
  94. # importer.py
  95. __all__ += [
  96. 'SlimNetsLayer',
  97. 'KerasLayer',
  98. ]
  99. def SlimNetsLayer(*args, **kwargs):
  100. raise NonExistingLayerError("SlimNetsLayer(net, name='a') --> SlimNets(name='a')(net)" + __log__)
  101. def KerasLayer(*args, **kwargs):
  102. raise NonExistingLayerError("KerasLayer(net, name='a') --> Keras(name='a')(net)" + __log__)
  103. # inputs.py
  104. __all__ += [
  105. 'InputLayer',
  106. ]
  107. def InputLayer(*args, **kwargs):
  108. raise NonExistingLayerError("InputLayer(x, name='a') --> Input(name='a')(x)" + __log__)
  109. # embedding.py
  110. __all__ += [
  111. 'OneHotInputLayer',
  112. 'Word2vecEmbeddingInputlayer',
  113. 'EmbeddingInputlayer',
  114. 'AverageEmbeddingInputlayer',
  115. ]
  116. def OneHotInputLayer(*args, **kwargs):
  117. raise NonExistingLayerError(
  118. "Not longer Input layer: OneHotInputLayer(x, name='a') --> OneHot(name='a')(layer)" + __log__
  119. )
  120. def Word2vecEmbeddingInputlayer(*args, **kwargs):
  121. raise NonExistingLayerError(
  122. "Not longer Input layer: Word2vecEmbeddingInputlayer(x, name='a') --> Word2vecEmbedding(name='a')(layer)" +
  123. __log__
  124. )
  125. def EmbeddingInputlayer(*args, **kwargs):
  126. raise NonExistingLayerError(
  127. "Not longer Input layer: EmbeddingInputlayer(x, name='a') --> Embedding(name='a')(layer)" + __log__
  128. )
  129. def AverageEmbeddingInputlayer(*args, **kwargs):
  130. raise NonExistingLayerError(
  131. "Not longer Input layer: AverageEmbeddingInputlayer(x, name='a') --> AverageEmbedding(name='a')(layer)" +
  132. __log__
  133. )
  134. # lambda.py
  135. __all__ += [
  136. 'LambdaLayer',
  137. 'ElementwiseLambdaLayer',
  138. ]
  139. def LambdaLayer(*args, **kwargs):
  140. raise NonExistingLayerError(
  141. "LambdaLayer(x, lambda x: 2*x, name='a') --> Lambda(lambda x: 2*x, name='a')(x)" + __log__
  142. )
  143. def ElementwiseLambdaLayer(*args, **kwargs):
  144. raise NonExistingLayerError(
  145. "ElementwiseLambdaLayer(x, ..., name='a') --> ElementwiseLambda(..., name='a')(x)" + __log__
  146. )
  147. # merge.py
  148. __all__ += [
  149. 'ConcatLayer',
  150. 'ElementwiseLayer',
  151. ]
  152. def ConcatLayer(*args, **kwargs):
  153. raise NonExistingLayerError("ConcatLayer(x, ..., name='a') --> Concat(..., name='a')(x)" + __log__)
  154. def ElementwiseLayer(*args, **kwargs):
  155. raise NonExistingLayerError("ElementwiseLayer(x, ..., name='a') --> Elementwise(..., name='a')(x)" + __log__)
  156. # noise.py
  157. __all__ += [
  158. 'GaussianNoiseLayer',
  159. ]
  160. def GaussianNoiseLayer(*args, **kwargs):
  161. raise NonExistingLayerError("GaussianNoiseLayer(x, ..., name='a') --> GaussianNoise(..., name='a')(x)" + __log__)
  162. # normalization.py
  163. __all__ += [
  164. 'BatchNormLayer',
  165. 'InstanceNormLayer',
  166. 'LayerNormLayer',
  167. 'LocalResponseNormLayer',
  168. 'GroupNormLayer',
  169. 'SwitchNormLayer',
  170. ]
  171. def BatchNormLayer(*args, **kwargs):
  172. raise NonExistingLayerError(
  173. "BatchNormLayer(x, is_train=True, name='a') --> BatchNorm(name='a')(x, is_train=True)" + __log__
  174. )
  175. def InstanceNormLayer(*args, **kwargs):
  176. raise NonExistingLayerError("InstanceNormLayer(x, name='a') --> InstanceNorm(name='a')(x)" + __log__)
  177. def LayerNormLayer(*args, **kwargs):
  178. raise NonExistingLayerError("LayerNormLayer(x, name='a') --> LayerNorm(name='a')(x)" + __log__)
  179. def LocalResponseNormLayer(*args, **kwargs):
  180. raise NonExistingLayerError("LocalResponseNormLayer(x, name='a') --> LocalResponseNorm(name='a')(x)" + __log__)
  181. def GroupNormLayer(*args, **kwargs):
  182. raise NonExistingLayerError("GroupNormLayer(x, name='a') --> GroupNorm(name='a')(x)" + __log__)
  183. def SwitchNormLayer(*args, **kwargs):
  184. raise NonExistingLayerError("SwitchNormLayer(x, name='a') --> SwitchNorm(name='a')(x)" + __log__)
  185. # quantize_layer.py
  186. __all__ += [
  187. 'SignLayer',
  188. ]
  189. def SignLayer(*args, **kwargs):
  190. raise NonExistingLayerError("SignLayer(x, name='a') --> Sign(name='a')(x)" + __log__)
  191. # recurrent/lstm_layers.py
  192. __all__ += [
  193. 'ConvLSTMLayer',
  194. ]
  195. def ConvLSTMLayer(*args, **kwargs):
  196. raise NonExistingLayerError("ConvLSTMLayer(x, name='a') --> ConvLSTM(name='a')(x)" + __log__)
  197. # recurrent/rnn_dynamic_layers.py
  198. __all__ += [
  199. 'DynamicRNNLayer',
  200. 'BiDynamicRNNLayer',
  201. ]
  202. def DynamicRNNLayer(*args, **kwargs):
  203. raise NonExistingLayerError(
  204. "DynamicRNNLayer(x, is_train=True, name='a') --> DynamicRNN(name='a')(x, is_train=True)" + __log__
  205. )
  206. def BiDynamicRNNLayer(*args, **kwargs):
  207. raise NonExistingLayerError(
  208. "BiDynamicRNNLayer(x, is_train=True, name='a') --> BiDynamicRNN(name='a')(x, is_train=True)" + __log__
  209. )
  210. # recurrent/rnn_layers.py
  211. __all__ += [
  212. 'RNNLayer',
  213. 'BiRNNLayer',
  214. ]
  215. def RNNLayer(*args, **kwargs):
  216. raise NonExistingLayerError("RNNLayer(x, name='a') --> RNN(name='a')(x)" + __log__)
  217. def BiRNNLayer(*args, **kwargs):
  218. raise NonExistingLayerError(
  219. "BiRNNLayer(x, is_train=True, name='a') --> BiRNN(name='a')(x, is_train=True)" + __log__
  220. )
  221. # reshape.py
  222. __all__ += [
  223. 'FlattenLayer',
  224. 'ReshapeLayer',
  225. 'TransposeLayer',
  226. ]
  227. def FlattenLayer(*args, **kwargs):
  228. raise NonExistingLayerError("FlattenLayer(x, name='a') --> Flatten(name='a')(x)" + __log__)
  229. def ReshapeLayer(*args, **kwargs):
  230. raise NonExistingLayerError("ReshapeLayer(x, name='a') --> Reshape(name='a')(x)" + __log__)
  231. def TransposeLayer(*args, **kwargs):
  232. raise NonExistingLayerError("TransposeLayer(x, name='a') --> Transpose(name='a')(x)" + __log__)
  233. # scale.py
  234. __all__ += [
  235. 'ScaleLayer',
  236. ]
  237. def ScaleLayer(*args, **kwargs):
  238. raise NonExistingLayerError("ScaleLayer(x, name='a') --> Scale(name='a')(x)" + __log__)
  239. # spatial_transformer.py
  240. __all__ += ['SpatialTransformer2dAffineLayer']
  241. def SpatialTransformer2dAffineLayer(*args, **kwargs):
  242. raise NonExistingLayerError(
  243. "SpatialTransformer2dAffineLayer(x1, x2, name='a') --> SpatialTransformer2dAffine(name='a')(x1, x2)" + __log__
  244. )
  245. # stack.py
  246. __all__ += [
  247. 'StackLayer',
  248. 'UnStackLayer',
  249. ]
  250. def StackLayer(*args, **kwargs):
  251. raise NonExistingLayerError("StackLayer(x1, x2, name='a') --> Stack(name='a')(x1, x2)" + __log__)
  252. def UnStackLayer(*args, **kwargs):
  253. raise NonExistingLayerError("UnStackLayer(x1, x2, name='a') --> UnStack(name='a')(x1, x2)" + __log__)
  254. # time_distributed.py
  255. __all__ += [
  256. 'TimeDistributedLayer',
  257. ]
  258. def TimeDistributedLayer(*args, **kwargs):
  259. # raise NonExistingLayerError("TimeDistributedLayer(x1, x2, name='a') --> TimeDistributed(name='a')(x1, x2)")
  260. raise NonExistingLayerError("TimeDistributedLayer is removed for TF 2.0, please use eager mode instead." + __log__)
  261. __all__ += ['ModelLayer']
  262. def ModelLayer(*args, **kwargs):
  263. raise NonExistingLayerError("ModelLayer is removed for TensorLayer 3.0.")
  264. __all__ += ['Seq2seqLuongAttention']
  265. def Seq2seqLuongAttention(*args, **kwargs):
  266. raise NonExistingLayerError("Seq2seqLuongAttention is removed for TensorLayer 3.0.")
  267. __all__ += ['cross_entropy']
  268. def cross_entropy(*args, **kwargs):
  269. raise NonExistingLayerError(
  270. "cross_entropy(output, target) --> softmax_cross_entropy_with_logits(output, target)" + __log__
  271. )

TensorLayer 3.0 是一款兼容多种深度学习框架为计算后端的深度学习库，计划兼容 TensorFlow、PyTorch、MindSpore、Paddle。