
Merge pull request #2 from SciSharp/master

Syncing original repo
tags/v0.10
Harish Kulkarni (committed via GitHub), 6 years ago
commit 6ed27e6ed0
39 changed files with 1168 additions and 120 deletions
  1. README.md  (+1 -0)
  2. TensorFlow.NET.sln  (+8 -2)
  3. docs/assets/tf.net.architecture.svg  (+370 -0)
  4. src/SciSharp.TensorFlow.Redist/CommonPackage.props  (+24 -0)
  5. src/SciSharp.TensorFlow.Redist/SciSharp.TensorFlow.Redist.nupkgproj  (+188 -0)
  6. src/SciSharp.TensorFlow.Redist/libtensorflow-cpu-darwin-x86_64-1.14.0.tar.gz.sha  (+1 -0)
  7. src/SciSharp.TensorFlow.Redist/libtensorflow-cpu-linux-x86_64-1.14.0.tar.gz.sha  (+1 -0)
  8. src/SciSharp.TensorFlow.Redist/libtensorflow-cpu-windows-x86_64-1.14.0.zip.sha  (+1 -0)
  9. src/SciSharp.TensorFlow.Redist/redist.nuspec  (+26 -0)
  10. src/TensorFlowNET.Core/Tensors/Tensor.Creation.cs  (+282 -19)
  11. src/TensorFlowNET.Core/Tensors/Tensor.cs  (+2 -1)
  12. src/TensorFlowNET.Core/Tensors/dtypes.cs  (+24 -5)
  13. src/TensorFlowNET.Core/Variables/_VariableScopeStore.cs  (+4 -0)
  14. src/TensorFlowNet.Benchmarks/Program.cs  (+40 -0)
  15. src/TensorFlowNet.Benchmarks/TensorCreation.cs  (+55 -0)
  16. src/TensorFlowNet.Benchmarks/TensorFlowNet.Benchmark.csproj  (+22 -0)
  17. src/TensorFlowNet.Benchmarks/tensorflow.dll  (BIN)
  18. test/TensorFlowNET.Examples/AudioProcessing/README.md  (+0 -0)
  19. test/TensorFlowNET.Examples/ImageProcessing/DigitRecognitionCNN.cs  (+87 -56)
  20. test/TensorFlowNET.Examples/ImageProcessing/DigitRecognitionNN.cs  (+0 -0)
  21. test/TensorFlowNET.Examples/ImageProcessing/ImageBackgroundRemoval.cs  (+0 -0)
  22. test/TensorFlowNET.Examples/ImageProcessing/ImageRecognitionInception.cs  (+0 -0)
  23. test/TensorFlowNET.Examples/ImageProcessing/InceptionArchGoogLeNet.cs  (+0 -0)
  24. test/TensorFlowNET.Examples/ImageProcessing/ObjectDetection.cs  (+28 -36)
  25. test/TensorFlowNET.Examples/ImageProcessing/RetrainImageClassifier.cs  (+0 -0)
  26. test/TensorFlowNET.Examples/TextProcessing/BinaryTextClassification.cs  (+0 -0)
  27. test/TensorFlowNET.Examples/TextProcessing/CnnTextClassification.cs  (+0 -0)
  28. test/TensorFlowNET.Examples/TextProcessing/DataHelpers.cs  (+0 -0)
  29. test/TensorFlowNET.Examples/TextProcessing/NER/BiLstmCrfNer.cs  (+0 -0)
  30. test/TensorFlowNET.Examples/TextProcessing/NER/CRF.cs  (+0 -0)
  31. test/TensorFlowNET.Examples/TextProcessing/NER/LstmCrfNer.cs  (+0 -0)
  32. test/TensorFlowNET.Examples/TextProcessing/NamedEntityRecognition.cs  (+0 -0)
  33. test/TensorFlowNET.Examples/TextProcessing/Word2Vec.cs  (+0 -0)
  34. test/TensorFlowNET.Examples/TextProcessing/cnn_models/CharCnn.cs  (+0 -0)
  35. test/TensorFlowNET.Examples/TextProcessing/cnn_models/ITextModel.cs  (+0 -0)
  36. test/TensorFlowNET.Examples/TextProcessing/cnn_models/VdCnn.cs  (+0 -0)
  37. test/TensorFlowNET.Examples/TextProcessing/cnn_models/WordCnn.cs  (+0 -0)
  38. test/TensorFlowNET.Examples/Utility/Datasets.cs  (+1 -1)
  39. test/TensorFlowNET.UnitTest/ExamplesTests/ExamplesTest.cs  (+3 -0)

README.md  (+1 -0)

@@ -149,6 +149,7 @@ Example runner will download all the required files like training data and model
* [Object Detection](test/TensorFlowNET.Examples/ImageProcess/ObjectDetection.cs)
* [Text Classification](test/TensorFlowNET.Examples/TextProcess/BinaryTextClassification.cs)
* [CNN Text Classification](test/TensorFlowNET.Examples/TextProcess/cnn_models/VdCnn.cs)
* [MNIST CNN](test/TensorFlowNET.Examples/ImageProcess/DigitRecognitionCNN.cs)
* [Named Entity Recognition](test/TensorFlowNET.Examples/TextProcess/NER)
* [Transfer Learning for Image Classification in InceptionV3](test/TensorFlowNET.Examples/ImageProcess/RetrainImageClassifier.cs)



TensorFlow.NET.sln  (+8 -2)

@@ -1,7 +1,7 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 16
VisualStudioVersion = 16.0.29025.244
# Visual Studio 15
VisualStudioVersion = 15.0.28307.645
MinimumVisualStudioVersion = 10.0.40219.1
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "TensorFlowNET.UnitTest", "test\TensorFlowNET.UnitTest\TensorFlowNET.UnitTest.csproj", "{029A8CF1-CF95-4DCB-98AA-9D3D96A83B3E}"
EndProject
@@ -17,6 +17,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Keras.UnitTest", "test\Kera
EndProject
Project("{6EC3EE1D-3C4E-46DD-8F32-0CC8E7565705}") = "TensorFlowNET.Examples.FSharp", "test\TensorFlowNET.Examples.FSharp\TensorFlowNET.Examples.FSharp.fsproj", "{62BC3801-F0D3-44A9-A0AC-712F40C8F961}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "TensorFlowNet.Benchmark", "src\TensorFlowNet.Benchmarks\TensorFlowNet.Benchmark.csproj", "{68861442-971A-4196-876E-C9330F0B3C54}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -51,6 +53,10 @@ Global
{62BC3801-F0D3-44A9-A0AC-712F40C8F961}.Debug|Any CPU.Build.0 = Debug|Any CPU
{62BC3801-F0D3-44A9-A0AC-712F40C8F961}.Release|Any CPU.ActiveCfg = Release|Any CPU
{62BC3801-F0D3-44A9-A0AC-712F40C8F961}.Release|Any CPU.Build.0 = Release|Any CPU
{68861442-971A-4196-876E-C9330F0B3C54}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{68861442-971A-4196-876E-C9330F0B3C54}.Debug|Any CPU.Build.0 = Debug|Any CPU
{68861442-971A-4196-876E-C9330F0B3C54}.Release|Any CPU.ActiveCfg = Release|Any CPU
{68861442-971A-4196-876E-C9330F0B3C54}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE


docs/assets/tf.net.architecture.svg  (+370 -0)

@@ -0,0 +1,370 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->

<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
width="1026.6978"
height="328.57144"
viewBox="0 0 271.64711 86.934526"
version="1.1"
id="svg8"
sodipodi:docname="tf.net.architecture.svg"
inkscape:export-filename="tf.net.logo512.png"
inkscape:export-xdpi="138.45558"
inkscape:export-ydpi="138.45558"
inkscape:version="0.92.3 (2405546, 2018-03-11)">
<defs
id="defs2" />
<sodipodi:namedview
id="base"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:pageopacity="0.0"
inkscape:pageshadow="2"
inkscape:zoom="0.98994949"
inkscape:cx="554.13667"
inkscape:cy="161.1291"
inkscape:document-units="mm"
inkscape:current-layer="layer1"
showgrid="false"
inkscape:snap-global="false"
showguides="false"
units="px"
inkscape:guide-bbox="true"
inkscape:window-width="1920"
inkscape:window-height="1017"
inkscape:window-x="1912"
inkscape:window-y="-8"
inkscape:window-maximized="1"
fit-margin-top="0"
fit-margin-left="0"
fit-margin-right="0"
fit-margin-bottom="0">
<sodipodi:guide
position="-115.16765,123.92938"
orientation="1,0"
id="guide929"
inkscape:locked="false" />
</sodipodi:namedview>
<metadata
id="metadata5">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
<dc:title></dc:title>
</cc:Work>
</rdf:RDF>
</metadata>
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1"
transform="translate(8.3858147,-332.35208)">
<g
id="g1279">
<rect
y="332.35208"
x="-8.3858147"
height="86.934525"
width="271.64713"
id="rect1023"
style="opacity:1;vector-effect:none;fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" />
<text
id="text843"
y="347.40942"
x="18.269608"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:9.87777805px;line-height:1.25;font-family:Calibri;-inkscape-font-specification:Calibri;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332"
xml:space="preserve"><tspan
style="font-size:9.87777805px;stroke-width:0.26458332"
y="347.40942"
x="18.269608"
id="tspan841"
sodipodi:role="line">TensorFlow</tspan></text>
<g
transform="translate(0.53205782,8.3134011)"
id="g1009">
<rect
rx="2.5349789"
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;fill-rule:nonzero;stroke:#865fc5;stroke-width:1.08041525;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect845"
width="73.277794"
height="23.348486"
x="3.8228469"
y="379.87094"
ry="2.5349789" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.3499999px;line-height:1.25;font-family:Calibri;-inkscape-font-specification:Calibri;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.18378384"
x="40.438488"
y="388.66986"
id="text849"><tspan
sodipodi:role="line"
x="41.157825"
y="388.66986"
style="font-size:6.3499999px;text-align:center;text-anchor:middle;stroke-width:0.18378384"
id="tspan851">Tensor Computation </tspan><tspan
sodipodi:role="line"
x="40.438488"
y="396.60736"
style="font-size:6.3499999px;text-align:center;text-anchor:middle;stroke-width:0.18378384"
id="tspan933">Layer (C++)</tspan></text>
</g>
<g
transform="translate(0.75958878,1.0971239)"
id="g1003">
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.3499999px;line-height:1.25;font-family:Calibri;-inkscape-font-specification:Calibri;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.18378386"
x="40.093136"
y="363.23035"
id="text849-6"><tspan
sodipodi:role="line"
x="40.812473"
y="363.23035"
style="font-size:6.3499999px;text-align:center;text-anchor:middle;stroke-width:0.18378386"
id="tspan891">Graph Manipulation </tspan><tspan
sodipodi:role="line"
x="40.093136"
y="371.16785"
style="font-size:6.3499999px;text-align:center;text-anchor:middle;stroke-width:0.18378386"
id="tspan937">Layer (Python)</tspan></text>
<rect
rx="2.5349789"
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;fill-rule:nonzero;stroke:#865fc5;stroke-width:1.08041525;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect845-1"
width="73.277794"
height="23.348486"
x="3.5953159"
y="353.88745"
ry="2.5349789" />
</g>
<text
id="text843-5"
y="347.40942"
x="94.650604"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:9.87777805px;line-height:1.25;font-family:Calibri;-inkscape-font-specification:Calibri;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332"
xml:space="preserve"><tspan
style="font-size:9.87777805px;stroke-width:0.26458332"
y="347.40942"
x="94.650604"
id="tspan841-2"
sodipodi:role="line">TensorFlow.NET</tspan></text>
<g
transform="translate(-6.8241284,1.7065599)"
id="g1015">
<rect
rx="2.5349789"
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;fill-rule:nonzero;stroke:#661f76;stroke-width:1.08041525;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect845-1-8"
width="73.277794"
height="23.348486"
x="96.586525"
y="353.27802"
ry="2.5349789" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.3499999px;line-height:1.25;font-family:Calibri;-inkscape-font-specification:Calibri;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.18378386"
x="133.686"
y="363.2756"
id="text849-6-2"><tspan
sodipodi:role="line"
x="134.40533"
y="363.2756"
style="font-size:6.3499999px;text-align:center;text-anchor:middle;stroke-width:0.18378386"
id="tspan891-9">Graph Manipulation </tspan><tspan
sodipodi:role="line"
x="133.686"
y="371.2131"
style="font-size:6.3499999px;text-align:center;text-anchor:middle;stroke-width:0.18378386"
id="tspan937-1">Layer (C#)</tspan></text>
</g>
<g
transform="translate(-9.932153,9.1901064)"
id="g1021">
<rect
rx="2.5349789"
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;fill-rule:nonzero;stroke:#661f76;stroke-width:1.08041525;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect845-2"
width="73.277794"
height="23.348486"
x="99.69455"
y="379.26151"
ry="2.5349789" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.3499999px;line-height:1.25;font-family:Calibri;-inkscape-font-specification:Calibri;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.18378386"
x="136.3102"
y="388.29663"
id="text849-7"><tspan
sodipodi:role="line"
x="137.02953"
y="388.29663"
style="font-size:6.3499999px;text-align:center;text-anchor:middle;stroke-width:0.18378386"
id="tspan851-4">Tensor Computation </tspan><tspan
sodipodi:role="line"
x="136.3102"
y="396.23413"
style="font-size:6.3499999px;text-align:center;text-anchor:middle;stroke-width:0.18378386"
id="tspan933-3">Layer (C++)</tspan></text>
</g>
<g
transform="translate(20.221726,-55.562499)"
id="g1116">
<rect
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;fill-rule:nonzero;stroke:#865fc5;stroke-width:1.08041525;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect845-24"
width="73.277794"
height="9.9850101"
x="-15.866821"
y="433.88846"
ry="2.5349789"
rx="2.5349789" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.3499999px;line-height:1.25;font-family:Calibri;-inkscape-font-specification:Calibri;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.18378384"
x="20.748819"
y="440.51651"
id="text849-5"><tspan
sodipodi:role="line"
x="20.748819"
y="440.51651"
style="font-size:6.3499999px;text-align:center;text-anchor:middle;stroke-width:0.18378384"
id="tspan851-42">C++ API (Python)</tspan><tspan
sodipodi:role="line"
x="20.748819"
y="448.45401"
style="font-size:6.3499999px;text-align:center;text-anchor:middle;stroke-width:0.18378384"
id="tspan933-9" /></text>
</g>
<g
transform="translate(105.62922,-55.3248)"
id="g1116-7">
<rect
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;fill-rule:nonzero;stroke:#661f76;stroke-width:1.08041525;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect845-24-6"
width="73.277794"
height="9.9850101"
x="-15.866821"
y="433.88846"
ry="2.5349789"
rx="2.5349789" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.3499999px;line-height:1.25;font-family:Calibri;-inkscape-font-specification:Calibri;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.18378384"
x="20.748819"
y="440.51651"
id="text849-5-7"><tspan
sodipodi:role="line"
x="20.748819"
y="440.51651"
style="font-size:6.3499999px;text-align:center;text-anchor:middle;stroke-width:0.18378384"
id="tspan851-42-5">C++ API (C#)</tspan><tspan
sodipodi:role="line"
x="20.748819"
y="448.45401"
style="font-size:6.3499999px;text-align:center;text-anchor:middle;stroke-width:0.18378384"
id="tspan933-9-9" /></text>
</g>
<g
transform="translate(76.872688,8.5224453)"
id="g1021-2">
<rect
rx="2.5349789"
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;fill-rule:nonzero;stroke:#661f76;stroke-width:1.08041525;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect845-2-3"
width="73.277794"
height="23.348486"
x="99.69455"
y="379.26151"
ry="2.5349789" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.3499999px;line-height:1.25;font-family:Calibri;-inkscape-font-specification:Calibri;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.18378386"
x="136.3102"
y="388.29663"
id="text849-7-1"><tspan
sodipodi:role="line"
x="137.02953"
y="388.29663"
style="font-size:6.3499999px;text-align:center;text-anchor:middle;stroke-width:0.18378386"
id="tspan851-4-6">Tensor Computation </tspan><tspan
sodipodi:role="line"
x="136.3102"
y="396.23413"
style="font-size:6.3499999px;text-align:center;text-anchor:middle;stroke-width:0.18378386"
id="tspan933-3-6">Layer (C++)</tspan></text>
</g>
<g
transform="translate(192.43406,-55.992467)"
id="g1116-7-2">
<rect
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;fill-rule:nonzero;stroke:#661f76;stroke-width:1.08041525;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect845-24-6-4"
width="73.277794"
height="9.9850101"
x="-15.866821"
y="433.88846"
ry="2.5349789"
rx="2.5349789" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:6.3499999px;line-height:1.25;font-family:Calibri;-inkscape-font-specification:Calibri;text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.18378384"
x="20.748819"
y="440.51651"
id="text849-5-7-7"><tspan
sodipodi:role="line"
x="20.748819"
y="440.51651"
style="font-size:6.3499999px;text-align:center;text-anchor:middle;stroke-width:0.18378384"
id="tspan851-42-5-9">C++ API (C#)</tspan><tspan
sodipodi:role="line"
x="20.748819"
y="448.45401"
style="font-size:6.3499999px;text-align:center;text-anchor:middle;stroke-width:0.18378384"
id="tspan933-9-9-0" /></text>
</g>
<text
id="text843-5-3"
y="346.74863"
x="179.03905"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:9.87777805px;line-height:1.25;font-family:Calibri;-inkscape-font-specification:Calibri;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332"
xml:space="preserve"><tspan
style="font-size:9.87777805px;stroke-width:0.26458332"
y="346.74863"
x="179.03905"
id="tspan841-2-9"
sodipodi:role="line">TensorFlowSharp</tspan><tspan
id="tspan1205"
style="font-size:9.87777805px;stroke-width:0.26458332"
y="359.09586"
x="179.03905"
sodipodi:role="line" /></text>
<text
id="text843-5-3-5"
y="355.52545"
x="192.88992"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:7.11484385px;line-height:1.25;font-family:Calibri;-inkscape-font-specification:Calibri;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332"
xml:space="preserve"><tspan
style="font-size:7.11484385px;stroke-width:0.26458332"
y="355.52545"
x="192.88992"
id="tspan841-2-9-8"
sodipodi:role="line">(by Microsoft)</tspan><tspan
id="tspan1205-0"
style="font-size:7.11484385px;stroke-width:0.26458332"
y="364.41901"
x="192.88992"
sodipodi:role="line" /></text>
</g>
</g>
</svg>

src/SciSharp.TensorFlow.Redist/CommonPackage.props  (+24 -0)

@@ -0,0 +1,24 @@
<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">

<!--
NuGet packages.config doesn't support native assemblies automatically,
so copy the native assemblies to the output directory.
-->
<ItemGroup Condition="Exists('packages.config') OR
Exists('$(MSBuildProjectName).packages.config') OR
Exists('packages.$(MSBuildProjectName).config')">
<Content Include="$(MSBuildThisFileDirectory)\..\..\runtimes\win-x64\native\*.dll"
Condition="'$(PlatformTarget)' == 'x64'">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
<Visible>false</Visible>
<Link>%(Filename)%(Extension)</Link>
</Content>
<Content Include="$(MSBuildThisFileDirectory)\..\..\runtimes\win-x86\native\*.dll"
Condition="'$(PlatformTarget)' == 'x86'">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
<Visible>false</Visible>
<Link>%(Filename)%(Extension)</Link>
</Content>
</ItemGroup>

</Project>

src/SciSharp.TensorFlow.Redist/SciSharp.TensorFlow.Redist.nupkgproj  (+188 -0)

@@ -0,0 +1,188 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<ProjDir>$(MSBuildThisFileDirectory)</ProjDir>
<BinDir>$(ProjDir)bin\</BinDir>
<ObjDir>$(ProjDir)obj\</ObjDir>

<TargetArchitecture Condition="'$(TargetArchitecture)' == ''">x64</TargetArchitecture>
<TargetFramework>netstandard2.0</TargetFramework>
<TensorFlowVersion>1.14.0</TensorFlowVersion>
<TensorFlowMajorVersion>1</TensorFlowMajorVersion>

<PackageAssetsPath>$(BinDir)packages\</PackageAssetsPath>
<PackageId>$(MSBuildProjectName)</PackageId>
<PackageVersion>$(TensorFlowVersion)</PackageVersion>

<NoBuild>true</NoBuild>
<IncludeBuildOutput>false</IncludeBuildOutput>

<NuspecFile>Redist.nuspec</NuspecFile>
<NuspecProperties>packageId=$(PackageId);version=$(PackageVersion)</NuspecProperties>
<NuspecBasePath>$(ProjDir)</NuspecBasePath>

<GenerateNuspecDependsOn>CopyFilesFromArchive</GenerateNuspecDependsOn>

<PackageRid Condition="'$(OS)' == 'Windows_NT'">win</PackageRid>
<PackageRid Condition="'$(OS)' != 'Windows_NT'">linux</PackageRid>
<PackageRid Condition="$([MSBuild]::IsOSPlatform('osx'))">osx</PackageRid>
<PackageRid>$(PackageRid)-$(TargetArchitecture)</PackageRid>

</PropertyGroup>

<PropertyGroup>
<IncludeMLNetNotices>false</IncludeMLNetNotices>
</PropertyGroup>
<ItemGroup>
<TensorFlowConfig Include="windows"
FileExtension=".zip"
FilesFromArchive="lib\tensorflow.dll;
include\tensorflow\c\LICENSE"
Runtime="win-x64"/>

<TensorFlowConfig Condition="'$(OS)' != 'Windows_NT'"
Include="linux"
FileExtension=".tar.gz"
FilesFromArchive="lib\libtensorflow.so;
lib\libtensorflow_framework.so.$(TensorFlowMajorVersion);
include\tensorflow\c\LICENSE"
Runtime="linux-x64" />

<TensorFlowConfig Condition="'$(OS)' != 'Windows_NT'"
Include="darwin" FileExtension=".tar.gz"
FilesFromArchive="lib\libtensorflow.dylib;
lib\libtensorflow_framework.$(TensorFlowMajorVersion).dylib;
include\tensorflow\c\LICENSE"
Runtime="osx-x64" />

<AdditionalDownloadFile Include="https://raw.githubusercontent.com/tensorflow/tensorflow/master/LICENSE"
DownloadFile="$(BinDir)LICENSE" />
</ItemGroup>

<Target Name="PrepareArchives">
<ItemGroup>
<!-- although we could extract all archives on all machines, mac requires a fixup which can only be run on mac
so we split these per-rid and join during the official build packaging. -->
<TensorFlowArchive
Include="@(TensorFlowConfig->'https://storage.googleapis.com/tensorflow/libtensorflow/libtensorflow-cpu-%(Identity)-x86_64-$(TensorFlowVersion)%(FileExtension)')" />
<!-- set up metdata used by all targets -->
<TensorFlowArchive DownloadFile="$(BinDir)%(FileName)%(Extension)"
DownloadShaFile="$(BinDir)%(FileName)%(Extension).sha"
ExtractDirectory="$(BinDir)%(FileName)"
ExtractSemaphore="$(BinDir)%(FileName)\.extracted"
LocalShaFile="$(MSBuildProjectDirectory)\%(FileName)%(Extension).sha"/>
</ItemGroup>
<Message Importance="high" Text="%(TensorFlowConfig.Runtime)"/>
</Target>

<Target Name="DownloadArchives"
DependsOnTargets="PrepareArchives"
Inputs="$(MSBuildProjectFile)"
Outputs="@(TensorFlowArchive->'%(DownloadFile)');@(AdditionalDownloadFile->'%(DownloadFile)')">
<MakeDir Directories="$(BinDir)" />
<ItemGroup>
<_downloadFiles Include="@(TensorFlowArchive);@(AdditionalDownloadFile)" Url="%(Identity)" DestinationFile="%(DownloadFile)" />
</ItemGroup>
<Message Importance="High" Text="Downloading '%(_downloadFiles.Identity)' to '$(BinDir)'." />
<DownloadFile SourceUrl="%(_downloadFiles.Identity)" DestinationFolder="$(BinDir)">
<Output TaskParameter="DownloadedFile" ItemName="Content" />
</DownloadFile>
</Target>


<Target Name="ValidateAndExtractArchives"
DependsOnTargets="DownloadArchives"
Inputs="@(TensorFlowArchive->'%(DownloadFile)')"
Outputs="@(TensorFlowArchive->'%(ExtractSemaphore)')">

<GetFileHash Files="@(TensorFlowArchive->'%(DownloadFile)')" Algorithm="SHA512">
<Output
TaskParameter="Items"
ItemName="FilesWithHashes" />
</GetFileHash>

<Message Importance="High"
Text="%(FilesWithHashes.Identity): %(FilesWithHashes.FileHash)" />

<ItemGroup>
<TensorFlowArchive>
<DownloadSha>@(FilesWithHashes->'%(FileHash)')</DownloadSha>
<LocalSha>$([System.IO.File]::ReadAllText('%(LocalShaFile)'))</LocalSha>
</TensorFlowArchive>
</ItemGroup>

<!-- If specified we'll update the checked in SHAs with the downloaded ones. -->
<!--<WriteLinesToFile Condition="'$(UpdateSHA)' == 'true'"
File="@(TensorFlowArchive->'%(LocalShaFile)')"
Lines="@(TensorFlowArchive->'%(LocalShaFile)')" />-->

<Error Condition="!Exists('%(TensorFlowArchive.LocalShaFile)')" Text="SHA file '%(TensorFlowArchive.LocalShaFile)' does not exist. Build with /p:UpdateSHA=true to save it." />

<Message Importance="High" Text="@TensorFlowArchive->'%(TensorFlowArchive.DownloadFile) - %(TensorFlowArchive.LocalSha) - %(TensorFlowArchive.DownloadSha)"/>

<!-- Validate that the downloaded SHAs match the expected checked in SHAs -->
<Error Condition="'%(TensorFlowArchive.LocalSha)' != '%(TensorFlowArchive.DownloadSha)'" Text="Downloaded file '%(TensorFlowArchive.DownloadFile)' has unexpected SHA.%0A expected: %(_downloadedTensorFlowArchive.LocalSha)%0A actual: %(_downloadedTensorFlowArchive.DownloadSha)%0ABuild with /p:UpdateSHA=true if you intentionally changed the URL and wish to update the SHAs, otherwise this could indicate an incomplete download or intercerpted URL and should be examined." />


<!-- The archives are valid, lets extract them, ensuring an empty directory -->
<RemoveDir Directories="@(TensorFlowArchive->'%(ExtractDirectory)')" />
<MakeDir Directories="@(TensorFlowArchive->'%(ExtractDirectory)')" />

<Message Importance="High" Text="Decompressing '%(TensorFlowArchive.DownloadFile)' to '%(TensorFlowArchive.ExtractDirectory)'." />

<Unzip Condition="'%(TensorFlowArchive.FileExtension)' == '.zip'"
SourceFiles="%(TensorFlowArchive.DownloadFile)"
DestinationFolder="%(TensorFlowArchive.ExtractDirectory)" />

<Exec Condition="'$(OS)' != 'Windows_NT' AND '%(TensorFlowArchive.FileExtension)' == '.tar.gz'"
WorkingDirectory="$(MSBuildThisFileDirectory)"
Command="tar -xzm --hard-dereference -f %(TensorFlowArchive.DownloadFile) -C %(TensorFlowArchive.ExtractDirectory)" />

<Exec Condition="'$(OS)' != 'Windows_NT'"
Command="chmod -R +w %(TensorFlowArchive.ExtractDirectory)" />

<Touch Files="@(TensorFlowArchive->'%(ExtractSemaphore)')" AlwaysCreate="true" />
</Target>

<!-- Select the files we want to copy out of each archive. -->
<Target Name="GetFilesFromArchive"
DependsOnTargets="ValidateAndExtractArchives" >
<ItemGroup>
<!-- batch rather than transform so that we can split FilesFromArchive metadata -->
<_fileFromArchive Include="%(TensorFlowArchive.FilesFromArchive)" ExtractDirectory="%(TensorFlowArchive.ExtractDirectory)" Runtime="%(TensorFlowArchive.Runtime)" />
<_fileFromArchive DestinationFile="%(FileName)%(Extension)"/>
<_fileFromArchive PackagePath="runtimes\%(_fileFromArchive.Runtime)\native\%(_fileFromArchive.DestinationFile)" />

<!-- LICENSE from the package is actually THIRD_PARTY_NOTICES-->
<_fileFromArchive Condition="'%(DestinationFile)' == 'LICENSE'" PackagePath="THIRD_PARTY_NOTICES.txt" Runtime="" />

<!-- copy to packaging location -->
<FilesFromArchive Include="@(_fileFromArchive->'%(ExtractDirectory)\%(Identity)')"
TargetPath="$(PackageAssetsPath)$(MSBuildProjectName)\%(PackagePath)" />
<!-- include LICENSE that was downloaded from GitHub -->
<FilesFromArchive Include="$(BinDir)\LICENSE"
TargetPath="$(PackageAssetsPath)$(MSBuildProjectName)\LICENSE.txt" />

<!-- copy to NativeAssets location, only for current RID, so that they may be used by tests -->
<!--<FilesFromArchive Condition="'$(PackageRID)' == '%(_fileFromArchive.Runtime)'"
Include="@(_fileFromArchive->'%(ExtractDirectory)\%(Identity)')"
TargetPath="$(NativeAssetsBuiltPath)\%(_fileFromArchive.DestinationFile)" />-->
</ItemGroup>
</Target>

<Target Name="CopyFilesFromArchive"
DependsOnTargets="GetFilesFromArchive">

<Message Importance="High" Text="@(FilesFromArchive) -> %(FilesFromArchive.TargetPath)" />
<Copy SourceFiles="@(FilesFromArchive)"
DestinationFiles="@(FilesFromArchive->'%(TargetPath)')" />

</Target>

<Target Name="Clean">
<Message Importance="High" Text="Deleting $(BinDir);$(ObjDir)" />
<RemoveDir Directories="$(BinDir);$(ObjDir)" />
</Target>
</Project>

src/SciSharp.TensorFlow.Redist/libtensorflow-cpu-darwin-x86_64-1.14.0.tar.gz.sha  (+1 -0)

@@ -0,0 +1 @@
7002EF701BD23C5EF5FF94192E935F0DDF960A21BE2531CEE158586830C00E0BA889900F7F6E8AB568BEE0ACF1F5A6A246BB43D11C4109E9DC782B46377D8142

src/SciSharp.TensorFlow.Redist/libtensorflow-cpu-linux-x86_64-1.14.0.tar.gz.sha  (+1 -0)

@@ -0,0 +1 @@
E3F6D0309117E9E45780ECF8BC4D0268B3FC9F12E3E38FFE58496789330A4ACD2DC8FF721F3B8900357F6155F8A54000E45B99495F823486B558E8B42532392D

src/SciSharp.TensorFlow.Redist/libtensorflow-cpu-windows-x86_64-1.14.0.zip.sha  (+1 -0)

@@ -0,0 +1 @@
59A2B80B441439B851202358CE4A65BA0DDDB319A8A29E87B135DCD9954BC5B0628F2C0C8E72D6942EA3CDCE172805C2BD5421815B3D0210B62BC0936DC59A08

src/SciSharp.TensorFlow.Redist/redist.nuspec  (+26 -0)

@@ -0,0 +1,26 @@
<?xml version="1.0" encoding="utf-8"?>
<package xmlns="http://schemas.microsoft.com/packaging/2012/06/nuspec.xsd">
<metadata>
<id>$packageId$</id>
<version>$version$</version>
<authors>The TensorFlow Authors</authors>
<owners>The TensorFlow Authors</owners>
<requireLicenseAcceptance>true</requireLicenseAcceptance>
<license type="file">LICENSE.txt</license>
<licenseUrl>https://aka.ms/deprecateLicenseUrl</licenseUrl>
<projectUrl>https://www.tensorflow.org/</projectUrl>
<description>$packageId$ contains the TensorFlow C library version $version$ redistributed as a NuGet package.</description>
<releaseNotes>https://github.com/tensorflow/tensorflow/releases/tag/v$version$</releaseNotes>
<copyright>Copyright 2019 The TensorFlow Authors. All rights reserved.</copyright>
<tags>TensorFlow</tags>
<dependencies>
<group targetFramework=".NETStandard2.0" />
</dependencies>
</metadata>
<files>
<file src="CommonPackage.props" target="build\netstandard2.0\$packageId$.props" />
<file src="bin\packages\$packageId$\LICENSE.txt" target="LICENSE.txt" />
<file src="bin\packages\$packageId$\THIRD_PARTY_NOTICES.txt" target="THIRD_PARTY_NOTICES.txt" />
<file src="bin\packages\$packageId$\runtimes\**\*" target="runtimes" />
</files>
</package>

src/TensorFlowNET.Core/Tensors/Tensor.Creation.cs  (+282 -19)

@@ -1,6 +1,6 @@
/*****************************************************************************
Copyright 2018 The TensorFlow.NET Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
@@ -18,6 +18,8 @@ using NumSharp;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Numerics;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Text;
using static Tensorflow.c_api;
@@ -36,39 +38,246 @@ namespace Tensorflow
_handle = handle;
}

public Tensor(NDArray nd, TF_DataType? tensorDType = null)
#if _REGEN
%types=["sbyte", "byte", "short", "ushort", "int", "uint", "long", "ulong", "float", "double", "Complex"]
%foreach types%
/// <summary>
/// Create a 1d Tensor from the given linear array and shape
/// </summary>
public Tensor(#1[] data)
{
_handle = Allocate(nd, tensorDType: tensorDType);
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(#1)), new long[]{data.Length}, data, Marshal.SizeOf<#1>());
}

public unsafe Tensor(byte[] buffer)
/// <summary>
/// Create a N-dimensional Tensor from the given array
/// </summary>
public Tensor(#1[] data, long[] shape)
{
var size = c_api.TF_StringEncodedSize((UIntPtr)buffer.Length);
_handle = TF_AllocateTensor(TF_DataType.TF_STRING, IntPtr.Zero, 0, (UIntPtr)((ulong)size + 8));
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(#1)), shape, data, Marshal.SizeOf<#1>());
}

IntPtr tensor = c_api.TF_TensorData(_handle);
Marshal.WriteInt64(tensor, 0);
fixed (byte* src = &buffer[0])
c_api.TF_StringEncode(src, (UIntPtr)buffer.Length, (sbyte*)(tensor + sizeof(Int64)), size, status);
%
#else
/// <summary>
/// Create a 1d Tensor from the given linear array and shape
/// </summary>
public Tensor(sbyte[] data)
{
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(sbyte)), new long[]{data.Length}, data, Marshal.SizeOf<sbyte>());
}

status.Check(true);
/// <summary>
/// Create a N-dimensional Tensor from the given array
/// </summary>
public Tensor(sbyte[] data, long[] shape)
{
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(sbyte)), shape, data, Marshal.SizeOf<sbyte>());
}

private IntPtr Allocate(NDArray nd, TF_DataType? tensorDType = null)
/// <summary>
/// Create a 1d Tensor from the given linear array and shape
/// </summary>
public Tensor(byte[] data)
{
if (tensorDType == TF_DataType.TF_STRING &&
nd.dtype.Name == "Byte")
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(byte)), new long[]{data.Length}, data, Marshal.SizeOf<byte>());
}

/// <summary>
/// Create a N-dimensional Tensor from the given array
/// </summary>
public Tensor(byte[] data, long[] shape)
{
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(byte)), shape, data, Marshal.SizeOf<byte>());
}

/// <summary>
/// Create a 1d Tensor from the given linear array and shape
/// </summary>
public Tensor(short[] data)
{
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(short)), new long[]{data.Length}, data, Marshal.SizeOf<short>());
}

/// <summary>
/// Create a N-dimensional Tensor from the given array
/// </summary>
public Tensor(short[] data, long[] shape)
{
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(short)), shape, data, Marshal.SizeOf<short>());
}

/// <summary>
/// Create a 1d Tensor from the given linear array and shape
/// </summary>
public Tensor(ushort[] data)
{
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(ushort)), new long[]{data.Length}, data, Marshal.SizeOf<ushort>());
}

/// <summary>
/// Create a N-dimensional Tensor from the given array
/// </summary>
public Tensor(ushort[] data, long[] shape)
{
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(ushort)), shape, data, Marshal.SizeOf<ushort>());
}

/// <summary>
/// Create a 1d Tensor from the given linear array and shape
/// </summary>
public Tensor(int[] data)
{
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(int)), new long[]{data.Length}, data, Marshal.SizeOf<int>());
}

/// <summary>
/// Create a N-dimensional Tensor from the given array
/// </summary>
public Tensor(int[] data, long[] shape)
{
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(int)), shape, data, Marshal.SizeOf<int>());
}

/// <summary>
/// Create a 1d Tensor from the given linear array and shape
/// </summary>
public Tensor(uint[] data)
{
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(uint)), new long[]{data.Length}, data, Marshal.SizeOf<uint>());
}

/// <summary>
/// Create a N-dimensional Tensor from the given array
/// </summary>
public Tensor(uint[] data, long[] shape)
{
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(uint)), shape, data, Marshal.SizeOf<uint>());
}

/// <summary>
/// Create a 1d Tensor from the given linear array and shape
/// </summary>
public Tensor(long[] data)
{
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(long)), new long[]{data.Length}, data, Marshal.SizeOf<long>());
}

/// <summary>
/// Create a N-dimensional Tensor from the given array
/// </summary>
public Tensor(long[] data, long[] shape)
{
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(long)), shape, data, Marshal.SizeOf<long>());
}

/// <summary>
/// Create a 1d Tensor from the given linear array and shape
/// </summary>
public Tensor(ulong[] data)
{
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(ulong)), new long[]{data.Length}, data, Marshal.SizeOf<ulong>());
}

/// <summary>
/// Create a N-dimensional Tensor from the given array
/// </summary>
public Tensor(ulong[] data, long[] shape)
{
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(ulong)), shape, data, Marshal.SizeOf<ulong>());
}

/// <summary>
/// Create a 1d Tensor from the given linear array and shape
/// </summary>
public Tensor(float[] data)
{
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(float)), new long[]{data.Length}, data, Marshal.SizeOf<float>());
}

/// <summary>
/// Create a N-dimensional Tensor from the given array
/// </summary>
public Tensor(float[] data, long[] shape)
{
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(float)), shape, data, Marshal.SizeOf<float>());
}

/// <summary>
/// Create a 1d Tensor from the given linear array and shape
/// </summary>
public Tensor(double[] data)
{
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(double)), new long[]{data.Length}, data, Marshal.SizeOf<double>());
}

/// <summary>
/// Create a N-dimensional Tensor from the given array
/// </summary>
public Tensor(double[] data, long[] shape)
{
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(double)), shape, data, Marshal.SizeOf<double>());
}

/// <summary>
/// Create a 1d Tensor from the given linear array and shape
/// </summary>
public Tensor(Complex[] data)
{
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(Complex)), new long[]{data.Length}, data, Marshal.SizeOf<Complex>());
}

/// <summary>
/// Create a N-dimensional Tensor from the given array
/// </summary>
public Tensor(Complex[] data, long[] shape)
{
_handle = CreateTensorWithoutCopying(dtypes.as_dtype(typeof(Complex)), shape, data, Marshal.SizeOf<Complex>());
}
#endif

public Tensor(NDArray nd, TF_DataType? tensorDType = null)
{
_handle = Allocate(nd, tensorDType: tensorDType);
}

private unsafe IntPtr Allocate(NDArray nd, TF_DataType? tensorDType = null)
{
if (tensorDType == TF_DataType.TF_STRING && nd.dtype.Name == "Byte")
{
return new Tensor(nd.Data<byte>());
var buffer=nd.Data<byte>();
var size = c_api.TF_StringEncodedSize((UIntPtr)buffer.Length);
var handle = TF_AllocateTensor(TF_DataType.TF_STRING, IntPtr.Zero, 0, (UIntPtr)((ulong)size + 8));

IntPtr tensor = c_api.TF_TensorData(handle);
Marshal.WriteInt64(tensor, 0);
fixed (byte* src = &buffer[0])
c_api.TF_StringEncode(src, (UIntPtr)buffer.Length, (sbyte*)(tensor + sizeof(Int64)), size, status);

status.Check(true);
return handle;
}

IntPtr dotHandle = IntPtr.Zero;
ulong size = 0;
int buffersize = 0;

if (nd.dtype.Name != "String")
{
dotHandle = Marshal.AllocHGlobal(nd.dtypesize * nd.size);
size = (ulong)(nd.size * nd.dtypesize);
buffersize = (nd.size * nd.dtypesize);
dotHandle = Marshal.AllocHGlobal(buffersize);
}

var dataType = ToTFDataType(nd.dtype);
@@ -116,7 +325,7 @@ namespace Tensorflow
dims,
dims.Length,
dotHandle,
(UIntPtr)size,
(UIntPtr)buffersize,
deallocator,
ref deallocator_called);

@@ -130,5 +339,59 @@ namespace Tensorflow
_dtype = dtype;
_id = ops.uid();
}

/// <summary>
/// Creates a new tensor from the given array without copying memory. The array is pinned down and the pointer passed on.
/// </summary>
/// <param name="shape">Represents the tensor shape.</param>
/// <param name="data">The linear array of data, the data must fit in the tensor with the specified dimensions.</param>
/// <param name="element_size">The number of bytes in memory of a single array element</param>
/// <remarks>
/// Use the FromBuffer method to create a tensor that has the specified dimensions
/// and is initialized with data from the data array. The data is copied starting
/// at the start offset, for count bytes and is laid out into the tensor following the
/// specified dimensions.
/// </remarks>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
protected IntPtr CreateTensorWithoutCopying(TF_DataType dt, long[] shape, Array data, int element_size)
{
return CreateTensorWithoutCopying(dt, shape, data, 0, data.Length, element_size);
}
/// <summary>
/// Creates a new tensor from a subsection of the given array without copying memory. The array is pinned down and the pointer passed on.
/// </summary>
/// <param name="shape">Represents the tensor shape.</param>
/// <param name="data">The linear array of data, the data must fit in the tensor with the specified dimensions.</param>
/// <param name="start">The offset into the provided data array where the data resides.</param>
/// <param name="count">The number of elements to copy from data.</param>
/// <param name="element_size">The number of bytes in memory of a single array element</param>
/// <remarks>
/// Use the FromBuffer method to create a tensor that has the specified dimensions
/// and is initialized with data from the data array. The data is copied starting
/// at the start offset, for count bytes and is laid out into the tensor following the
/// specified dimensions.
/// </remarks>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
protected IntPtr CreateTensorWithoutCopying(TF_DataType dt, long[] shape, Array data, int start, int count, int element_size)
{
if (start < 0 || start > data.Length - count)
throw new ArgumentException($"Array length {data.Length} does not match the given shape {new Shape(shape.Cast<int>().ToArray())}");
// get a handle to the pinned array which we will pass on to the tensor computation engine to use
var dataHandle = GCHandle.Alloc(data, GCHandleType.Pinned);

// Free the original buffer and set flag
Deallocator deallocator = (IntPtr values, IntPtr len, ref bool closure) =>
{
dataHandle.Free();
closure = true;
};

if (shape == null)
return TF_NewTensor(dt, null, 0, dataHandle.AddrOfPinnedObject() + start * element_size, (UIntPtr)(count * element_size), deallocator, ref deallocator_called);
else
return TF_NewTensor(dt, shape, shape.Length, dataHandle.AddrOfPinnedObject() + start * element_size, (UIntPtr)(count * element_size), deallocator, ref deallocator_called);
}
}
}
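
A minimal usage sketch of the constructors added above (illustrative only, not part of the diff; it assumes nothing beyond the Tensorflow namespace). The new overloads pin the managed array and hand its address to TF_NewTensor, so no managed-side copy is made; the pinned handle is released by the deallocator when TensorFlow frees the native tensor.

    using Tensorflow;

    // 1-D tensor: shape is inferred as { data.Length }.
    var flat = new Tensor(new double[] { 1.0, 2.0, 3.0, 4.0 });

    // N-D tensor: the same kind of buffer viewed as a 2x2 matrix.
    // The array is pinned, not copied; the GCHandle is freed by the
    // deallocator once the native tensor is released.
    var matrix = new Tensor(new double[] { 1.0, 2.0, 3.0, 4.0 }, new long[] { 2, 2 });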

src/TensorFlowNET.Core/Tensors/Tensor.cs  (+2 -1)

@@ -31,7 +31,7 @@ namespace Tensorflow
/// </summary>
public partial class Tensor : IDisposable, ITensorOrOperation
{
private readonly IntPtr _handle;
private IntPtr _handle;

private int _id;
private Operation _op;
@@ -351,6 +351,7 @@ namespace Tensorflow
public void Dispose()
{
c_api.TF_DeleteTensor(_handle);
_handle = IntPtr.Zero;
status.Dispose();
}
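
Making _handle writable lets Dispose zero it after TF_DeleteTensor, so a disposed tensor no longer holds a dangling native pointer. A small usage sketch (illustrative, assuming the array constructors from Tensor.Creation.cs):

    using (var tensor = new Tensor(new float[] { 1f, 2f, 3f }))
    {
        // use the tensor while its native buffer is alive
    }   // Dispose() deletes the native tensor and clears _handle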



src/TensorFlowNET.Core/Tensors/dtypes.cs  (+24 -5)

@@ -52,32 +52,51 @@ namespace Tensorflow
}
}

// "sbyte", "byte", "short", "ushort", "int", "uint", "long", "ulong", "float", "double", "Complex"
public static TF_DataType as_dtype(Type type)
{
TF_DataType dtype = TF_DataType.DtInvalid;
switch (type.Name)
{
case "Boolean":
dtype = TF_DataType.TF_BOOL;
case "SByte":
dtype = TF_DataType.TF_INT8;
break;
case "Byte":
dtype = TF_DataType.TF_UINT8;
break;
case "Int16":
dtype = TF_DataType.TF_INT16;
break;
case "UInt16":
dtype = TF_DataType.TF_UINT16;
break;
case "Int32":
dtype = TF_DataType.TF_INT32;
break;
case "UInt32":
dtype = TF_DataType.TF_UINT32;
break;
case "Int64":
dtype = TF_DataType.TF_INT64;
break;
case "UInt64":
dtype = TF_DataType.TF_UINT64;
break;
case "Single":
dtype = TF_DataType.TF_FLOAT;
break;
case "Double":
dtype = TF_DataType.TF_DOUBLE;
break;
case "Complex":
dtype = TF_DataType.TF_COMPLEX128;
break;
case "String":
dtype = TF_DataType.TF_STRING;
break;
case "Byte":
dtype = TF_DataType.TF_STRING;
case "Boolean":
dtype = TF_DataType.TF_BOOL;
break;
default:
throw new Exception("as_dtype Not Implemented");
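
With this change each primitive CLR type maps to its own TF_DataType; in particular byte now produces TF_UINT8 instead of TF_STRING, which the new Tensor(byte[]) constructor relies on. Illustrative calls (not part of the diff):

    using System;
    using System.Numerics;
    using Tensorflow;

    var f = dtypes.as_dtype(typeof(float));    // TF_DataType.TF_FLOAT
    var b = dtypes.as_dtype(typeof(byte));     // TF_DataType.TF_UINT8 (previously TF_STRING)
    var c = dtypes.as_dtype(typeof(Complex));  // TF_DataType.TF_COMPLEX128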


src/TensorFlowNET.Core/Variables/_VariableScopeStore.cs  (+4 -0)

@@ -41,7 +41,11 @@ namespace Tensorflow

public void close_variable_subscopes(string scope_name)
{
var variable_scopes_count_tmp = new Dictionary<string, int>();
foreach (var k in variable_scopes_count.Keys)
variable_scopes_count_tmp.Add(k, variable_scopes_count[k]);

foreach (var k in variable_scopes_count_tmp.Keys)
if (scope_name == null || k.StartsWith(scope_name + "/"))
variable_scopes_count[k] = 0;
}
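
The added lines snapshot the keys into a temporary dictionary before zeroing the counts, because writing to variable_scopes_count while enumerating its Keys can throw InvalidOperationException (before .NET Core 3.0 a Dictionary invalidates its enumerators even when only an existing value is overwritten). A minimal sketch of the pattern (illustrative only):

    using System.Collections.Generic;

    var counts = new Dictionary<string, int> { ["a/x"] = 2, ["a/y"] = 1 };

    // Unsafe on older runtimes: modifying the dictionary while enumerating Keys.
    // foreach (var k in counts.Keys) counts[k] = 0;

    // Safe: enumerate a snapshot of the keys, then write back.
    foreach (var k in new List<string>(counts.Keys))
        counts[k] = 0;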


src/TensorFlowNet.Benchmarks/Program.cs  (+40 -0)

@@ -0,0 +1,40 @@
using System;
using System.Reflection;
using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Running;
namespace TensorFlowNet.Benchmark
{
class Program
{
/// <summary>
/// dotnet NumSharp.Benchmark.dll (Benchmark Class Name)
/// dotnet NumSharp.Benchmark.dll nparange
/// </summary>
/// <param name="args"></param>
static void Main(string[] args)
{
#if DEBUG
IConfig config = new DebugInProcessConfig();
#else
IConfig config = null;
#endif
if (args?.Length > 0)
{
for (int i = 0; i < args.Length; i++)
{
string name = $"TensorFlowNet.Benchmark.{args[i]}";
var type = Type.GetType(name);
BenchmarkRunner.Run(type, config);
}
}
else
{
BenchmarkSwitcher.FromAssembly(Assembly.GetExecutingAssembly()).Run(args, config);
}
Console.ReadLine();
}
}
}

src/TensorFlowNet.Benchmarks/TensorCreation.cs  (+55 -0)

@@ -0,0 +1,55 @@
using System;
using System.Linq;
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Engines;
using NumSharp;
using Tensorflow;
namespace TensorFlowNet.Benchmark
{
[SimpleJob(launchCount: 1, warmupCount: 10, targetCount: 30)]
[MinColumn, MaxColumn, MeanColumn, MedianColumn]
public class TensorCreation
{
private double[] data;
[GlobalSetup]
public void Setup()
{
data = new double[1000];
}
[Benchmark]
public void TensorFromArray()
{
var g=new Graph();
for (int i = 0; i < 100; i++)
{
var tensor = new Tensor(data);
}
}
[Benchmark]
public void TensorFromNDArray()
{
var g = new Graph();
for (int i = 0; i < 100; i++)
{
var tensor = new Tensor(new NDArray(data));
}
}
//[Benchmark]
//public void Constant()
//{
// for (int i = 0; i < 100; i++)
// {
// //var tensor = new Tensor(new NDArray(data));
// var c = tf.constant(42.0);
// }
//}
}
}
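
These benchmarks can be started through the switcher in Program.Main (for example, dotnet TensorFlowNet.Benchmark.dll TensorCreation, which matches the Type.GetType lookup above), or directly from code. A minimal sketch using the standard BenchmarkDotNet runner (illustrative, not part of the diff):

    using BenchmarkDotNet.Running;
    using TensorFlowNet.Benchmark;

    // Runs only the tensor-creation benchmarks defined above.
    BenchmarkRunner.Run<TensorCreation>();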

src/TensorFlowNet.Benchmarks/TensorFlowNet.Benchmark.csproj  (+22 -0)

@@ -0,0 +1,22 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>netcoreapp2.2</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="BenchmarkDotNet" Version="0.11.5" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\TensorFlowNET.Core\TensorFlowNET.Core.csproj" />
</ItemGroup>
<ItemGroup>
<None Update="tensorflow.dll">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project>

src/TensorFlowNet.Benchmarks/tensorflow.dll  (BIN)


test/TensorFlowNET.Examples/AudioProcess/README.md → test/TensorFlowNET.Examples/AudioProcessing/README.md


test/TensorFlowNET.Examples/ImageProcess/DigitRecognitionCNN.cs → test/TensorFlowNET.Examples/ImageProcessing/DigitRecognitionCNN.cs

@@ -73,6 +73,10 @@ namespace TensorFlowNET.Examples.ImageProcess
float accuracy_test = 0f;
float loss_test = 1f;

NDArray x_train, y_train;
NDArray x_valid, y_valid;
NDArray x_test, y_test;

public bool Run()
{
PrepareData();
@@ -133,6 +137,62 @@ namespace TensorFlowNET.Examples.ImageProcess
return graph;
}

public void Train(Session sess)
{
// Number of training iterations in each epoch
var num_tr_iter = y_train.len / batch_size;

var init = tf.global_variables_initializer();
sess.run(init);

float loss_val = 100.0f;
float accuracy_val = 0f;

foreach (var epoch in range(epochs))
{
print($"Training epoch: {epoch + 1}");
// Randomly shuffle the training data at the beginning of each epoch
(x_train, y_train) = mnist.Randomize(x_train, y_train);

foreach (var iteration in range(num_tr_iter))
{
var start = iteration * batch_size;
var end = (iteration + 1) * batch_size;
var (x_batch, y_batch) = mnist.GetNextBatch(x_train, y_train, start, end);

// Run optimization op (backprop)
sess.run(optimizer, new FeedItem(x, x_batch), new FeedItem(y, y_batch));

if (iteration % display_freq == 0)
{
// Calculate and display the batch loss and accuracy
var result = sess.run(new[] { loss, accuracy }, new FeedItem(x, x_batch), new FeedItem(y, y_batch));
loss_val = result[0];
accuracy_val = result[1];
print($"iter {iteration.ToString("000")}: Loss={loss_val.ToString("0.0000")}, Training Accuracy={accuracy_val.ToString("P")}");
}
}

// Run validation after every epoch
var results1 = sess.run(new[] { loss, accuracy }, new FeedItem(x, x_valid), new FeedItem(y, y_valid));
loss_val = results1[0];
accuracy_val = results1[1];
print("---------------------------------------------------------");
print($"Epoch: {epoch + 1}, validation loss: {loss_val.ToString("0.0000")}, validation accuracy: {accuracy_val.ToString("P")}");
print("---------------------------------------------------------");
}
}

public void Test(Session sess)
{
var result = sess.run(new[] { loss, accuracy }, new FeedItem(x, x_test), new FeedItem(y, y_test));
loss_test = result[0];
accuracy_test = result[1];
print("---------------------------------------------------------");
print($"Test loss: {loss_test.ToString("0.0000")}, test accuracy: {accuracy_test.ToString("P")}");
print("---------------------------------------------------------");
}

/// <summary>
/// Create a 2D convolution layer
/// </summary>
@@ -217,6 +277,14 @@ namespace TensorFlowNET.Examples.ImageProcess
initializer: initial);
}

/// <summary>
/// Create a fully-connected layer
/// </summary>
/// <param name="x">input from previous layer</param>
/// <param name="num_units">number of hidden units in the fully-connected layer</param>
/// <param name="name">layer name</param>
/// <param name="use_relu">boolean to add ReLU non-linearity (or not)</param>
/// <returns>The output array</returns>
private Tensor fc_layer(Tensor x, int num_units, string name, bool use_relu = true)
{
return with(tf.variable_scope(name), delegate
@@ -233,73 +301,36 @@ namespace TensorFlowNET.Examples.ImageProcess
return layer;
});
}

public Graph ImportGraph() => throw new NotImplementedException();

public void Predict(Session sess) => throw new NotImplementedException();
public void PrepareData()
{
mnist = MNIST.read_data_sets("mnist", one_hot: true);
(x_train, y_train) = Reformat(mnist.train.data, mnist.train.labels);
(x_valid, y_valid) = Reformat(mnist.validation.data, mnist.validation.labels);
(x_test, y_test) = Reformat(mnist.test.data, mnist.test.labels);

print("Size of:");
print($"- Training-set:\t\t{len(mnist.train.data)}");
print($"- Validation-set:\t{len(mnist.validation.data)}");
}

public void Train(Session sess)
/// <summary>
/// Reformats the data to the format acceptable for convolutional layers
/// </summary>
/// <param name="x"></param>
/// <param name="y"></param>
/// <returns></returns>
private (NDArray, NDArray) Reformat(NDArray x, NDArray y)
{
// Number of training iterations in each epoch
var num_tr_iter = mnist.train.labels.len / batch_size;

var init = tf.global_variables_initializer();
sess.run(init);

float loss_val = 100.0f;
float accuracy_val = 0f;

foreach (var epoch in range(epochs))
{
print($"Training epoch: {epoch + 1}");
// Randomly shuffle the training data at the beginning of each epoch
var (x_train, y_train) = mnist.Randomize(mnist.train.data, mnist.train.labels);

foreach (var iteration in range(num_tr_iter))
{
var start = iteration * batch_size;
var end = (iteration + 1) * batch_size;
var (x_batch, y_batch) = mnist.GetNextBatch(x_train, y_train, start, end);

// Run optimization op (backprop)
sess.run(optimizer, new FeedItem(x, x_batch), new FeedItem(y, y_batch));

if (iteration % display_freq == 0)
{
// Calculate and display the batch loss and accuracy
var result = sess.run(new[] { loss, accuracy }, new FeedItem(x, x_batch), new FeedItem(y, y_batch));
loss_val = result[0];
accuracy_val = result[1];
print($"iter {iteration.ToString("000")}: Loss={loss_val.ToString("0.0000")}, Training Accuracy={accuracy_val.ToString("P")}");
}
}

// Run validation after every epoch
var results1 = sess.run(new[] { loss, accuracy }, new FeedItem(x, mnist.validation.data), new FeedItem(y, mnist.validation.labels));
loss_val = results1[0];
accuracy_val = results1[1];
print("---------------------------------------------------------");
print($"Epoch: {epoch + 1}, validation loss: {loss_val.ToString("0.0000")}, validation accuracy: {accuracy_val.ToString("P")}");
print("---------------------------------------------------------");
}
var (img_size, num_ch, num_class) = (np.sqrt(x.shape[1]), 1, len(np.unique<int>(np.argmax(y, 1))));
var dataset = x.reshape(x.shape[0], img_size, img_size, num_ch).astype(np.float32);
//y[0] = np.arange(num_class) == y[0];
//var labels = (np.arange(num_class) == y.reshape(y.shape[0], 1, y.shape[1])).astype(np.float32);
return (dataset, y);
}

public void Test(Session sess)
{
var result = sess.run(new[] { loss, accuracy }, new FeedItem(x, mnist.test.data), new FeedItem(y, mnist.test.labels));
loss_test = result[0];
accuracy_test = result[1];
print("---------------------------------------------------------");
print($"Test loss: {loss_test.ToString("0.0000")}, test accuracy: {accuracy_test.ToString("P")}");
print("---------------------------------------------------------");
}
public Graph ImportGraph() => throw new NotImplementedException();

public void Predict(Session sess) => throw new NotImplementedException();
}
}

test/TensorFlowNET.Examples/ImageProcess/DigitRecognitionNN.cs → test/TensorFlowNET.Examples/ImageProcessing/DigitRecognitionNN.cs


test/TensorFlowNET.Examples/ImageProcess/ImageBackgroundRemoval.cs → test/TensorFlowNET.Examples/ImageProcessing/ImageBackgroundRemoval.cs


test/TensorFlowNET.Examples/ImageProcess/ImageRecognitionInception.cs → test/TensorFlowNET.Examples/ImageProcessing/ImageRecognitionInception.cs


test/TensorFlowNET.Examples/ImageProcess/InceptionArchGoogLeNet.cs → test/TensorFlowNET.Examples/ImageProcessing/InceptionArchGoogLeNet.cs


test/TensorFlowNET.Examples/ImageProcess/ObjectDetection.cs → test/TensorFlowNET.Examples/ImageProcessing/ObjectDetection.cs

@@ -32,7 +32,7 @@ namespace TensorFlowNET.Examples
{
public bool Enabled { get; set; } = true;
public string Name => "Object Detection";
public bool IsImportingGraph { get; set; } = false;
public bool IsImportingGraph { get; set; } = true;

public float MIN_SCORE = 0.5f;

@@ -42,16 +42,34 @@ namespace TensorFlowNET.Examples
string labelFile = "mscoco_label_map.pbtxt";
string picFile = "input.jpg";

NDArray imgArr;

public bool Run()
{
PrepareData();

// read in the input image
var imgArr = ReadTensorFromImageFile(Path.Join(imageDir, "input.jpg"));
imgArr = ReadTensorFromImageFile(Path.Join(imageDir, "input.jpg"));

var graph = IsImportingGraph ? ImportGraph() : BuildGraph();

with(tf.Session(graph), sess => Predict(sess));

var graph = new Graph().as_default();
return true;
}

public Graph ImportGraph()
{
var graph = new Graph().as_default();
graph.Import(Path.Join(modelDir, pbFile));

return graph;
}

public void Predict(Session sess)
{
var graph = tf.get_default_graph();

Tensor tensorNum = graph.OperationByName("num_detections");
Tensor tensorBoxes = graph.OperationByName("detection_boxes");
Tensor tensorScores = graph.OperationByName("detection_scores");
@@ -59,16 +77,11 @@ namespace TensorFlowNET.Examples
Tensor imgTensor = graph.OperationByName("image_tensor");
Tensor[] outTensorArr = new Tensor[] { tensorNum, tensorBoxes, tensorScores, tensorClasses };

with(tf.Session(graph), sess =>
{
var results = sess.run(outTensorArr, new FeedItem(imgTensor, imgArr));
NDArray[] resultArr = results.Data<NDArray>();
buildOutputImage(resultArr);
});
var results = sess.run(outTensorArr, new FeedItem(imgTensor, imgArr));

return true;
NDArray[] resultArr = results.Data<NDArray>();

buildOutputImage(resultArr);
}

public void PrepareData()
@@ -159,29 +172,8 @@ namespace TensorFlowNET.Examples
}
}

public Graph ImportGraph()
{
throw new NotImplementedException();
}

public Graph BuildGraph()
{
throw new NotImplementedException();
}

public void Train(Session sess)
{
throw new NotImplementedException();
}

public void Predict(Session sess)
{
throw new NotImplementedException();
}

public void Test(Session sess)
{
throw new NotImplementedException();
}
public Graph BuildGraph() => throw new NotImplementedException();
public void Train(Session sess) => throw new NotImplementedException();
public void Test(Session sess) => throw new NotImplementedException();
}
}

test/TensorFlowNET.Examples/ImageProcess/RetrainImageClassifier.cs → test/TensorFlowNET.Examples/ImageProcessing/RetrainImageClassifier.cs


test/TensorFlowNET.Examples/TextProcess/BinaryTextClassification.cs → test/TensorFlowNET.Examples/TextProcessing/BinaryTextClassification.cs


test/TensorFlowNET.Examples/TextProcess/CnnTextClassification.cs → test/TensorFlowNET.Examples/TextProcessing/CnnTextClassification.cs


test/TensorFlowNET.Examples/TextProcess/DataHelpers.cs → test/TensorFlowNET.Examples/TextProcessing/DataHelpers.cs


test/TensorFlowNET.Examples/TextProcess/NER/BiLstmCrfNer.cs → test/TensorFlowNET.Examples/TextProcessing/NER/BiLstmCrfNer.cs


test/TensorFlowNET.Examples/TextProcess/NER/CRF.cs → test/TensorFlowNET.Examples/TextProcessing/NER/CRF.cs


test/TensorFlowNET.Examples/TextProcess/NER/LstmCrfNer.cs → test/TensorFlowNET.Examples/TextProcessing/NER/LstmCrfNer.cs


test/TensorFlowNET.Examples/TextProcess/NamedEntityRecognition.cs → test/TensorFlowNET.Examples/TextProcessing/NamedEntityRecognition.cs


test/TensorFlowNET.Examples/TextProcess/Word2Vec.cs → test/TensorFlowNET.Examples/TextProcessing/Word2Vec.cs


test/TensorFlowNET.Examples/TextProcess/cnn_models/CharCnn.cs → test/TensorFlowNET.Examples/TextProcessing/cnn_models/CharCnn.cs


test/TensorFlowNET.Examples/TextProcess/cnn_models/ITextModel.cs → test/TensorFlowNET.Examples/TextProcessing/cnn_models/ITextModel.cs


test/TensorFlowNET.Examples/TextProcess/cnn_models/VdCnn.cs → test/TensorFlowNET.Examples/TextProcessing/cnn_models/VdCnn.cs


test/TensorFlowNET.Examples/TextProcess/cnn_models/WordCnn.cs → test/TensorFlowNET.Examples/TextProcessing/cnn_models/WordCnn.cs


test/TensorFlowNET.Examples/Utility/Datasets.cs  (+1 -1)

@@ -28,7 +28,7 @@ namespace TensorFlowNET.Examples.Utility
var perm = np.random.permutation(y.shape[0]);

np.random.shuffle(perm);
return (train.data[perm], train.labels[perm]);
return (x[perm], y[perm]);
}

/// <summary>

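This one-line fix makes Randomize shuffle the x and y arguments it is given instead of always reindexing train.data and train.labels; the DigitRecognitionCNN example above depends on it, since it now passes the reformatted x_train/y_train each epoch. Illustrative call (hypothetical variables):

    // Both arrays are reindexed with the same random permutation,
    // so images and labels stay aligned after the shuffle.
    var (x_shuffled, y_shuffled) = mnist.Randomize(x_train, y_train);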

test/TensorFlowNET.UnitTest/ExamplesTests/ExamplesTest.cs  (+3 -0)

@@ -39,6 +39,7 @@ namespace TensorFlowNET.ExamplesTests
new InceptionArchGoogLeNet() { Enabled = true }.Run();
}
[Ignore]
[TestMethod]
public void KMeansClustering()
{
@@ -83,10 +84,12 @@ namespace TensorFlowNET.ExamplesTests
new NearestNeighbor() { Enabled = true, TrainSize = 500, ValidationSize = 100, TestSize = 100 }.Run();
}
[Ignore]
[TestMethod]
public void WordCnnTextClassification()
=> new CnnTextClassification { Enabled = true, ModelName = "word_cnn", DataLimit =100 }.Run();
[Ignore]
[TestMethod]
public void CharCnnTextClassification()
=> new CnnTextClassification { Enabled = true, ModelName = "char_cnn", DataLimit = 100 }.Run();

