
Update the iOS version to end-to-end recognition

Branch: fetches/huiwei13/master
Author: lisiyuan, 5 years ago
Commit: 36d168e1ae
70 changed files with 16539 additions and 0 deletions
  1. Prj-iOS/Resource/README.md (+0 -0)
  2. Prj-iOS/lpr/.gitignore (+3 -0)
  3. Prj-iOS/lpr/DemoImage/Screen Shot 2018-10-26 at 3.43.44 PM.png (BIN)
  4. Prj-iOS/lpr/DemoImage/Screen Shot 2018-10-26 at 3.43.53 PM.png (BIN)
  5. Prj-iOS/lpr/DemoImage/Screen Shot 2018-10-26 at 3.44.04 PM.png (BIN)
  6. Prj-iOS/lpr/Podfile (+8 -0)
  7. Prj-iOS/lpr/Podfile.lock (+20 -0)
  8. Prj-iOS/lpr/README.md (+4 -0)
  9. Prj-iOS/lpr/Resource/CharacterRecognization.caffemodel (BIN)
  10. Prj-iOS/lpr/Resource/CharacterRecognization.prototxt (+123 -0)
  11. Prj-iOS/lpr/Resource/HorizonalFinemapping.caffemodel (BIN)
  12. Prj-iOS/lpr/Resource/HorizonalFinemapping.prototxt (+95 -0)
  13. Prj-iOS/lpr/Resource/SegmenationFree-Inception.caffemodel (BIN)
  14. Prj-iOS/lpr/Resource/SegmenationFree-Inception.prototxt (+454 -0)
  15. Prj-iOS/lpr/Resource/Segmentation.caffemodel (BIN)
  16. Prj-iOS/lpr/Resource/Segmentation.prototxt (+114 -0)
  17. Prj-iOS/lpr/Resource/cascade.xml (+12117 -0)
  18. Prj-iOS/lpr/lpr.xcodeproj/project.pbxproj (+550 -0)
  19. Prj-iOS/lpr/lpr.xcodeproj/project.xcworkspace/contents.xcworkspacedata (+7 -0)
  20. Prj-iOS/lpr/lpr.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist (+8 -0)
  21. Prj-iOS/lpr/lpr.xcodeproj/project.xcworkspace/xcuserdata/tbao.xcuserdatad/UserInterfaceState.xcuserstate (BIN)
  22. Prj-iOS/lpr/lpr.xcodeproj/xcuserdata/tbao.xcuserdatad/xcschemes/xcschememanagement.plist (+19 -0)
  23. Prj-iOS/lpr/lpr.xcworkspace/contents.xcworkspacedata (+10 -0)
  24. Prj-iOS/lpr/lpr.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist (+8 -0)
  25. Prj-iOS/lpr/lpr.xcworkspace/xcuserdata/tbao.xcuserdatad/UserInterfaceState.xcuserstate (BIN)
  26. Prj-iOS/lpr/lpr.xcworkspace/xcuserdata/tbao.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist (+37 -0)
  27. Prj-iOS/lpr/lpr/AppDelegate.h (+17 -0)
  28. Prj-iOS/lpr/lpr/AppDelegate.m (+55 -0)
  29. Prj-iOS/lpr/lpr/Assets.xcassets/AppIcon.appiconset/Contents.json (+98 -0)
  30. Prj-iOS/lpr/lpr/Assets.xcassets/Contents.json (+6 -0)
  31. Prj-iOS/lpr/lpr/Assets.xcassets/back_camera_btn.imageset/Contents.json (+22 -0)
  32. Prj-iOS/lpr/lpr/Assets.xcassets/back_camera_btn.imageset/back_camera_btn@2x.png (BIN)
  33. Prj-iOS/lpr/lpr/Assets.xcassets/back_camera_btn.imageset/back_camera_btn@3x.png (BIN)
  34. Prj-iOS/lpr/lpr/Assets.xcassets/flash_camera_btn.imageset/Contents.json (+22 -0)
  35. Prj-iOS/lpr/lpr/Assets.xcassets/flash_camera_btn.imageset/flash_camera_btn@2x.png (BIN)
  36. Prj-iOS/lpr/lpr/Assets.xcassets/flash_camera_btn.imageset/flash_camera_btn@3x.png (BIN)
  37. Prj-iOS/lpr/lpr/Assets.xcassets/locker_btn_def.imageset/Contents.json (+22 -0)
  38. Prj-iOS/lpr/lpr/Assets.xcassets/locker_btn_def.imageset/locker_btn_def@2x.png (BIN)
  39. Prj-iOS/lpr/lpr/Assets.xcassets/locker_btn_def.imageset/locker_btn_def@3x.png (BIN)
  40. Prj-iOS/lpr/lpr/Assets.xcassets/take_pic_btn.imageset/Contents.json (+22 -0)
  41. Prj-iOS/lpr/lpr/Assets.xcassets/take_pic_btn.imageset/take_pic_btn@2x.png (BIN)
  42. Prj-iOS/lpr/lpr/Assets.xcassets/take_pic_btn.imageset/take_pic_btn@3x.png (BIN)
  43. Prj-iOS/lpr/lpr/Base.lproj/LaunchScreen.storyboard (+25 -0)
  44. Prj-iOS/lpr/lpr/CameraViewController.h (+32 -0)
  45. Prj-iOS/lpr/lpr/CameraViewController.mm (+517 -0)
  46. Prj-iOS/lpr/lpr/Info.plist (+45 -0)
  47. Prj-iOS/lpr/lpr/RootViewController.h (+26 -0)
  48. Prj-iOS/lpr/lpr/RootViewController.mm (+201 -0)
  49. Prj-iOS/lpr/lpr/Source/include/CNNRecognizer.h (+24 -0)
  50. Prj-iOS/lpr/lpr/Source/include/FastDeskew.h (+18 -0)
  51. Prj-iOS/lpr/lpr/Source/include/FineMapping.h (+32 -0)
  52. Prj-iOS/lpr/lpr/Source/include/Pipeline.h (+45 -0)
  53. Prj-iOS/lpr/lpr/Source/include/PlateDetection.h (+33 -0)
  54. Prj-iOS/lpr/lpr/Source/include/PlateInfo.h (+126 -0)
  55. Prj-iOS/lpr/lpr/Source/include/PlateSegmentation.h (+35 -0)
  56. Prj-iOS/lpr/lpr/Source/include/Recognizer.h (+23 -0)
  57. Prj-iOS/lpr/lpr/Source/include/SegmentationFreeRecognizer.h (+28 -0)
  58. Prj-iOS/lpr/lpr/Source/include/niBlackThreshold.h (+107 -0)
  59. Prj-iOS/lpr/lpr/Source/src/CNNRecognizer.cpp (+19 -0)
  60. Prj-iOS/lpr/lpr/Source/src/FastDeskew.cpp (+108 -0)
  61. Prj-iOS/lpr/lpr/Source/src/FineMapping.cpp (+170 -0)
  62. Prj-iOS/lpr/lpr/Source/src/Pipeline.cpp (+103 -0)
  63. Prj-iOS/lpr/lpr/Source/src/PlateDetection.cpp (+32 -0)
  64. Prj-iOS/lpr/lpr/Source/src/PlateSegmentation.cpp (+404 -0)
  65. Prj-iOS/lpr/lpr/Source/src/Recognizer.cpp (+23 -0)
  66. Prj-iOS/lpr/lpr/Source/src/SegmentationFreeRecognizer.cpp (+89 -0)
  67. Prj-iOS/lpr/lpr/Source/src/util.h (+67 -0)
  68. Prj-iOS/lpr/lpr/Utility.h (+30 -0)
  69. Prj-iOS/lpr/lpr/Utility.mm (+320 -0)
  70. Prj-iOS/lpr/lpr/main.m (+16 -0)

Prj-iOS/Resource/README.md (+0 -0)


Prj-iOS/lpr/.gitignore (+3 -0)

@@ -0,0 +1,3 @@
.DS_Store
Pods/
.clang-format

Prj-iOS/lpr/DemoImage/Screen Shot 2018-10-26 at 3.43.44 PM.png (BIN)
Width: 750 | Height: 1334 | Size: 69 kB

Prj-iOS/lpr/DemoImage/Screen Shot 2018-10-26 at 3.43.53 PM.png (BIN)
Width: 750 | Height: 1334 | Size: 3.0 MB

Prj-iOS/lpr/DemoImage/Screen Shot 2018-10-26 at 3.44.04 PM.png (BIN)
Width: 750 | Height: 1334 | Size: 1.2 MB

Prj-iOS/lpr/Podfile (+8 -0)

@@ -0,0 +1,8 @@
platform :ios, '10.1'

target "lpr" do

pod 'OpenCV', '~> 3.4.2'
pod 'Masonry'

end

Prj-iOS/lpr/Podfile.lock (+20 -0)

@@ -0,0 +1,20 @@
PODS:
- Masonry (1.1.0)
- OpenCV (3.4.2)

DEPENDENCIES:
- Masonry
- OpenCV (~> 3.4.2)

SPEC REPOS:
https://github.com/cocoapods/specs.git:
- Masonry
- OpenCV

SPEC CHECKSUMS:
Masonry: 678fab65091a9290e40e2832a55e7ab731aad201
OpenCV: 452909747854c92e0f59670961ed5131f5286cb5

PODFILE CHECKSUM: 94db02cda76a5ac6371a03e6e2ca4e9035fc0da6

COCOAPODS: 1.5.3

Prj-iOS/lpr/README.md (+4 -0)

@@ -0,0 +1,4 @@

Dependency libraries:
- Masonry, 1.1.0
- OpenCV, 3.4.2

Prj-iOS/lpr/Resource/CharacterRecognization.caffemodel (BIN)


Prj-iOS/lpr/Resource/CharacterRecognization.prototxt (+123 -0)

@@ -0,0 +1,123 @@
input: "data"
input_dim: 1
input_dim: 1
input_dim: 30
input_dim: 14
layer {
name: "conv2d_1"
type: "Convolution"
bottom: "data"
top: "conv2d_1"
convolution_param {
num_output: 32
bias_term: true
pad: 0
kernel_size: 3
stride: 1
}
}
layer {
name: "activation_1"
type: "ReLU"
bottom: "conv2d_1"
top: "activation_1"
}
layer {
name: "max_pooling2d_1"
type: "Pooling"
bottom: "activation_1"
top: "max_pooling2d_1"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
pad: 0
}
}
layer {
name: "conv2d_2"
type: "Convolution"
bottom: "max_pooling2d_1"
top: "conv2d_2"
convolution_param {
num_output: 64
bias_term: true
pad: 0
kernel_size: 3
stride: 1
}
}
layer {
name: "activation_2"
type: "ReLU"
bottom: "conv2d_2"
top: "activation_2"
}
layer {
name: "max_pooling2d_2"
type: "Pooling"
bottom: "activation_2"
top: "max_pooling2d_2"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
pad: 0
}
}
layer {
name: "conv2d_3"
type: "Convolution"
bottom: "max_pooling2d_2"
top: "conv2d_3"
convolution_param {
num_output: 128
bias_term: true
pad: 0
kernel_size: 2
stride: 1
}
}
layer {
name: "activation_3"
type: "ReLU"
bottom: "conv2d_3"
top: "activation_3"
}
layer {
name: "flatten_1"
type: "Flatten"
bottom: "activation_3"
top: "flatten_1"
}
layer {
name: "dense_1"
type: "InnerProduct"
bottom: "flatten_1"
top: "dense_1"
inner_product_param {
num_output: 256
}
}
layer {
name: "relu2"
type: "ReLU"
bottom: "dense_1"
top: "relu2"
}
layer {
name: "dense2"
type: "InnerProduct"
bottom: "relu2"
top: "dense2"
inner_product_param {
num_output: 65
}
}

layer {
name: "prob"
type: "Softmax"
bottom: "dense2"
top: "prob"
}
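The prototxt above describes a small conv/ReLU/max-pool stack that maps a 1x1x30x14 grayscale character crop to a 65-way softmax, presumably one score per symbol in the plate character set. A minimal Objective-C++ sketch of driving such a prototxt/caffemodel pair with OpenCV's dnn module (the 1/255 input scaling is an assumption; the wrapper this commit actually ships is CNNRecognizer.cpp):

#include <opencv2/core.hpp>
#include <opencv2/dnn.hpp>

// Sketch only, not the shipped CNNRecognizer. Load the net once, e.g.:
//   cv::dnn::Net net = cv::dnn::readNetFromCaffe(prototxtPath, caffemodelPath);
int recognizeCharacter(const cv::Mat &grayCrop, cv::dnn::Net &net) {
    // Input shape per the prototxt: 1 x 1 x 30 x 14 (N x C x H x W),
    // so blobFromImage gets Size(width = 14, height = 30).
    cv::Mat blob = cv::dnn::blobFromImage(grayCrop, 1.0 / 255.0, cv::Size(14, 30));
    net.setInput(blob, "data");
    cv::Mat prob = net.forward("prob");      // 1 x 65 softmax scores
    cv::Point classId;
    cv::minMaxLoc(prob.reshape(1, 1), nullptr, nullptr, nullptr, &classId);
    return classId.x;                        // index into a 65-entry character table
}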

Prj-iOS/lpr/Resource/HorizonalFinemapping.caffemodel (BIN)


Prj-iOS/lpr/Resource/HorizonalFinemapping.prototxt (+95 -0)

@@ -0,0 +1,95 @@
input: "data"
input_dim: 1
input_dim: 3
input_dim: 16
input_dim: 66
layer {
name: "conv1"
type: "Convolution"
bottom: "data"
top: "conv1"
convolution_param {
num_output: 10
bias_term: true
pad: 0
kernel_size: 3
stride: 1
}
}
layer {
name: "relu1"
type: "ReLU"
bottom: "conv1"
top: "conv1"
}
layer {
name: "max_pooling2d_3"
type: "Pooling"
bottom: "conv1"
top: "max_pooling2d_3"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
pad: 0
}
}
layer {
name: "conv2"
type: "Convolution"
bottom: "max_pooling2d_3"
top: "conv2"
convolution_param {
num_output: 16
bias_term: true
pad: 0
kernel_size: 3
stride: 1
}
}
layer {
name: "relu2"
type: "ReLU"
bottom: "conv2"
top: "conv2"
}
layer {
name: "conv3"
type: "Convolution"
bottom: "conv2"
top: "conv3"
convolution_param {
num_output: 32
bias_term: true
pad: 0
kernel_size: 3
stride: 1
}
}
layer {
name: "relu3"
type: "ReLU"
bottom: "conv3"
top: "conv3"
}
layer {
name: "flatten_2"
type: "Flatten"
bottom: "conv3"
top: "flatten_2"
}
layer {
name: "dense"
type: "InnerProduct"
bottom: "flatten_2"
top: "dense"
inner_product_param {
num_output: 2
}
}
layer {
name: "relu4"
type: "ReLU"
bottom: "dense"
top: "dense"
}
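This fine-mapping network takes a 3x16x66 color strip of a candidate plate and ends in a two-unit InnerProduct layer; the pipeline treats the two outputs as the plate's left and right horizontal bounds. A hedged sketch of that use, assuming the outputs are x-coordinates in the 66-pixel input space (the shipped post-processing lives in FineMapping.cpp, added by this commit):

#include <algorithm>
#include <opencv2/dnn.hpp>
#include <opencv2/imgproc.hpp>

// Sketch: regress left/right bounds, then crop. Treating the outputs as
// x-coordinates scaled to the 66-px input width is an assumption.
cv::Mat fineMappingHorizontal(const cv::Mat &plate, cv::dnn::Net &net) {
    cv::Mat resized;
    cv::resize(plate, resized, cv::Size(66, 16));
    cv::Mat blob = cv::dnn::blobFromImage(resized, 1.0 / 255.0, cv::Size(66, 16));
    net.setInput(blob, "data");
    cv::Mat bounds = net.forward();          // 1 x 2: [left, right]
    float scale = plate.cols / 66.0f;
    int left  = std::max(cvRound(bounds.at<float>(0, 0) * scale), 0);
    int right = std::min(cvRound(bounds.at<float>(0, 1) * scale), plate.cols);
    if (right <= left) return plate;         // fall back to the raw crop
    return plate(cv::Rect(left, 0, right - left, plate.rows)).clone();
}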

Prj-iOS/lpr/Resource/SegmenationFree-Inception.caffemodel (BIN)


Prj-iOS/lpr/Resource/SegmenationFree-Inception.prototxt (+454 -0)

@@ -0,0 +1,454 @@
input: "data"
input_dim: 1
input_dim: 3
input_dim: 160
input_dim: 40
layer {
name: "conv0"
type: "Convolution"
bottom: "data"
top: "conv0"
convolution_param {
num_output: 32
bias_term: true
pad_h: 1
pad_w: 1
kernel_h: 3
kernel_w: 3
stride_h: 1
stride_w: 1
}
}
layer {
name: "bn0"
type: "BatchNorm"
bottom: "conv0"
top: "bn0"
batch_norm_param {
moving_average_fraction: 0.99
eps: 0.001
}
}
layer {
name: "bn0_scale"
type: "Scale"
bottom: "bn0"
top: "bn0"
scale_param {
bias_term: true
}
}
layer {
name: "relu0"
type: "ReLU"
bottom: "bn0"
top: "bn0"
}
layer {
name: "pool0"
type: "Pooling"
bottom: "bn0"
top: "pool0"
pooling_param {
pool: MAX
kernel_h: 2
kernel_w: 2
stride_h: 2
stride_w: 2
pad_h: 0
pad_w: 0
}
}
layer {
name: "conv1"
type: "Convolution"
bottom: "pool0"
top: "conv1"
convolution_param {
num_output: 64
bias_term: true
pad_h: 1
pad_w: 1
kernel_h: 3
kernel_w: 3
stride_h: 1
stride_w: 1
}
}
layer {
name: "bn1"
type: "BatchNorm"
bottom: "conv1"
top: "bn1"
batch_norm_param {
moving_average_fraction: 0.99
eps: 0.001
}
}
layer {
name: "bn1_scale"
type: "Scale"
bottom: "bn1"
top: "bn1"
scale_param {
bias_term: true
}
}
layer {
name: "relu1"
type: "ReLU"
bottom: "bn1"
top: "bn1"
}
layer {
name: "pool1"
type: "Pooling"
bottom: "bn1"
top: "pool1"
pooling_param {
pool: MAX
kernel_h: 2
kernel_w: 2
stride_h: 2
stride_w: 2
pad_h: 0
pad_w: 0
}
}
layer {
name: "conv2"
type: "Convolution"
bottom: "pool1"
top: "conv2"
convolution_param {
num_output: 128
bias_term: true
pad_h: 1
pad_w: 1
kernel_h: 3
kernel_w: 3
stride_h: 1
stride_w: 1
}
}
layer {
name: "bn2"
type: "BatchNorm"
bottom: "conv2"
top: "bn2"
batch_norm_param {
moving_average_fraction: 0.99
eps: 0.001
}
}
layer {
name: "bn2_scale"
type: "Scale"
bottom: "bn2"
top: "bn2"
scale_param {
bias_term: true
}
}
layer {
name: "relu2"
type: "ReLU"
bottom: "bn2"
top: "bn2"
}
layer {
name: "pool2"
type: "Pooling"
bottom: "bn2"
top: "pool2"
pooling_param {
pool: MAX
kernel_h: 2
kernel_w: 2
stride_h: 2
stride_w: 2
pad_h: 0
pad_w: 0
}
}
layer {
name: "conv2d_1"
type: "Convolution"
bottom: "pool2"
top: "conv2d_1"
convolution_param {
num_output: 256
bias_term: true
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 5
stride_h: 1
stride_w: 1
}
}
layer {
name: "batch_normalization_1"
type: "BatchNorm"
bottom: "conv2d_1"
top: "batch_normalization_1"
batch_norm_param {
moving_average_fraction: 0.99
eps: 0.001
}
}
layer {
name: "batch_normalization_1_scale"
type: "Scale"
bottom: "batch_normalization_1"
top: "batch_normalization_1"
scale_param {
bias_term: true
}
}
layer {
name: "activation_1"
type: "ReLU"
bottom: "batch_normalization_1"
top: "batch_normalization_1"
}
layer {
name: "conv2d_2"
type: "Convolution"
bottom: "batch_normalization_1"
top: "conv2d_2"
convolution_param {
num_output: 256
bias_term: true
pad_h: 3
pad_w: 0
kernel_h: 7
kernel_w: 1
stride_h: 1
stride_w: 1
}
}
layer {
name: "conv2d_3"
type: "Convolution"
bottom: "batch_normalization_1"
top: "conv2d_3"
convolution_param {
num_output: 256
bias_term: true
pad_h: 2
pad_w: 0
kernel_h: 5
kernel_w: 1
stride_h: 1
stride_w: 1
}
}
layer {
name: "conv2d_4"
type: "Convolution"
bottom: "batch_normalization_1"
top: "conv2d_4"
convolution_param {
num_output: 256
bias_term: true
pad_h: 1
pad_w: 0
kernel_h: 3
kernel_w: 1
stride_h: 1
stride_w: 1
}
}
layer {
name: "conv2d_5"
type: "Convolution"
bottom: "batch_normalization_1"
top: "conv2d_5"
convolution_param {
num_output: 256
bias_term: true
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
}
}
layer {
name: "batch_normalization_2"
type: "BatchNorm"
bottom: "conv2d_2"
top: "batch_normalization_2"
batch_norm_param {
moving_average_fraction: 0.99
eps: 0.001
}
}
layer {
name: "batch_normalization_2_scale"
type: "Scale"
bottom: "batch_normalization_2"
top: "batch_normalization_2"
scale_param {
bias_term: true
}
}
layer {
name: "batch_normalization_3"
type: "BatchNorm"
bottom: "conv2d_3"
top: "batch_normalization_3"
batch_norm_param {
moving_average_fraction: 0.99
eps: 0.001
}
}
layer {
name: "batch_normalization_3_scale"
type: "Scale"
bottom: "batch_normalization_3"
top: "batch_normalization_3"
scale_param {
bias_term: true
}
}
layer {
name: "batch_normalization_4"
type: "BatchNorm"
bottom: "conv2d_4"
top: "batch_normalization_4"
batch_norm_param {
moving_average_fraction: 0.99
eps: 0.001
}
}
layer {
name: "batch_normalization_4_scale"
type: "Scale"
bottom: "batch_normalization_4"
top: "batch_normalization_4"
scale_param {
bias_term: true
}
}
layer {
name: "batch_normalization_5"
type: "BatchNorm"
bottom: "conv2d_5"
top: "batch_normalization_5"
batch_norm_param {
moving_average_fraction: 0.99
eps: 0.001
}
}
layer {
name: "batch_normalization_5_scale"
type: "Scale"
bottom: "batch_normalization_5"
top: "batch_normalization_5"
scale_param {
bias_term: true
}
}
layer {
name: "activation_2"
type: "ReLU"
bottom: "batch_normalization_2"
top: "batch_normalization_2"
}
layer {
name: "activation_3"
type: "ReLU"
bottom: "batch_normalization_3"
top: "batch_normalization_3"
}
layer {
name: "activation_4"
type: "ReLU"
bottom: "batch_normalization_4"
top: "batch_normalization_4"
}
layer {
name: "activation_5"
type: "ReLU"
bottom: "batch_normalization_5"
top: "batch_normalization_5"
}
layer {
name: "concatenate_1"
type: "Concat"
bottom: "batch_normalization_2"
bottom: "batch_normalization_3"
bottom: "batch_normalization_4"
bottom: "batch_normalization_5"
top: "concatenate_1"
concat_param {
axis: 1
}
}
layer {
name: "conv_1024_11"
type: "Convolution"
bottom: "concatenate_1"
top: "conv_1024_11"
convolution_param {
num_output: 1024
bias_term: true
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
}
}
layer {
name: "batch_normalization_6"
type: "BatchNorm"
bottom: "conv_1024_11"
top: "batch_normalization_6"
batch_norm_param {
moving_average_fraction: 0.99
eps: 0.001
}
}
layer {
name: "batch_normalization_6_scale"
type: "Scale"
bottom: "batch_normalization_6"
top: "batch_normalization_6"
scale_param {
bias_term: true
}
}
layer {
name: "activation_6"
type: "ReLU"
bottom: "batch_normalization_6"
top: "batch_normalization_6"
}
layer {
name: "conv_class_11"
type: "Convolution"
bottom: "batch_normalization_6"
top: "conv_class_11"
convolution_param {
num_output: 84
bias_term: true
pad_h: 0
pad_w: 0
kernel_h: 1
kernel_w: 1
stride_h: 1
stride_w: 1
}
}
layer {
name: "prob"
type: "Softmax"
bottom: "conv_class_11"
top: "prob"
}
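This Inception-style network is the end-to-end ("segmentation-free") recognizer named in the commit message: it convolves over the whole plate and, after conv_class_11 and the final Softmax, emits an 84-class distribution per horizontal step. Decoding is CTC-style: argmax at each step, collapse repeats, drop the blank class. A sketch of that decoding, assuming the output blob is 1 x 84 x T x 1 and that class 0 is the blank (both are assumptions; the shipped decoder is SegmentationFreeRecognizer.cpp):

#include <opencv2/core.hpp>
#include <string>
#include <vector>

// Sketch: greedy CTC-style decoding of the segmentation-free output.
// The blank index (0) and blob layout are assumptions, not verified
// against SegmentationFreeRecognizer.cpp.
std::string greedyDecode(const cv::Mat &prob,
                         const std::vector<std::string> &table) {
    const int classes = prob.size[1];        // 84
    const int steps   = prob.size[2];        // sequence length T
    std::string plate;
    int last = -1;
    for (int t = 0; t < steps; ++t) {
        int best = 0;
        float bestScore = -1.0f;
        for (int c = 0; c < classes; ++c) {
            const int idx[4] = {0, c, t, 0};
            float s = prob.at<float>(idx);
            if (s > bestScore) { bestScore = s; best = c; }
        }
        if (best != 0 && best != last)       // skip blank and repeats
            plate += table[best];
        last = best;
    }
    return plate;
}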


Prj-iOS/lpr/Resource/Segmentation.caffemodel (BIN)


Prj-iOS/lpr/Resource/Segmentation.prototxt (+114 -0)

@@ -0,0 +1,114 @@
input: "data"
input_dim: 1
input_dim: 1
input_dim: 22
input_dim: 22
layer {
name: "conv2d_12"
type: "Convolution"
bottom: "data"
top: "conv2d_12"
convolution_param {
num_output: 16
bias_term: true
pad: 0
kernel_size: 3
stride: 1
}
}
layer {
name: "activation_18"
type: "ReLU"
bottom: "conv2d_12"
top: "activation_18"
}
layer {
name: "max_pooling2d_10"
type: "Pooling"
bottom: "activation_18"
top: "max_pooling2d_10"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
pad: 0
}
}
layer {
name: "conv2d_13"
type: "Convolution"
bottom: "max_pooling2d_10"
top: "conv2d_13"
convolution_param {
num_output: 16
bias_term: true
pad: 0
kernel_size: 3
stride: 1
}
}
layer {
name: "activation_19"
type: "ReLU"
bottom: "conv2d_13"
top: "activation_19"
}
layer {
name: "max_pooling2d_11"
type: "Pooling"
bottom: "activation_19"
top: "max_pooling2d_11"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
pad: 0
}
}
layer {
name: "flatten_6"
type: "Flatten"
bottom: "max_pooling2d_11"
top: "flatten_6"
}
layer {
name: "dense_9"
type: "InnerProduct"
bottom: "flatten_6"
top: "dense_9"
inner_product_param {
num_output: 256
}
}
layer {
name: "dropout_9"
type: "Dropout"
bottom: "dense_9"
top: "dropout_9"
dropout_param {
dropout_ratio: 0.5
}
}
layer {
name: "activation_20"
type: "ReLU"
bottom: "dropout_9"
top: "activation_20"
}
layer {
name: "dense_10"
type: "InnerProduct"
bottom: "activation_20"
top: "dense_10"
inner_product_param {
num_output: 3
}
}


layer {
name: "prob"
type: "Softmax"
bottom: "dense_10"
top: "prob"
}
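The Segmentation model is a tiny 3-way classifier over 1x22x22 grayscale patches; the plate-segmentation code scores sliding windows with it when searching for character boundaries. A minimal sketch of scoring one patch (the meaning of the three classes is not stated in the prototxt and is left to PlateSegmentation.cpp):

#include <opencv2/dnn.hpp>

// Sketch: score one 22x22 grayscale patch with the Segmentation net.
// How the three softmax outputs are interpreted is an open assumption;
// consult PlateSegmentation.cpp for the shipped usage.
cv::Mat scorePatch(const cv::Mat &patch, cv::dnn::Net &net) {
    // Input shape per the prototxt: 1 x 1 x 22 x 22.
    cv::Mat blob = cv::dnn::blobFromImage(patch, 1.0 / 255.0, cv::Size(22, 22));
    net.setInput(blob, "data");
    return net.forward("prob");              // 1 x 3 softmax scores
}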

Prj-iOS/lpr/Resource/cascade.xml (+12117 -0)
File diff suppressed because it is too large


Prj-iOS/lpr/lpr.xcodeproj/project.pbxproj (+550 -0)

@@ -0,0 +1,550 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 50;
objects = {

/* Begin PBXBuildFile section */
343B620D21997FC800D03830 /* SegmentationFreeRecognizer.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 343B620C21997FC700D03830 /* SegmentationFreeRecognizer.cpp */; };
343B6210219980FE00D03830 /* SegmenationFree-Inception.caffemodel in Resources */ = {isa = PBXBuildFile; fileRef = 343B620E219980FD00D03830 /* SegmenationFree-Inception.caffemodel */; };
343B6211219980FE00D03830 /* SegmenationFree-Inception.prototxt in Resources */ = {isa = PBXBuildFile; fileRef = 343B620F219980FE00D03830 /* SegmenationFree-Inception.prototxt */; };
343B621521998AB200D03830 /* CameraViewController.mm in Sources */ = {isa = PBXBuildFile; fileRef = 343B621421998AB200D03830 /* CameraViewController.mm */; };
34ECFDD12182DD9300B162D4 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 34ECFDD02182DD9300B162D4 /* AppDelegate.m */; };
34ECFDD92182DD9400B162D4 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 34ECFDD82182DD9400B162D4 /* Assets.xcassets */; };
34ECFDDC2182DD9400B162D4 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 34ECFDDA2182DD9400B162D4 /* LaunchScreen.storyboard */; };
34ECFDDF2182DD9400B162D4 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 34ECFDDE2182DD9400B162D4 /* main.m */; };
34ECFE152182DEF500B162D4 /* PlateSegmentation.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 34ECFE0D2182DEF500B162D4 /* PlateSegmentation.cpp */; };
34ECFE162182DEF500B162D4 /* PlateDetection.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 34ECFE0E2182DEF500B162D4 /* PlateDetection.cpp */; };
34ECFE172182DEF500B162D4 /* Pipeline.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 34ECFE0F2182DEF500B162D4 /* Pipeline.cpp */; };
34ECFE182182DEF500B162D4 /* FastDeskew.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 34ECFE102182DEF500B162D4 /* FastDeskew.cpp */; };
34ECFE192182DEF500B162D4 /* Recognizer.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 34ECFE112182DEF500B162D4 /* Recognizer.cpp */; };
34ECFE1A2182DEF500B162D4 /* FineMapping.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 34ECFE122182DEF500B162D4 /* FineMapping.cpp */; };
34ECFE1B2182DEF500B162D4 /* CNNRecognizer.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 34ECFE142182DEF500B162D4 /* CNNRecognizer.cpp */; };
34ECFE242182DF2F00B162D4 /* HorizonalFinemapping.caffemodel in Resources */ = {isa = PBXBuildFile; fileRef = 34ECFE1D2182DF2E00B162D4 /* HorizonalFinemapping.caffemodel */; };
34ECFE252182DF2F00B162D4 /* CharacterRecognization.caffemodel in Resources */ = {isa = PBXBuildFile; fileRef = 34ECFE1E2182DF2E00B162D4 /* CharacterRecognization.caffemodel */; };
34ECFE262182DF2F00B162D4 /* cascade.xml in Resources */ = {isa = PBXBuildFile; fileRef = 34ECFE1F2182DF2E00B162D4 /* cascade.xml */; };
34ECFE272182DF2F00B162D4 /* Segmentation.prototxt in Resources */ = {isa = PBXBuildFile; fileRef = 34ECFE202182DF2E00B162D4 /* Segmentation.prototxt */; };
34ECFE282182DF2F00B162D4 /* HorizonalFinemapping.prototxt in Resources */ = {isa = PBXBuildFile; fileRef = 34ECFE212182DF2E00B162D4 /* HorizonalFinemapping.prototxt */; };
34ECFE292182DF2F00B162D4 /* Segmentation.caffemodel in Resources */ = {isa = PBXBuildFile; fileRef = 34ECFE222182DF2E00B162D4 /* Segmentation.caffemodel */; };
34ECFE2A2182DF2F00B162D4 /* CharacterRecognization.prototxt in Resources */ = {isa = PBXBuildFile; fileRef = 34ECFE232182DF2E00B162D4 /* CharacterRecognization.prototxt */; };
34ECFE2D2182E10700B162D4 /* RootViewController.mm in Sources */ = {isa = PBXBuildFile; fileRef = 34ECFE2C2182E10700B162D4 /* RootViewController.mm */; };
34ECFE352182E56900B162D4 /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 34ECFE342182E56900B162D4 /* AVFoundation.framework */; };
34ECFE382182E5F100B162D4 /* Utility.mm in Sources */ = {isa = PBXBuildFile; fileRef = 34ECFE372182E5F100B162D4 /* Utility.mm */; };
34ECFE3A2182FAB700B162D4 /* CoreImage.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 34ECFE392182FAB700B162D4 /* CoreImage.framework */; };
A396A338993421DC89845E63 /* libPods-lpr.a in Frameworks */ = {isa = PBXBuildFile; fileRef = A33861A99BC945120023AC25 /* libPods-lpr.a */; };
/* End PBXBuildFile section */

/* Begin PBXFileReference section */
343B620B21997FC100D03830 /* SegmentationFreeRecognizer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SegmentationFreeRecognizer.h; sourceTree = "<group>"; };
343B620C21997FC700D03830 /* SegmentationFreeRecognizer.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = SegmentationFreeRecognizer.cpp; sourceTree = "<group>"; };
343B620E219980FD00D03830 /* SegmenationFree-Inception.caffemodel */ = {isa = PBXFileReference; lastKnownFileType = file; path = "SegmenationFree-Inception.caffemodel"; sourceTree = "<group>"; };
343B620F219980FE00D03830 /* SegmenationFree-Inception.prototxt */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = "SegmenationFree-Inception.prototxt"; sourceTree = "<group>"; };
343B621321998AB200D03830 /* CameraViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = CameraViewController.h; path = lpr/CameraViewController.h; sourceTree = SOURCE_ROOT; };
343B621421998AB200D03830 /* CameraViewController.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = CameraViewController.mm; path = lpr/CameraViewController.mm; sourceTree = SOURCE_ROOT; };
34ECFDCC2182DD9300B162D4 /* lpr.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = lpr.app; sourceTree = BUILT_PRODUCTS_DIR; };
34ECFDCF2182DD9300B162D4 /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; };
34ECFDD02182DD9300B162D4 /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = "<group>"; };
34ECFDD82182DD9400B162D4 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
34ECFDDB2182DD9400B162D4 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
34ECFDDD2182DD9400B162D4 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
34ECFDDE2182DD9400B162D4 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = "<group>"; };
34ECFE032182DEF500B162D4 /* PlateSegmentation.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = PlateSegmentation.h; sourceTree = "<group>"; };
34ECFE042182DEF500B162D4 /* Recognizer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = Recognizer.h; sourceTree = "<group>"; };
34ECFE052182DEF500B162D4 /* PlateDetection.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = PlateDetection.h; sourceTree = "<group>"; };
34ECFE062182DEF500B162D4 /* PlateInfo.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = PlateInfo.h; sourceTree = "<group>"; };
34ECFE072182DEF500B162D4 /* FineMapping.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = FineMapping.h; sourceTree = "<group>"; };
34ECFE082182DEF500B162D4 /* niBlackThreshold.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = niBlackThreshold.h; sourceTree = "<group>"; };
34ECFE092182DEF500B162D4 /* FastDeskew.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = FastDeskew.h; sourceTree = "<group>"; };
34ECFE0A2182DEF500B162D4 /* Pipeline.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = Pipeline.h; sourceTree = "<group>"; };
34ECFE0B2182DEF500B162D4 /* CNNRecognizer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CNNRecognizer.h; sourceTree = "<group>"; };
34ECFE0D2182DEF500B162D4 /* PlateSegmentation.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = PlateSegmentation.cpp; sourceTree = "<group>"; };
34ECFE0E2182DEF500B162D4 /* PlateDetection.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = PlateDetection.cpp; sourceTree = "<group>"; };
34ECFE0F2182DEF500B162D4 /* Pipeline.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = Pipeline.cpp; sourceTree = "<group>"; };
34ECFE102182DEF500B162D4 /* FastDeskew.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = FastDeskew.cpp; sourceTree = "<group>"; };
34ECFE112182DEF500B162D4 /* Recognizer.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = Recognizer.cpp; sourceTree = "<group>"; };
34ECFE122182DEF500B162D4 /* FineMapping.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = FineMapping.cpp; sourceTree = "<group>"; };
34ECFE132182DEF500B162D4 /* util.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = util.h; sourceTree = "<group>"; };
34ECFE142182DEF500B162D4 /* CNNRecognizer.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = CNNRecognizer.cpp; sourceTree = "<group>"; };
34ECFE1D2182DF2E00B162D4 /* HorizonalFinemapping.caffemodel */ = {isa = PBXFileReference; lastKnownFileType = file; path = HorizonalFinemapping.caffemodel; sourceTree = "<group>"; };
34ECFE1E2182DF2E00B162D4 /* CharacterRecognization.caffemodel */ = {isa = PBXFileReference; lastKnownFileType = file; path = CharacterRecognization.caffemodel; sourceTree = "<group>"; };
34ECFE1F2182DF2E00B162D4 /* cascade.xml */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xml; path = cascade.xml; sourceTree = "<group>"; };
34ECFE202182DF2E00B162D4 /* Segmentation.prototxt */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = Segmentation.prototxt; sourceTree = "<group>"; };
34ECFE212182DF2E00B162D4 /* HorizonalFinemapping.prototxt */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = HorizonalFinemapping.prototxt; sourceTree = "<group>"; };
34ECFE222182DF2E00B162D4 /* Segmentation.caffemodel */ = {isa = PBXFileReference; lastKnownFileType = file; path = Segmentation.caffemodel; sourceTree = "<group>"; };
34ECFE232182DF2E00B162D4 /* CharacterRecognization.prototxt */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = CharacterRecognization.prototxt; sourceTree = "<group>"; };
34ECFE2B2182E10700B162D4 /* RootViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RootViewController.h; sourceTree = "<group>"; };
34ECFE2C2182E10700B162D4 /* RootViewController.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = RootViewController.mm; sourceTree = "<group>"; };
34ECFE342182E56900B162D4 /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; };
34ECFE362182E5F100B162D4 /* Utility.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Utility.h; sourceTree = "<group>"; };
34ECFE372182E5F100B162D4 /* Utility.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = Utility.mm; sourceTree = "<group>"; };
34ECFE392182FAB700B162D4 /* CoreImage.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreImage.framework; path = System/Library/Frameworks/CoreImage.framework; sourceTree = SDKROOT; };
66BAD4CEFABD6705C5100968 /* Pods-lpr.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-lpr.release.xcconfig"; path = "Pods/Target Support Files/Pods-lpr/Pods-lpr.release.xcconfig"; sourceTree = "<group>"; };
A33861A99BC945120023AC25 /* libPods-lpr.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-lpr.a"; sourceTree = BUILT_PRODUCTS_DIR; };
E169020B9373AF28E9C472DD /* Pods-lpr.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-lpr.debug.xcconfig"; path = "Pods/Target Support Files/Pods-lpr/Pods-lpr.debug.xcconfig"; sourceTree = "<group>"; };
/* End PBXFileReference section */

/* Begin PBXFrameworksBuildPhase section */
34ECFDC92182DD9300B162D4 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
34ECFE3A2182FAB700B162D4 /* CoreImage.framework in Frameworks */,
34ECFE352182E56900B162D4 /* AVFoundation.framework in Frameworks */,
A396A338993421DC89845E63 /* libPods-lpr.a in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */

/* Begin PBXGroup section */
34ECFDC32182DD9300B162D4 = {
isa = PBXGroup;
children = (
34ECFDCE2182DD9300B162D4 /* lpr */,
34ECFE1C2182DF2E00B162D4 /* Resource */,
34ECFDCD2182DD9300B162D4 /* Products */,
DE72138A4B42368326688C6A /* Pods */,
97C4CF37B3E356C8641CE105 /* Frameworks */,
);
sourceTree = "<group>";
};
34ECFDCD2182DD9300B162D4 /* Products */ = {
isa = PBXGroup;
children = (
34ECFDCC2182DD9300B162D4 /* lpr.app */,
);
name = Products;
sourceTree = "<group>";
};
34ECFDCE2182DD9300B162D4 /* lpr */ = {
isa = PBXGroup;
children = (
34ECFE012182DEF500B162D4 /* Source */,
34ECFDCF2182DD9300B162D4 /* AppDelegate.h */,
34ECFDD02182DD9300B162D4 /* AppDelegate.m */,
34ECFDD82182DD9400B162D4 /* Assets.xcassets */,
34ECFDDA2182DD9400B162D4 /* LaunchScreen.storyboard */,
34ECFDDD2182DD9400B162D4 /* Info.plist */,
34ECFDDE2182DD9400B162D4 /* main.m */,
34ECFE2B2182E10700B162D4 /* RootViewController.h */,
34ECFE2C2182E10700B162D4 /* RootViewController.mm */,
343B621321998AB200D03830 /* CameraViewController.h */,
343B621421998AB200D03830 /* CameraViewController.mm */,
34ECFE362182E5F100B162D4 /* Utility.h */,
34ECFE372182E5F100B162D4 /* Utility.mm */,
);
path = lpr;
sourceTree = "<group>";
};
34ECFE012182DEF500B162D4 /* Source */ = {
isa = PBXGroup;
children = (
34ECFE022182DEF500B162D4 /* include */,
34ECFE0C2182DEF500B162D4 /* src */,
);
path = Source;
sourceTree = "<group>";
};
34ECFE022182DEF500B162D4 /* include */ = {
isa = PBXGroup;
children = (
343B620B21997FC100D03830 /* SegmentationFreeRecognizer.h */,
34ECFE032182DEF500B162D4 /* PlateSegmentation.h */,
34ECFE042182DEF500B162D4 /* Recognizer.h */,
34ECFE052182DEF500B162D4 /* PlateDetection.h */,
34ECFE062182DEF500B162D4 /* PlateInfo.h */,
34ECFE072182DEF500B162D4 /* FineMapping.h */,
34ECFE082182DEF500B162D4 /* niBlackThreshold.h */,
34ECFE092182DEF500B162D4 /* FastDeskew.h */,
34ECFE0A2182DEF500B162D4 /* Pipeline.h */,
34ECFE0B2182DEF500B162D4 /* CNNRecognizer.h */,
);
path = include;
sourceTree = "<group>";
};
34ECFE0C2182DEF500B162D4 /* src */ = {
isa = PBXGroup;
children = (
343B620C21997FC700D03830 /* SegmentationFreeRecognizer.cpp */,
34ECFE0D2182DEF500B162D4 /* PlateSegmentation.cpp */,
34ECFE0E2182DEF500B162D4 /* PlateDetection.cpp */,
34ECFE0F2182DEF500B162D4 /* Pipeline.cpp */,
34ECFE102182DEF500B162D4 /* FastDeskew.cpp */,
34ECFE112182DEF500B162D4 /* Recognizer.cpp */,
34ECFE122182DEF500B162D4 /* FineMapping.cpp */,
34ECFE132182DEF500B162D4 /* util.h */,
34ECFE142182DEF500B162D4 /* CNNRecognizer.cpp */,
);
path = src;
sourceTree = "<group>";
};
34ECFE1C2182DF2E00B162D4 /* Resource */ = {
isa = PBXGroup;
children = (
343B620E219980FD00D03830 /* SegmenationFree-Inception.caffemodel */,
343B620F219980FE00D03830 /* SegmenationFree-Inception.prototxt */,
34ECFE1D2182DF2E00B162D4 /* HorizonalFinemapping.caffemodel */,
34ECFE1E2182DF2E00B162D4 /* CharacterRecognization.caffemodel */,
34ECFE1F2182DF2E00B162D4 /* cascade.xml */,
34ECFE202182DF2E00B162D4 /* Segmentation.prototxt */,
34ECFE212182DF2E00B162D4 /* HorizonalFinemapping.prototxt */,
34ECFE222182DF2E00B162D4 /* Segmentation.caffemodel */,
34ECFE232182DF2E00B162D4 /* CharacterRecognization.prototxt */,
);
path = Resource;
sourceTree = "<group>";
};
97C4CF37B3E356C8641CE105 /* Frameworks */ = {
isa = PBXGroup;
children = (
34ECFE392182FAB700B162D4 /* CoreImage.framework */,
34ECFE342182E56900B162D4 /* AVFoundation.framework */,
A33861A99BC945120023AC25 /* libPods-lpr.a */,
);
name = Frameworks;
sourceTree = "<group>";
};
DE72138A4B42368326688C6A /* Pods */ = {
isa = PBXGroup;
children = (
E169020B9373AF28E9C472DD /* Pods-lpr.debug.xcconfig */,
66BAD4CEFABD6705C5100968 /* Pods-lpr.release.xcconfig */,
);
name = Pods;
sourceTree = "<group>";
};
/* End PBXGroup section */

/* Begin PBXNativeTarget section */
34ECFDCB2182DD9300B162D4 /* lpr */ = {
isa = PBXNativeTarget;
buildConfigurationList = 34ECFDE22182DD9400B162D4 /* Build configuration list for PBXNativeTarget "lpr" */;
buildPhases = (
12CCA15FA936D51F737FDEDF /* [CP] Check Pods Manifest.lock */,
34ECFDC82182DD9300B162D4 /* Sources */,
34ECFDC92182DD9300B162D4 /* Frameworks */,
34ECFDCA2182DD9300B162D4 /* Resources */,
);
buildRules = (
);
dependencies = (
);
name = lpr;
productName = lpr;
productReference = 34ECFDCC2182DD9300B162D4 /* lpr.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */

/* Begin PBXProject section */
34ECFDC42182DD9300B162D4 /* Project object */ = {
isa = PBXProject;
attributes = {
LastUpgradeCheck = 1000;
ORGANIZATIONNAME = lprSample;
TargetAttributes = {
34ECFDCB2182DD9300B162D4 = {
CreatedOnToolsVersion = 10.0;
};
};
};
buildConfigurationList = 34ECFDC72182DD9300B162D4 /* Build configuration list for PBXProject "lpr" */;
compatibilityVersion = "Xcode 9.3";
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = 34ECFDC32182DD9300B162D4;
productRefGroup = 34ECFDCD2182DD9300B162D4 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
34ECFDCB2182DD9300B162D4 /* lpr */,
);
};
/* End PBXProject section */

/* Begin PBXResourcesBuildPhase section */
34ECFDCA2182DD9300B162D4 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
343B6210219980FE00D03830 /* SegmenationFree-Inception.caffemodel in Resources */,
34ECFE2A2182DF2F00B162D4 /* CharacterRecognization.prototxt in Resources */,
34ECFDDC2182DD9400B162D4 /* LaunchScreen.storyboard in Resources */,
34ECFDD92182DD9400B162D4 /* Assets.xcassets in Resources */,
34ECFE282182DF2F00B162D4 /* HorizonalFinemapping.prototxt in Resources */,
34ECFE242182DF2F00B162D4 /* HorizonalFinemapping.caffemodel in Resources */,
34ECFE272182DF2F00B162D4 /* Segmentation.prototxt in Resources */,
34ECFE262182DF2F00B162D4 /* cascade.xml in Resources */,
34ECFE292182DF2F00B162D4 /* Segmentation.caffemodel in Resources */,
343B6211219980FE00D03830 /* SegmenationFree-Inception.prototxt in Resources */,
34ECFE252182DF2F00B162D4 /* CharacterRecognization.caffemodel in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */

/* Begin PBXShellScriptBuildPhase section */
12CCA15FA936D51F737FDEDF /* [CP] Check Pods Manifest.lock */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputPaths = (
"${PODS_PODFILE_DIR_PATH}/Podfile.lock",
"${PODS_ROOT}/Manifest.lock",
);
name = "[CP] Check Pods Manifest.lock";
outputPaths = (
"$(DERIVED_FILE_DIR)/Pods-lpr-checkManifestLockResult.txt",
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
showEnvVarsInLog = 0;
};
/* End PBXShellScriptBuildPhase section */

/* Begin PBXSourcesBuildPhase section */
34ECFDC82182DD9300B162D4 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
343B620D21997FC800D03830 /* SegmentationFreeRecognizer.cpp in Sources */,
34ECFE1A2182DEF500B162D4 /* FineMapping.cpp in Sources */,
34ECFE182182DEF500B162D4 /* FastDeskew.cpp in Sources */,
34ECFE162182DEF500B162D4 /* PlateDetection.cpp in Sources */,
34ECFDDF2182DD9400B162D4 /* main.m in Sources */,
34ECFE382182E5F100B162D4 /* Utility.mm in Sources */,
34ECFE2D2182E10700B162D4 /* RootViewController.mm in Sources */,
34ECFE172182DEF500B162D4 /* Pipeline.cpp in Sources */,
34ECFE152182DEF500B162D4 /* PlateSegmentation.cpp in Sources */,
34ECFE1B2182DEF500B162D4 /* CNNRecognizer.cpp in Sources */,
34ECFDD12182DD9300B162D4 /* AppDelegate.m in Sources */,
343B621521998AB200D03830 /* CameraViewController.mm in Sources */,
34ECFE192182DEF500B162D4 /* Recognizer.cpp in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */

/* Begin PBXVariantGroup section */
34ECFDDA2182DD9400B162D4 /* LaunchScreen.storyboard */ = {
isa = PBXVariantGroup;
children = (
34ECFDDB2182DD9400B162D4 /* Base */,
);
name = LaunchScreen.storyboard;
sourceTree = "<group>";
};
/* End PBXVariantGroup section */

/* Begin XCBuildConfiguration section */
34ECFDE02182DD9400B162D4 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
CODE_SIGN_IDENTITY = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
HEADER_SEARCH_PATHS = (
"\"$(SRCROOT)/lpr/Source/incllude\"",
"\"$(SRCROOT)\"",
);
IPHONEOS_DEPLOYMENT_TARGET = 10.1;
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
};
name = Debug;
};
34ECFDE12182DD9400B162D4 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
CODE_SIGN_IDENTITY = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
HEADER_SEARCH_PATHS = (
"\"$(SRCROOT)/lpr/Source/incllude\"",
"\"$(SRCROOT)\"",
);
IPHONEOS_DEPLOYMENT_TARGET = 10.1;
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
SDKROOT = iphoneos;
VALIDATE_PRODUCT = YES;
};
name = Release;
};
34ECFDE32182DD9400B162D4 /* Debug */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = E169020B9373AF28E9C472DD /* Pods-lpr.debug.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_CXX_LANGUAGE_STANDARD = "compiler-default";
CLANG_CXX_LIBRARY = "compiler-default";
CODE_SIGN_IDENTITY = "iPhone Developer";
CODE_SIGN_STYLE = Automatic;
DEVELOPMENT_TEAM = YZGMTHK294;
ENABLE_BITCODE = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
HEADER_SEARCH_PATHS = (
"$(inherited)",
"\"${PODS_ROOT}/Headers/Public\"",
"\"${PODS_ROOT}/Headers/Public/OpenCV\"",
);
INFOPLIST_FILE = lpr/Info.plist;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
PRODUCT_BUNDLE_IDENTIFIER = com.TongxingPay.IDCardRecognizeDemo;
PRODUCT_NAME = "$(TARGET_NAME)";
PROVISIONING_PROFILE_SPECIFIER = "";
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
34ECFDE42182DD9400B162D4 /* Release */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 66BAD4CEFABD6705C5100968 /* Pods-lpr.release.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_CXX_LANGUAGE_STANDARD = "compiler-default";
CLANG_CXX_LIBRARY = "compiler-default";
CODE_SIGN_IDENTITY = "iPhone Developer";
CODE_SIGN_STYLE = Automatic;
DEVELOPMENT_TEAM = YZGMTHK294;
ENABLE_BITCODE = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
HEADER_SEARCH_PATHS = (
"$(inherited)",
"\"${PODS_ROOT}/Headers/Public\"",
"\"${PODS_ROOT}/Headers/Public/OpenCV\"",
);
INFOPLIST_FILE = lpr/Info.plist;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
PRODUCT_BUNDLE_IDENTIFIER = com.TongxingPay.IDCardRecognizeDemo;
PRODUCT_NAME = "$(TARGET_NAME)";
PROVISIONING_PROFILE_SPECIFIER = "";
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Release;
};
/* End XCBuildConfiguration section */

/* Begin XCConfigurationList section */
34ECFDC72182DD9300B162D4 /* Build configuration list for PBXProject "lpr" */ = {
isa = XCConfigurationList;
buildConfigurations = (
34ECFDE02182DD9400B162D4 /* Debug */,
34ECFDE12182DD9400B162D4 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
34ECFDE22182DD9400B162D4 /* Build configuration list for PBXNativeTarget "lpr" */ = {
isa = XCConfigurationList;
buildConfigurations = (
34ECFDE32182DD9400B162D4 /* Debug */,
34ECFDE42182DD9400B162D4 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 34ECFDC42182DD9300B162D4 /* Project object */;
}

Prj-iOS/lpr/lpr.xcodeproj/project.xcworkspace/contents.xcworkspacedata (+7 -0)

@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:lpr.xcodeproj">
</FileRef>
</Workspace>

Prj-iOS/lpr/lpr.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist (+8 -0)

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IDEDidComputeMac32BitWarning</key>
<true/>
</dict>
</plist>

Prj-iOS/lpr/lpr.xcodeproj/project.xcworkspace/xcuserdata/tbao.xcuserdatad/UserInterfaceState.xcuserstate (BIN)


Prj-iOS/lpr/lpr.xcodeproj/xcuserdata/tbao.xcuserdatad/xcschemes/xcschememanagement.plist (+19 -0)

@@ -0,0 +1,19 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>lpr.xcscheme</key>
<dict>
<key>orderHint</key>
<integer>2</integer>
</dict>
<key>lpr.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>2</integer>
</dict>
</dict>
</dict>
</plist>

Prj-iOS/lpr/lpr.xcworkspace/contents.xcworkspacedata (+10 -0)

@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "group:lpr.xcodeproj">
</FileRef>
<FileRef
location = "group:Pods/Pods.xcodeproj">
</FileRef>
</Workspace>

Prj-iOS/lpr/lpr.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist (+8 -0)

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IDEDidComputeMac32BitWarning</key>
<true/>
</dict>
</plist>

Prj-iOS/lpr/lpr.xcworkspace/xcuserdata/tbao.xcuserdatad/UserInterfaceState.xcuserstate (BIN)


Prj-iOS/lpr/lpr.xcworkspace/xcuserdata/tbao.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist (+37 -0)

@@ -0,0 +1,37 @@
<?xml version="1.0" encoding="UTF-8"?>
<Bucket
type = "0"
version = "2.0">
<Breakpoints>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.ExceptionBreakpoint">
<BreakpointContent
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
scope = "0"
stopOnStyle = "0">
</BreakpointContent>
</BreakpointProxy>
<BreakpointProxy
BreakpointExtensionID = "Xcode.Breakpoint.OpenGLErrorBreakpoint">
<BreakpointContent
shouldBeEnabled = "Yes"
ignoreCount = "0"
continueAfterRunningActions = "No"
breakpointStackSelectionBehavior = "1"
symbolName = "__GPUTOOLS_HAS_DETECTED_AN_OPENGL_ERROR__"
moduleName = "libglInterpose.dylib">
<Actions>
<BreakpointActionProxy
ActionExtensionID = "Xcode.BreakpointAction.OpenGLError">
<ActionContent>
</ActionContent>
</BreakpointActionProxy>
</Actions>
<Locations>
</Locations>
</BreakpointContent>
</BreakpointProxy>
</Breakpoints>
</Bucket>

Prj-iOS/lpr/lpr/AppDelegate.h (+17 -0)

@@ -0,0 +1,17 @@
//
// AppDelegate.h
// lpr
//
// Created by baotim on 2018/10/26.
// Copyright © 2018 lprSample. All rights reserved.
//

#import <UIKit/UIKit.h>

@interface AppDelegate : UIResponder <UIApplicationDelegate>

@property (strong, nonatomic) UIWindow *window;


@end


Prj-iOS/lpr/lpr/AppDelegate.m (+55 -0)

@@ -0,0 +1,55 @@
//
// AppDelegate.m
// lpr
//
// Created by baotim on 2018/10/26.
// Copyright © 2018 lprSample. All rights reserved.
//

#import "AppDelegate.h"
#import "RootViewController.h"

@interface AppDelegate ()

@end

@implementation AppDelegate


- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
// Override point for customization after application launch.
self.window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
self.window.rootViewController = [[UINavigationController alloc] initWithRootViewController:[RootViewController new]];
[self.window makeKeyAndVisible];
return YES;
}


- (void)applicationWillResignActive:(UIApplication *)application {
// Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
// Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game.
}


- (void)applicationDidEnterBackground:(UIApplication *)application {
// Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
// If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
}


- (void)applicationWillEnterForeground:(UIApplication *)application {
// Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background.
}


- (void)applicationDidBecomeActive:(UIApplication *)application {
// Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
}


- (void)applicationWillTerminate:(UIApplication *)application {
// Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
}


@end

Prj-iOS/lpr/lpr/Assets.xcassets/AppIcon.appiconset/Contents.json (+98 -0)

@@ -0,0 +1,98 @@
{
"images" : [
{
"idiom" : "iphone",
"size" : "20x20",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "20x20",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "29x29",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "29x29",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "40x40",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "40x40",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "60x60",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "60x60",
"scale" : "3x"
},
{
"idiom" : "ipad",
"size" : "20x20",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "20x20",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "29x29",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "29x29",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "40x40",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "40x40",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "76x76",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "76x76",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "83.5x83.5",
"scale" : "2x"
},
{
"idiom" : "ios-marketing",
"size" : "1024x1024",
"scale" : "1x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Prj-iOS/lpr/lpr/Assets.xcassets/Contents.json (+6 -0)

@@ -0,0 +1,6 @@
{
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Prj-iOS/lpr/lpr/Assets.xcassets/back_camera_btn.imageset/Contents.json (+22 -0)

@@ -0,0 +1,22 @@
{
"images" : [
{
"idiom" : "universal",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "back_camera_btn@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "back_camera_btn@3x.png",
"scale" : "3x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Prj-iOS/lpr/lpr/Assets.xcassets/back_camera_btn.imageset/back_camera_btn@2x.png (BIN)
Width: 84 | Height: 84 | Size: 9.9 kB

Prj-iOS/lpr/lpr/Assets.xcassets/back_camera_btn.imageset/back_camera_btn@3x.png (BIN)
Width: 126 | Height: 126 | Size: 15 kB

Prj-iOS/lpr/lpr/Assets.xcassets/flash_camera_btn.imageset/Contents.json (+22 -0)

@@ -0,0 +1,22 @@
{
"images" : [
{
"idiom" : "universal",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "flash_camera_btn@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "flash_camera_btn@3x.png",
"scale" : "3x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Prj-iOS/lpr/lpr/Assets.xcassets/flash_camera_btn.imageset/flash_camera_btn@2x.png (BIN)
Width: 84 | Height: 84 | Size: 10 kB

Prj-iOS/lpr/lpr/Assets.xcassets/flash_camera_btn.imageset/flash_camera_btn@3x.png (BIN)
Width: 126 | Height: 126 | Size: 15 kB

Prj-iOS/lpr/lpr/Assets.xcassets/locker_btn_def.imageset/Contents.json (+22 -0)

@@ -0,0 +1,22 @@
{
"images" : [
{
"idiom" : "universal",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "locker_btn_def@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "locker_btn_def@3x.png",
"scale" : "3x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Prj-iOS/lpr/lpr/Assets.xcassets/locker_btn_def.imageset/locker_btn_def@2x.png (BIN)
Width: 280 | Height: 36 | Size: 5.4 kB

Prj-iOS/lpr/lpr/Assets.xcassets/locker_btn_def.imageset/locker_btn_def@3x.png (BIN)
Width: 420 | Height: 54 | Size: 8.8 kB

Prj-iOS/lpr/lpr/Assets.xcassets/take_pic_btn.imageset/Contents.json (+22 -0)

@@ -0,0 +1,22 @@
{
"images" : [
{
"idiom" : "universal",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "take_pic_btn@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "take_pic_btn@3x.png",
"scale" : "3x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Prj-iOS/lpr/lpr/Assets.xcassets/take_pic_btn.imageset/take_pic_btn@2x.png (BIN)
Width: 144 | Height: 144 | Size: 20 kB

Prj-iOS/lpr/lpr/Assets.xcassets/take_pic_btn.imageset/take_pic_btn@3x.png (BIN)
Width: 238 | Height: 238 | Size: 40 kB

Prj-iOS/lpr/lpr/Base.lproj/LaunchScreen.storyboard (+25 -0)

@@ -0,0 +1,25 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="13122.16" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13104.12"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="EHf-IW-A2E">
<objects>
<viewController id="01J-lp-oVM" sceneMemberID="viewController">
<view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="53" y="375"/>
</scene>
</scenes>
</document>

+ 32
- 0
Prj-iOS/lpr/lpr/CameraViewController.h View File

@@ -0,0 +1,32 @@
//
// CameraViewController.h
// lpr
//
// Created by baotim on 2018/10/26.
// Copyright © 2018 lprSample. All rights reserved.
//

#ifdef __cplusplus
#import <opencv2/opencv.hpp>
#import <opencv2/imgcodecs/ios.h>
#endif

#ifdef __OBJC__
#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <AudioToolbox/AudioToolbox.h>
#import <CoreMedia/CoreMedia.h>
#endif

NS_ASSUME_NONNULL_BEGIN

typedef void(^ResultCallBack)(NSString *result, UIImage *image);

@interface CameraViewController : UIViewController

@property(nonatomic, copy) ResultCallBack resultCB;

@end

NS_ASSUME_NONNULL_END

+ 517
- 0
Prj-iOS/lpr/lpr/CameraViewController.mm View File

@@ -0,0 +1,517 @@
//
// CameraViewController.mm
// lpr
//
// Created by baotim on 2018/10/26.
// Copyright © 2018 lprSample. All rights reserved.
//

#import "CameraViewController.h"
#import "Utility.h"
#import "Pipeline.h"

// Screen width and height
#define kScreenWidth [UIScreen mainScreen].bounds.size.width
#define kScreenHeight [UIScreen mainScreen].bounds.size.height

@interface CameraViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>
{
AVCaptureSession *_session;
AVCaptureDeviceInput *_captureInput;
AVCaptureStillImageOutput *_captureOutput;
AVCaptureVideoPreviewLayer *_preview;
AVCaptureDevice *_device;

NSTimer *_timer; // autofocus timer
BOOL _on; // flash (torch) state
BOOL _capture; // whether the navigation-bar animation has finished
BOOL _isFoucePixel; // whether the device uses phase-detection autofocus
CGRect _imgRect; // crop rect for captured photos
int _count; // recognize every N frames
CGFloat _isLensChanged; // lens position
/* Lens position under phase-detection autofocus; the value keeps changing while the lens moves */
CGFloat _isIOS8AndFoucePixelLensPosition;
/*
Controls recognition speed; the minimum value is 1, and larger values recognize more slowly.
The camera initializer sets the default to 1 (do not change it); if the device uses
phase-detection autofocus, it is raised to 2 (adjustable; minimum 1, larger is slower).
Its purpose is to throttle recognition under phase-detection autofocus, where frames
would otherwise be processed too quickly.
It is set during camera initialization and used in the capture delegate; most users need not modify it.
*/
int _MaxFR;
cv::Mat source_image;
}

@property (assign, nonatomic) BOOL adjustingFocus;
@property (nonatomic, retain) CALayer *customLayer;
@property (nonatomic,assign) BOOL isProcessingImage;

@property (nonatomic, strong) UIImage* image;

@end

@implementation CameraViewController

- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view.
self.view.backgroundColor = [UIColor clearColor];
// Initialize the camera
[self initialize];
// Create the camera UI controls
[self createCameraView];
}

- (void)viewWillAppear:(BOOL)animated {
[super viewWillAppear:animated];
_capture = NO;
[self performSelector:@selector(changeCapture) withObject:nil afterDelay:0.4];
// Without phase-detection autofocus (supported on iPhone 6 and later), use a timer to trigger continuous autofocus
if (!_isFoucePixel) {
_timer = [NSTimer scheduledTimerWithTimeInterval:1.3 target:self selector:@selector(fouceMode) userInfo:nil repeats:YES];
}
AVCaptureDevice*camDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
int flags = NSKeyValueObservingOptionNew;
// Register KVO observers
[camDevice addObserver:self forKeyPath:@"adjustingFocus" options:flags context:nil];
if (_isFoucePixel) {
[camDevice addObserver:self forKeyPath:@"lensPosition" options:flags context:nil];
}
[_session startRunning];
}

- (void)viewWillDisappear:(BOOL)animated {
[super viewWillDisappear:animated];
self.navigationController.navigationBarHidden = NO;
}

- (void)viewDidDisappear:(BOOL)animated {
[super viewDidDisappear:animated];
if (!_isFoucePixel) {
[_timer invalidate];
_timer = nil;
}
AVCaptureDevice*camDevice =[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
[camDevice removeObserver:self forKeyPath:@"adjustingFocus"];
if (_isFoucePixel) {
[camDevice removeObserver:self forKeyPath:@"lensPosition"];
}
[_session stopRunning];
_capture = NO;
}

- (void)changeCapture {
_capture = YES;
}

#pragma mark - Private Methods
// Initialize the camera
- (void)initialize {
// Check camera authorization
AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
if(authStatus == AVAuthorizationStatusRestricted || authStatus == AVAuthorizationStatusDenied){
self.view.backgroundColor = [UIColor blackColor];
// UIAlertView * alt = [[UIAlertView alloc] initWithTitle:@"未获得授权使用摄像头" message:@"请在'设置-隐私-相机'打开" delegate:self cancelButtonTitle:nil otherButtonTitles:@"OK", nil];
// [alt show];
UIAlertController * alert = [UIAlertController
alertControllerWithTitle:@"未获得授权使用摄像头"
message:@"请在'设置-隐私-相机'打开"
preferredStyle:UIAlertControllerStyleAlert];
__weak typeof(self) weakSelf = self;
UIAlertAction* yesButton = [UIAlertAction
actionWithTitle:@"OK"
style:UIAlertActionStyleDefault
handler:^(UIAlertAction * action) {
// Pop back after the user taps OK
[weakSelf.navigationController popViewControllerAnimated:YES];
}];
[alert addAction:yesButton];
[self presentViewController:alert animated:YES completion:nil];
return;
}
_MaxFR = 1;
// 1. Create the capture session
_session = [[AVCaptureSession alloc] init];
[_session setSessionPreset:AVCaptureSessionPreset1920x1080];
// 2. Create and configure the input device
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices){
if (device.position == AVCaptureDevicePositionBack){
_device = device;
_captureInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:nil];
}
}
[_session addInput:_captureInput];
// 3. Create the video data output
AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
captureOutput.alwaysDiscardsLateVideoFrames = YES;
dispatch_queue_t queue;
queue = dispatch_queue_create("cameraQueue", NULL);
[captureOutput setSampleBufferDelegate:self queue:queue];
NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
[captureOutput setVideoSettings:videoSettings];
[_session addOutput:captureOutput];
// 4. Create and configure the still-image output
_captureOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG,AVVideoCodecKey,nil];
[_captureOutput setOutputSettings:outputSettings];
[_session addOutput:_captureOutput];
// 5. Set up the preview layer
_preview = [AVCaptureVideoPreviewLayer layerWithSession: _session];
_preview.frame = CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height);
_preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.view.layer addSublayer:_preview];
// Detect phase-detection autofocus support
if ([[[UIDevice currentDevice] systemVersion] floatValue] >= 8.0) {
AVCaptureDeviceFormat *deviceFormat = _device.activeFormat;
if (deviceFormat.autoFocusSystem == AVCaptureAutoFocusSystemPhaseDetection){
_isFoucePixel = YES;
_MaxFR = 2;
}
}
}

- (void)createCameraView
{
// Set up the overlay mask
CAShapeLayer *maskWithHole = [CAShapeLayer layer];
// Both frames are defined in the same coordinate system
CGRect biggerRect = self.view.bounds;
CGFloat offset = 1.0f;
if ([[UIScreen mainScreen] scale] >= 2) {
offset = 0.5;
}
CGRect smallFrame = CGRectMake(45, 100, 300, 500);
CGRect smallerRect = CGRectInset(smallFrame, -offset, -offset) ;
UIBezierPath *maskPath = [UIBezierPath bezierPath];
[maskPath moveToPoint:CGPointMake(CGRectGetMinX(biggerRect), CGRectGetMinY(biggerRect))];
[maskPath addLineToPoint:CGPointMake(CGRectGetMinX(biggerRect), CGRectGetMaxY(biggerRect))];
[maskPath addLineToPoint:CGPointMake(CGRectGetMaxX(biggerRect), CGRectGetMaxY(biggerRect))];
[maskPath addLineToPoint:CGPointMake(CGRectGetMaxX(biggerRect), CGRectGetMinY(biggerRect))];
[maskPath addLineToPoint:CGPointMake(CGRectGetMinX(biggerRect), CGRectGetMinY(biggerRect))];
[maskPath moveToPoint:CGPointMake(CGRectGetMinX(smallerRect), CGRectGetMinY(smallerRect))];
[maskPath addLineToPoint:CGPointMake(CGRectGetMinX(smallerRect), CGRectGetMaxY(smallerRect))];
[maskPath addLineToPoint:CGPointMake(CGRectGetMaxX(smallerRect), CGRectGetMaxY(smallerRect))];
[maskPath addLineToPoint:CGPointMake(CGRectGetMaxX(smallerRect), CGRectGetMinY(smallerRect))];
[maskPath addLineToPoint:CGPointMake(CGRectGetMinX(smallerRect), CGRectGetMinY(smallerRect))];
[maskWithHole setPath:[maskPath CGPath]];
[maskWithHole setFillRule:kCAFillRuleEvenOdd];
[maskWithHole setFillColor:[[UIColor colorWithWhite:0 alpha:0.35] CGColor]];
[self.view.layer addSublayer:maskWithHole];
[self.view.layer setMasksToBounds:YES];
/* Camera buttons, laid out for both iPhone and iPad; adjust the UI to your own needs */
// Back and flash buttons
CGFloat backWidth = 35;
if (kScreenHeight>=1024) {
backWidth = 50;
}
CGFloat s = 80;
CGFloat s1 = 0;
if (kScreenHeight==480) {
s = 60;
s1 = 10;
}
UIButton *backBtn = [[UIButton alloc]initWithFrame:CGRectMake(kScreenWidth/16,kScreenWidth/16-s1, backWidth, backWidth)];
[backBtn addTarget:self action:@selector(backAction) forControlEvents:UIControlEventTouchUpInside];
[backBtn setImage:[UIImage imageNamed:@"back_camera_btn"] forState:UIControlStateNormal];
backBtn.titleLabel.textAlignment = NSTextAlignmentLeft;
[self.view addSubview:backBtn];
UIButton *flashBtn = [[UIButton alloc]initWithFrame:CGRectMake(kScreenWidth-kScreenWidth/16-backWidth,kScreenWidth/16-s1, backWidth, backWidth)];
[flashBtn setImage:[UIImage imageNamed:@"flash_camera_btn"] forState:UIControlStateNormal];
[flashBtn addTarget:self action:@selector(modeBtn) forControlEvents:UIControlEventTouchUpInside];
[self.view addSubview:flashBtn];
// Photo controls: pull-up button and shutter button
UIButton *upBtn = [[UIButton alloc]initWithFrame:CGRectMake(kScreenWidth/2-60, kScreenHeight-20, 120, 20)];
upBtn.tag = 1001;
[upBtn addTarget:self action:@selector(upBtn:) forControlEvents:UIControlEventTouchUpInside];
[upBtn setImage:[UIImage imageNamed:@"locker_btn_def"] forState:UIControlStateNormal];
[self.view addSubview:upBtn];
UIButton *photoBtn = [[UIButton alloc]initWithFrame:CGRectMake(kScreenWidth/2-30,kScreenHeight-s,60, 60)];
photoBtn.tag = 1000;
photoBtn.hidden = YES;
[photoBtn setImage:[UIImage imageNamed:@"take_pic_btn"] forState:UIControlStateNormal];
[photoBtn addTarget:self action:@selector(photoBtn) forControlEvents:UIControlEventTouchUpInside];
[photoBtn setTitleColor:[UIColor grayColor] forState:UIControlStateHighlighted];
[self.view addSubview:photoBtn];
[self.view bringSubviewToFront:photoBtn];
}

// Hide the status bar
- (UIStatusBarStyle)preferredStatusBarStyle{
return UIStatusBarStyleDefault;
}
- (BOOL)prefersStatusBarHidden{
return YES;
}

// Trigger autofocus
- (void)fouceMode
{
NSError *error;
AVCaptureDevice *device = [self cameraWithPosition:AVCaptureDevicePositionBack];
if ([device isFocusModeSupported:AVCaptureFocusModeAutoFocus])
{
if ([device lockForConfiguration:&error]) {
CGPoint cameraPoint = [_preview captureDevicePointOfInterestForPoint:self.view.center];
[device setFocusPointOfInterest:cameraPoint];
[device setFocusMode:AVCaptureFocusModeAutoFocus];
[device unlockForConfiguration];
} else {
//NSLog(@"Error: %@", error);
}
}
}

- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices){
if (device.position == position){
return device;
}
}
return nil;
}

- (NSString *)getPath:(NSString*)fileName
{
NSString *bundlePath = [NSBundle mainBundle].bundlePath;
NSString *path = [bundlePath stringByAppendingPathComponent:fileName];
return path;
}

- (NSString *)simpleRecognition:(cv::Mat&)src
{
NSString *path_1 = [self getPath:@"cascade.xml"];
NSString *path_2 = [self getPath:@"HorizonalFinemapping.prototxt"];
NSString *path_3 = [self getPath:@"HorizonalFinemapping.caffemodel"];
NSString *path_4 = [self getPath:@"Segmentation.prototxt"];
NSString *path_5 = [self getPath:@"Segmentation.caffemodel"];
NSString *path_6 = [self getPath:@"CharacterRecognization.prototxt"];
NSString *path_7 = [self getPath:@"CharacterRecognization.caffemodel"];
NSString *path_8 = [self getPath:@"SegmenationFree-Inception.prototxt"];
NSString *path_9 = [self getPath:@"SegmenationFree-Inception.caffemodel"];
std::string cpath_1([path_1 UTF8String]);
std::string cpath_2([path_2 UTF8String]);
std::string cpath_3([path_3 UTF8String]);
std::string cpath_4([path_4 UTF8String]);
std::string cpath_5([path_5 UTF8String]);
std::string cpath_6([path_6 UTF8String]);
std::string cpath_7([path_7 UTF8String]);
std::string cpath_8([path_8 UTF8String]);
std::string cpath_9([path_9 UTF8String]);
pr::PipelinePR pr2(cpath_1, cpath_2, cpath_3, cpath_4, cpath_5, cpath_6, cpath_7, cpath_8, cpath_9);
std::vector<pr::PlateInfo> list_res = pr2.RunPiplineAsImage(src, pr::SEGMENTATION_FREE_METHOD);
std::string concat_results = "";
for(auto one:list_res) {
if(one.confidence>0.7) {
concat_results += one.getPlateName()+",";
}
}
NSString *str = [NSString stringWithCString:concat_results.c_str() encoding:NSUTF8StringEncoding];
if (str.length > 0) {
str = [str substringToIndex:str.length-1];
str = [NSString stringWithFormat:@"%@",str];
} else {
str = [NSString stringWithFormat:@"未识别成功"];
}
NSLog(@"===> 识别结果 = %@", str);
return str;
}

#pragma mark - Actions
// Back button tap handler
- (void)backAction
{
[self dismissViewControllerAnimated:YES completion:nil];
}

// Flash button tap handler
- (void)modeBtn
{
if (![_device hasTorch]) {
//NSLog(@"no torch");
} else {
[_device lockForConfiguration:nil];
if (!_on) {
[_device setTorchMode: AVCaptureTorchModeOn];
_on = YES;
}else{
[_device setTorchMode: AVCaptureTorchModeOff];
_on = NO;
}
[_device unlockForConfiguration];
}
}

// Pull-up button tap handler
- (void)upBtn:(UIButton *)upBtn
{
UIButton *photoBtn = (UIButton *)[self.view viewWithTag:1000];
photoBtn.hidden = NO;
upBtn.hidden = YES;
}

// Shutter button tap handler
- (void)photoBtn
{
self.isProcessingImage = YES;
//get connection
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in _captureOutput.connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo] ) {
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
//get UIImage
__weak typeof(self) weakSelf = self;
[_captureOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:
^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
if (imageSampleBuffer != NULL) {
// Stop the capture session
[_session stopRunning];

NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *tempImage = [[UIImage alloc] initWithData:imageData];

UIImage *temp_image = [Utility scaleAndRotateImageBackCamera:tempImage];
source_image = [Utility cvMatFromUIImage:temp_image];
NSString* text = [weakSelf simpleRecognition:source_image];

NSMutableDictionary* resultDict = [NSMutableDictionary new];
resultDict[@"image"] = temp_image;
resultDict[@"text"] = text;
[self performSelectorOnMainThread:@selector(readyToGetImage:) withObject:resultDict waitUntilDone:NO];
weakSelf.isProcessingImage = NO;
}
}];
}

// Grab frames from the camera buffer
#pragma mark - AVCaptureSession delegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer,0);
uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
/*Create a CGImageRef from the CVImageBufferRef*/
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGImageRef newImage = CGBitmapContextCreateImage(newContext);
/*We release some components*/
CGContextRelease(newContext);
CGColorSpaceRelease(colorSpace);
/*We display the result on the image view (We need to change the orientation of the image so that the video is displayed correctly)*/
self.image = [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationUp];
/*We relase the CGImageRef*/
CGImageRelease(newImage);
// Edge detection and recognition
if (_capture == YES) { // navigation-bar animation finished
if (self.isProcessingImage==NO) { // skip recognition once the shutter has been tapped
if (!self.adjustingFocus) { // under contrast autofocus, only when not currently focusing (this flag never changes under phase-detection autofocus)
if (_isLensChanged == _isIOS8AndFoucePixelLensPosition) {
_count++;
if (_count >= _MaxFR) {
// Run recognition
UIImage *temp_image = [Utility scaleAndRotateImageBackCamera:self.image];
source_image = [Utility cvMatFromUIImage:temp_image];
NSString* text = [self simpleRecognition:source_image];
if (text.length == 7) { // recognition succeeded
_count = 0;
// Stop the capture session
[_session stopRunning];
// Vibrate to signal success
AudioServicesPlaySystemSound(kSystemSoundID_Vibrate);
NSMutableDictionary* resultDict = [NSMutableDictionary new];
resultDict[@"image"] = temp_image;
resultDict[@"text"] = text;

[self performSelectorOnMainThread:@selector(readyToGetImage:) withObject:resultDict waitUntilDone:NO];
}
}
} else {
_isLensChanged = _isIOS8AndFoucePixelLensPosition;
_count = 0;
}
}
}
}
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
}

// Edges found; hand back the captured result
- (void)readyToGetImage:(NSDictionary *)resultDict
{
[self dismissViewControllerAnimated:NO completion:^{
}];
if (self.resultCB) {
self.resultCB(resultDict[@"text"], resultDict[@"image"]);
}
}

- (void)observeValueForKeyPath:(NSString*)keyPath ofObject:(id)object change:(NSDictionary*)change context:(void*)context {
/* Contrast autofocus: observe focus-adjustment changes */
if([keyPath isEqualToString:@"adjustingFocus"]){
self.adjustingFocus =[[change objectForKey:NSKeyValueChangeNewKey] isEqualToNumber:[NSNumber numberWithInt:1]];
}
/* Phase-detection autofocus: observe the lens position */
if([keyPath isEqualToString:@"lensPosition"]){
_isIOS8AndFoucePixelLensPosition =[[change objectForKey:NSKeyValueChangeNewKey] floatValue];
//NSLog(@"监听_isIOS8AndFoucePixelLensPosition == %f",_isIOS8AndFoucePixelLensPosition);
}
}


@end
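
Note on performance: -simpleRecognition: constructs a fresh pr::PipelinePR, reloading the cascade and all eight Caffe models, every time it runs, and the capture delegate may call it several times per second. A minimal sketch of loading the pipeline once and reusing it across frames (the sharedPipeline helper is our own naming, not part of this commit):

#include "Pipeline.h"

// Build the pipeline on first use and reuse it afterwards; function-local
// statics are initialized thread-safely since C++11.
static pr::PipelinePR &sharedPipeline(const std::vector<std::string> &paths) {
    static pr::PipelinePR pipeline(paths[0], paths[1], paths[2], paths[3], paths[4],
                                   paths[5], paths[6], paths[7], paths[8]);
    return pipeline;
}

The same applies to -freeRecognition: in RootViewController.mm below.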

+ 45
- 0
Prj-iOS/lpr/lpr/Info.plist View File

@@ -0,0 +1,45 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>$(DEVELOPMENT_LANGUAGE)</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>NSCameraUsageDescription</key>
<string>欢迎使用相机</string>
<key>NSPhotoLibraryUsageDescription</key>
<string>欢迎使用相册</string>
<key>UILaunchStoryboardName</key>
<string>LaunchScreen</string>
<key>UIRequiredDeviceCapabilities</key>
<array>
<string>armv7</string>
</array>
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
</array>
<key>UISupportedInterfaceOrientations~ipad</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationPortraitUpsideDown</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
</dict>
</plist>

+ 26
- 0
Prj-iOS/lpr/lpr/RootViewController.h View File

@@ -0,0 +1,26 @@
//
// RootViewController.h
// lpr
//
// Created by baotim on 2018/10/26.
// Copyright © 2018 lprSample. All rights reserved.
//

#ifdef __cplusplus
#import <opencv2/opencv.hpp>
#import <opencv2/imgcodecs/ios.h>
#endif

#ifdef __OBJC__
#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#endif

NS_ASSUME_NONNULL_BEGIN

@interface RootViewController : UIViewController

@end

NS_ASSUME_NONNULL_END

+ 201
- 0
Prj-iOS/lpr/lpr/RootViewController.mm View File

@@ -0,0 +1,201 @@
//
// RootViewController.mm
// lpr
//
// Created by baotim on 2018/10/26.
// Copyright © 2018 lprSample. All rights reserved.
//

#import "RootViewController.h"
#import <Masonry/Masonry.h>
#import "CameraViewController.h"
#import "Utility.h"
#import "Pipeline.h"

@interface RootViewController () <UIImagePickerControllerDelegate, UINavigationControllerDelegate>

@property (nonatomic, strong) UIButton* albumButton;
@property (nonatomic, strong) UIButton* cameraButton;
@property (nonatomic, strong) UIImageView* imageView;
@property (nonatomic, strong) UILabel* resultLabel;

@end

@implementation RootViewController
{
cv::Mat source_image;
}

#pragma mark - Lazy Initialize
- (UIButton *)albumButton
{
if (!_albumButton) {
_albumButton = [UIButton buttonWithType:UIButtonTypeCustom];
[_albumButton setTitle:@"打开相册" forState:UIControlStateNormal];
[_albumButton setBackgroundColor:[UIColor redColor]];
[_albumButton addTarget:self action:@selector(openAlbum) forControlEvents:UIControlEventTouchUpInside];
}
return _albumButton;
}

- (UIButton *)cameraButton
{
if (!_cameraButton) {
_cameraButton = [UIButton buttonWithType:UIButtonTypeCustom];
[_cameraButton setBackgroundColor:[UIColor redColor]];
[_cameraButton setTitle:@"实时拍照" forState:UIControlStateNormal];
[_cameraButton addTarget:self action:@selector(openCamera) forControlEvents:UIControlEventTouchUpInside];
}
return _cameraButton;
}

- (UIImageView *)imageView
{
if (!_imageView) {
_imageView = [[UIImageView alloc] init];
_imageView.contentMode = UIViewContentModeScaleAspectFit;
}
return _imageView;
}

- (UILabel *)resultLabel
{
if (!_resultLabel) {
_resultLabel = [UILabel new];
_resultLabel.textColor = [UIColor redColor];
_resultLabel.textAlignment = NSTextAlignmentLeft;
_resultLabel.font = [UIFont systemFontOfSize:15];
}
return _resultLabel;
}

#pragma mark - Life Cycle
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view.
self.view.backgroundColor = [UIColor whiteColor];
self.navigationItem.title = @"车牌识别Demo";
[self.view addSubview:self.albumButton];
[self.albumButton mas_makeConstraints:^(MASConstraintMaker* make) {
make.centerX.equalTo(self.view).offset(-80);
make.bottom.equalTo(self.mas_bottomLayoutGuideTop).offset(-20);
make.width.mas_equalTo(100);
make.height.mas_equalTo(50);
}];
[self.view addSubview:self.cameraButton];
[self.cameraButton mas_makeConstraints:^(MASConstraintMaker* make) {
make.centerX.equalTo(self.view).offset(80);
make.bottom.equalTo(self.albumButton);
make.width.mas_equalTo(100);
make.height.mas_equalTo(50);
}];
[self.view addSubview:self.resultLabel];
[self.resultLabel mas_makeConstraints:^(MASConstraintMaker* make) {
make.left.right.equalTo(self.view);
make.bottom.lessThanOrEqualTo(self.albumButton.mas_top);
}];
[self.view addSubview:self.imageView];
[self.imageView mas_makeConstraints:^(MASConstraintMaker* make) {
make.top.left.right.equalTo(self.view);
make.bottom.lessThanOrEqualTo(self.resultLabel.mas_top);
}];
}

#pragma mark - Actions
- (void)openAlbum
{
UIImagePickerController* picker = [[UIImagePickerController alloc] init];
picker.delegate = self;
if (![UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypePhotoLibrary])
return;
picker.sourceType = UIImagePickerControllerSourceTypePhotoLibrary;
[self presentViewController:picker animated:YES completion:nil];
}

- (void)openCamera
{
CameraViewController* video = [CameraViewController new];
video.resultCB = ^(NSString* text, UIImage* image) {
self.imageView.image = image;
self.resultLabel.text = text;
};
[self presentViewController:video animated:YES completion:nil];
}

#pragma mark - Private Methods
- (NSString *)getPathFromBundle:(NSString*)fileName
{
NSString *bundlePath = [NSBundle mainBundle].bundlePath;
NSString *path = [bundlePath stringByAppendingPathComponent:fileName];
return path;
}

- (void)freeRecognition:(cv::Mat&)src
{
NSString *path_1 = [self getPathFromBundle:@"cascade.xml"];
NSString *path_2 = [self getPathFromBundle:@"HorizonalFinemapping.prototxt"];
NSString *path_3 = [self getPathFromBundle:@"HorizonalFinemapping.caffemodel"];
NSString *path_4 = [self getPathFromBundle:@"Segmentation.prototxt"];
NSString *path_5 = [self getPathFromBundle:@"Segmentation.caffemodel"];
NSString *path_6 = [self getPathFromBundle:@"CharacterRecognization.prototxt"];
NSString *path_7 = [self getPathFromBundle:@"CharacterRecognization.caffemodel"];
NSString *path_8 = [self getPathFromBundle:@"SegmenationFree-Inception.prototxt"];
NSString *path_9 = [self getPathFromBundle:@"SegmenationFree-Inception.caffemodel"];
std::string cpath_1([path_1 UTF8String]);
std::string cpath_2([path_2 UTF8String]);
std::string cpath_3([path_3 UTF8String]);
std::string cpath_4([path_4 UTF8String]);
std::string cpath_5([path_5 UTF8String]);
std::string cpath_6([path_6 UTF8String]);
std::string cpath_7([path_7 UTF8String]);
std::string cpath_8([path_8 UTF8String]);
std::string cpath_9([path_9 UTF8String]);
pr::PipelinePR pr2(cpath_1, cpath_2, cpath_3, cpath_4, cpath_5, cpath_6, cpath_7, cpath_8, cpath_9);
std::vector<pr::PlateInfo> list_res = pr2.RunPiplineAsImage(src, pr::SEGMENTATION_FREE_METHOD);
std::string concat_results = "";
for(auto one:list_res) {
if(one.confidence>0.7) {
concat_results += one.getPlateName()+",";
}
}
NSString *str = [NSString stringWithCString:concat_results.c_str() encoding:NSUTF8StringEncoding];
if (str.length > 0) {
str = [str substringToIndex:str.length-1];
str = [NSString stringWithFormat:@"识别结果: %@",str];
} else {
str = [NSString stringWithFormat:@"识别结果: 未识别成功"];
}
[self.resultLabel setText:str];
}

#pragma mark - UIImagePickerControllerDelegate
- (void)imagePickerController:(UIImagePickerController*)picker didFinishPickingMediaWithInfo:(NSDictionary *)info
{
[picker dismissViewControllerAnimated:YES completion:nil];
UIImage* temp = [info objectForKey:@"UIImagePickerControllerOriginalImage"];
UIImage *temp_image = [Utility scaleAndRotateImageBackCamera:temp];
source_image = [Utility cvMatFromUIImage:temp_image];
[self freeRecognition:source_image];
self.imageView.image = temp;
}

- (void)imagePickerControllerDidCancel:(UIImagePickerController *)picker
{
[picker dismissViewControllerAnimated:YES completion:nil];
}

@end

+ 24
- 0
Prj-iOS/lpr/lpr/Source/include/CNNRecognizer.h View File

@@ -0,0 +1,24 @@
//
// Created by Jack Yu on 21/10/2017.
//

#ifndef SWIFTPR_CNNRECOGNIZER_H
#define SWIFTPR_CNNRECOGNIZER_H

#include "Recognizer.h"
namespace pr{
class CNNRecognizer: public GeneralRecognizer{
public:
const int CHAR_INPUT_W = 14;
const int CHAR_INPUT_H = 30;

CNNRecognizer(std::string prototxt,std::string caffemodel);
label recognizeCharacter(cv::Mat character);
private:
cv::dnn::Net net;

};

}

#endif //SWIFTPR_CNNRECOGNIZER_H

+ 18
- 0
Prj-iOS/lpr/lpr/Source/include/FastDeskew.h View File

@@ -0,0 +1,18 @@
//
// Created by 庾金科 on 22/09/2017.
//

#ifndef SWIFTPR_FASTDESKEW_H
#define SWIFTPR_FASTDESKEW_H

#include <opencv2/opencv.hpp>
#include <math.h>
namespace pr{

cv::Mat fastdeskew(cv::Mat skewImage,int blockSize);
// cv::Mat spatialTransformer(cv::Mat skewImage);

}// namespace pr


#endif //SWIFTPR_FASTDESKEW_H

+ 32
- 0
Prj-iOS/lpr/lpr/Source/include/FineMapping.h View File

@@ -0,0 +1,32 @@
//
// Created by 庾金科 on 22/09/2017.
//

#ifndef SWIFTPR_FINEMAPPING_H
#define SWIFTPR_FINEMAPPING_H

#include <opencv2/opencv.hpp>
#include <opencv2/dnn.hpp>

#include <string>
namespace pr{
class FineMapping{
public:
FineMapping();


FineMapping(std::string prototxt,std::string caffemodel);
static cv::Mat FineMappingVertical(cv::Mat InputProposal,int sliceNum=15,int upper=0,int lower=-50,int windows_size=17);
cv::Mat FineMappingHorizon(cv::Mat FinedVertical,int leftPadding,int rightPadding);


private:
cv::dnn::Net net;

};




}
#endif //SWIFTPR_FINEMAPPING_H
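
Pipeline.cpp drives the two passes back to back (with pr::fastdeskew in between); a condensed sketch, assuming the model files sit next to the binary:

#include "FineMapping.h"

cv::Mat refinePlate(const cv::Mat &roughPlate) {
    pr::FineMapping fm("HorizonalFinemapping.prototxt", "HorizonalFinemapping.caffemodel");
    // The vertical pass is static and uses the header defaults
    // (sliceNum=15, upper=0, lower=-50, windows_size=17).
    cv::Mat vertical = pr::FineMapping::FineMappingVertical(roughPlate);
    // Paddings 4 and 7 match the segmentation-free call site in Pipeline.cpp.
    return fm.FineMappingHorizon(vertical, 4, 7);
}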

+ 45
- 0
Prj-iOS/lpr/lpr/Source/include/Pipeline.h View File

@@ -0,0 +1,45 @@
//
// Created by 庾金科 on 22/10/2017.
//

#ifndef SWIFTPR_PIPLINE_H
#define SWIFTPR_PIPLINE_H

#include "PlateDetection.h"
#include "PlateSegmentation.h"
#include "CNNRecognizer.h"
#include "PlateInfo.h"
#include "FastDeskew.h"
#include "FineMapping.h"
#include "Recognizer.h"
#include "SegmentationFreeRecognizer.h"

namespace pr{

// std::string str_code[]={"京", "沪", "津", "渝", "冀", "晋", "蒙", "辽", "吉", "黑", "苏", "浙", "皖", "闽", "赣", "鲁", "豫", "鄂", "湘", "粤", "桂", "琼", "川", "贵", "云", "藏", "陕", "甘", "青", "宁", "新", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "A", "B", "C", "D", "E", "F", "G", "H", "J", "K", "L", "M", "N", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z","港","学","使","警","澳","挂","军","北","南","广","沈","兰","成","济","海","民","航","空"};
// const std::vector<std::string> CH_PLATE_CODE(str_code, str_code+83);

const int SEGMENTATION_FREE_METHOD = 0;
const int SEGMENTATION_BASED_METHOD = 1;

class PipelinePR {
public:
GeneralRecognizer *generalRecognizer;
PlateDetection *plateDetection;
PlateSegmentation *plateSegmentation;
FineMapping *fineMapping;
SegmentationFreeRecognizer *segmentationFreeRecognizer;

PipelinePR(std::string detector_filename,
std::string finemapping_prototxt,std::string finemapping_caffemodel,
std::string segmentation_prototxt,std::string segmentation_caffemodel,
std::string charRecognization_proto,std::string charRecognization_caffemodel,
std::string segmentationfree_proto,std::string segmentationfree_caffemodel
);
~PipelinePR();

std::vector<std::string> plateRes;
std::vector<PlateInfo> RunPiplineAsImage(cv::Mat plateImage,int method);
};
}
#endif //SWIFTPR_PIPLINE_H
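
For orientation, a minimal end-to-end usage sketch of this class; the model file names match the Resource/ directory added in this commit, and the image path is an assumption:

#include <iostream>
#include "Pipeline.h"

int main() {
    pr::PipelinePR pipeline("cascade.xml",
                            "HorizonalFinemapping.prototxt", "HorizonalFinemapping.caffemodel",
                            "Segmentation.prototxt", "Segmentation.caffemodel",
                            "CharacterRecognization.prototxt", "CharacterRecognization.caffemodel",
                            "SegmenationFree-Inception.prototxt", "SegmenationFree-Inception.caffemodel");
    cv::Mat frame = cv::imread("car.jpg");
    std::vector<pr::PlateInfo> plates = pipeline.RunPiplineAsImage(frame, pr::SEGMENTATION_FREE_METHOD);
    for (pr::PlateInfo &p : plates)
        if (p.confidence > 0.7)  // same threshold the iOS code uses
            std::cout << p.getPlateName() << std::endl;
    return 0;
}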

+ 33
- 0
Prj-iOS/lpr/lpr/Source/include/PlateDetection.h View File

@@ -0,0 +1,33 @@
//
// Created by 庾金科 on 20/09/2017.
//

#ifndef SWIFTPR_PLATEDETECTION_H
#define SWIFTPR_PLATEDETECTION_H

#include <opencv2/opencv.hpp>
#include "PlateInfo.h"
#include <vector>
namespace pr{
class PlateDetection{
public:
PlateDetection(std::string filename_cascade);
PlateDetection();
void LoadModel(std::string filename_cascade);
void plateDetectionRough(cv::Mat InputImage,std::vector<pr::PlateInfo> &plateInfos,int min_w=36,int max_w=800);
// std::vector<pr::PlateInfo> plateDetectionRough(cv::Mat InputImage,int min_w= 60,int max_h = 400);


// std::vector<pr::PlateInfo> plateDetectionRoughByMultiScaleEdge(cv::Mat InputImage);



private:
cv::CascadeClassifier cascade;


};

}// namespace pr

#endif //SWIFTPR_PLATEDETECTION_H

+ 126
- 0
Prj-iOS/lpr/lpr/Source/include/PlateInfo.h View File

@@ -0,0 +1,126 @@
//
// Created by 庾金科 on 20/09/2017.
//

#ifndef SWIFTPR_PLATEINFO_H
#define SWIFTPR_PLATEINFO_H
#include <opencv2/opencv.hpp>
namespace pr {

typedef std::vector<cv::Mat> Character;

enum PlateColor { BLUE, YELLOW, WHITE, GREEN, BLACK,UNKNOWN};
enum CharType {CHINESE,LETTER,LETTER_NUMS,INVALID};


class PlateInfo {
public:
std::vector<std::pair<CharType,cv::Mat> > plateChars;
std::vector<std::pair<CharType,cv::Mat> > plateCoding;
float confidence = 0;
PlateInfo(const cv::Mat &plateData, std::string plateName, cv::Rect plateRect, PlateColor plateType) {
licensePlate = plateData;
name = plateName;
ROI = plateRect;
Type = plateType;
}
PlateInfo(const cv::Mat &plateData, cv::Rect plateRect, PlateColor plateType) {
licensePlate = plateData;
ROI = plateRect;
Type = plateType;
}
PlateInfo(const cv::Mat &plateData, cv::Rect plateRect) {
licensePlate = plateData;
ROI = plateRect;
}
PlateInfo() {

}

cv::Mat getPlateImage() {
return licensePlate;
}

void setPlateImage(cv::Mat plateImage){
licensePlate = plateImage;
}

cv::Rect getPlateRect() {
return ROI;
}

void setPlateRect(cv::Rect plateRect) {
ROI = plateRect;
}
cv::String getPlateName() {
return name;

}
void setPlateName(cv::String plateName) {
name = plateName;
}
int getPlateType() {
return Type;
}

void appendPlateChar(const std::pair<CharType,cv::Mat> &plateChar)
{
plateChars.push_back(plateChar);
}

void appendPlateCoding(const std::pair<CharType,cv::Mat> &charProb){
plateCoding.push_back(charProb);
}

// cv::Mat getPlateChars(int id) {
// if(id<PlateChars.size())
// return PlateChars[id];
// }
std::string decodePlateNormal(std::vector<std::string> mappingTable) {
std::string decode;
for(auto plate:plateCoding) {
float *prob = (float *)plate.second.data;
if(plate.first == CHINESE) {

decode += mappingTable[std::max_element(prob,prob+31) - prob];
confidence+=*std::max_element(prob,prob+31);


// std::cout<<*std::max_element(prob,prob+31)<<std::endl;

}

else if(plate.first == LETTER) {
decode += mappingTable[std::max_element(prob+41,prob+65)- prob];
confidence+=*std::max_element(prob+41,prob+65);
}

else if(plate.first == LETTER_NUMS) {
decode += mappingTable[std::max_element(prob+31,prob+65)- prob];
confidence+=*std::max_element(prob+31,prob+65);
// std::cout<<*std::max_element(prob+31,prob+65)<<std::endl;

}
else if(plate.first == INVALID)
{
decode+='*';
}

}
name = decode;

confidence/=7;

return decode;
}

private:
cv::Mat licensePlate;
cv::Rect ROI;
std::string name ;
PlateColor Type;
};
}


#endif //SWIFTPR_PLATEINFO_H
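
The hard-coded offsets in decodePlateNormal mirror the layout of the 83-entry CH_PLATE_CODE table in Pipeline.cpp: indices [0,31) are province characters, [31,41) digits, [41,65) letters, and [65,83) special suffixes. A small helper equivalent to one decoding step (decodeSlot is our own name, for illustration only):

#include <algorithm>
#include <string>
#include <vector>

// Argmax of prob[lo, hi), mapped through the plate-code table:
// CHINESE uses [0,31), LETTER [41,65), LETTER_NUMS [31,65).
static std::string decodeSlot(const float *prob, int lo, int hi,
                              const std::vector<std::string> &table) {
    const float *best = std::max_element(prob + lo, prob + hi);
    return table[best - prob];
}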

+ 35
- 0
Prj-iOS/lpr/lpr/Source/include/PlateSegmentation.h View File

@@ -0,0 +1,35 @@
#ifndef SWIFTPR_PLATESEGMENTATION_H
#define SWIFTPR_PLATESEGMENTATION_H

#include "opencv2/opencv.hpp"
#include <opencv2/dnn.hpp>
#include "PlateInfo.h"

namespace pr{


class PlateSegmentation{
public:
const int PLATE_NORMAL = 6;
const int PLATE_NORMAL_GREEN = 7;
const int DEFAULT_WIDTH = 20;
PlateSegmentation(std::string prototxt,std::string caffemodel);
PlateSegmentation(){}
void segmentPlatePipline(PlateInfo &plateInfo,int stride,std::vector<cv::Rect> &Char_rects);

void segmentPlateBySlidingWindows(cv::Mat &plateImage,int windowsWidth,int stride,cv::Mat &respones);
void templateMatchFinding(const cv::Mat &respones,int windowsWidth,std::pair<float,std::vector<int> > &candidatePts);
void refineRegion(cv::Mat &plateImage,const std::vector<int> &candidatePts,const int padding,std::vector<cv::Rect> &rects);
void ExtractRegions(PlateInfo &plateInfo,std::vector<cv::Rect> &rects);
cv::Mat classifyResponse(const cv::Mat &cropped);
private:
cv::dnn::Net net;


// RefineRegion()

};

}//namespace pr

#endif //SWIFTPR_PLATESEGMENTATION_H
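
The intended call order is visible in Pipeline.cpp; a condensed sketch (model paths assumed, plateInfo holding a 136x36 plate crop):

#include "PlateSegmentation.h"

void segmentDemo(pr::PlateInfo &plateInfo) {
    pr::PlateSegmentation seg("Segmentation.prototxt", "Segmentation.caffemodel");
    std::vector<cv::Rect> rects;
    seg.segmentPlatePipline(plateInfo, 1, rects);  // stride 1, as in Pipeline.cpp
    seg.ExtractRegions(plateInfo, rects);          // fills plateInfo.plateChars
}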

+ 23
- 0
Prj-iOS/lpr/lpr/Source/include/Recognizer.h View File

@@ -0,0 +1,23 @@
//
// Created by 庾金科 on 20/10/2017.
//


#ifndef SWIFTPR_RECOGNIZER_H
#define SWIFTPR_RECOGNIZER_H

#include <opencv2/dnn.hpp>
#include "PlateInfo.h"
namespace pr{
typedef cv::Mat label;
class GeneralRecognizer{
public:
virtual label recognizeCharacter(cv::Mat character) = 0;
// virtual cv::Mat SegmentationFreeForSinglePlate(cv::Mat plate) = 0;
void SegmentBasedSequenceRecognition(PlateInfo &plateinfo);
void SegmentationFreeSequenceRecognition(PlateInfo &plateInfo);

};

}
#endif //SWIFTPR_RECOGNIZER_H

+ 28
- 0
Prj-iOS/lpr/lpr/Source/include/SegmentationFreeRecognizer.h View File

@@ -0,0 +1,28 @@
//
// Created by 庾金科 on 28/11/2017.
//

#ifndef SWIFTPR_SEGMENTATIONFREERECOGNIZER_H
#define SWIFTPR_SEGMENTATIONFREERECOGNIZER_H

#include "Recognizer.h"
namespace pr{


class SegmentationFreeRecognizer{
public:
const int CHAR_INPUT_W = 14;
const int CHAR_INPUT_H = 30;
const int CHAR_LEN = 84;

SegmentationFreeRecognizer(std::string prototxt,std::string caffemodel);
std::pair<std::string,float> SegmentationFreeForSinglePlate(cv::Mat plate,std::vector<std::string> mapping_table);


private:
cv::dnn::Net net;

};

}
#endif //SWIFTPR_SEGMENTATIONFREERECOGNIZER_H
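
A short sketch of driving this recognizer directly; the mapping table should be the 83-entry CH_PLATE_CODE defined in Pipeline.cpp, and the model paths are assumptions:

#include <iostream>
#include "SegmentationFreeRecognizer.h"

std::string recognizeOne(const cv::Mat &plate, const std::vector<std::string> &mappingTable) {
    pr::SegmentationFreeRecognizer rec("SegmenationFree-Inception.prototxt",
                                       "SegmenationFree-Inception.caffemodel");
    std::pair<std::string, float> res = rec.SegmentationFreeForSinglePlate(plate, mappingTable);
    std::cout << res.first << " (confidence " << res.second << ")" << std::endl;
    return res.first;
}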

+ 107
- 0
Prj-iOS/lpr/lpr/Source/include/niBlackThreshold.h View File

@@ -0,0 +1,107 @@
//
// Created by 庾金科 on 26/10/2017.
//

#ifndef SWIFTPR_NIBLACKTHRESHOLD_H
#define SWIFTPR_NIBLACKTHRESHOLD_H


#include <opencv2/opencv.hpp>
using namespace cv;

enum LocalBinarizationMethods{
BINARIZATION_NIBLACK = 0, //!< Classic Niblack binarization. See @cite Niblack1985 .
BINARIZATION_SAUVOLA = 1, //!< Sauvola's technique. See @cite Sauvola1997 .
BINARIZATION_WOLF = 2, //!< Wolf's technique. See @cite Wolf2004 .
BINARIZATION_NICK = 3 //!< NICK technique. See @cite Khurshid2009 .
};


void niBlackThreshold( InputArray _src, OutputArray _dst, double maxValue,
int type, int blockSize, double k, int binarizationMethod )
{
// Input grayscale image
Mat src = _src.getMat();
CV_Assert(src.channels() == 1);
CV_Assert(blockSize % 2 == 1 && blockSize > 1);
if (binarizationMethod == BINARIZATION_SAUVOLA) {
CV_Assert(src.depth() == CV_8U);
}
type &= THRESH_MASK;
// Compute local threshold (T = mean + k * stddev)
// using mean and standard deviation in the neighborhood of each pixel
// (intermediate calculations are done with floating-point precision)
Mat test;
Mat thresh;
{
// note that: Var[X] = E[X^2] - E[X]^2
Mat mean, sqmean, variance, stddev, sqrtVarianceMeanSum;
double srcMin, stddevMax;
boxFilter(src, mean, CV_32F, Size(blockSize, blockSize),
Point(-1,-1), true, BORDER_REPLICATE);
sqrBoxFilter(src, sqmean, CV_32F, Size(blockSize, blockSize),
Point(-1,-1), true, BORDER_REPLICATE);
variance = sqmean - mean.mul(mean);
sqrt(variance, stddev);
switch (binarizationMethod)
{
case BINARIZATION_NIBLACK:
thresh = mean + stddev * static_cast<float>(k);

break;
case BINARIZATION_SAUVOLA:
thresh = mean.mul(1. + static_cast<float>(k) * (stddev / 128.0 - 1.));
break;
case BINARIZATION_WOLF:
minMaxIdx(src, &srcMin,NULL);
minMaxIdx(stddev, NULL, &stddevMax);
thresh = mean - static_cast<float>(k) * (mean - srcMin - stddev.mul(mean - srcMin) / stddevMax);
break;
case BINARIZATION_NICK:
sqrt(variance + sqmean, sqrtVarianceMeanSum);
thresh = mean + static_cast<float>(k) * sqrtVarianceMeanSum;
break;
default:
CV_Error( CV_StsBadArg, "Unknown binarization method" );
break;
}
thresh.convertTo(thresh, src.depth());

thresh.convertTo(test, src.depth());
//
// cv::imshow("imagex",test);
// cv::waitKey(0);

}
// Prepare output image
_dst.create(src.size(), src.type());
Mat dst = _dst.getMat();
CV_Assert(src.data != dst.data); // no inplace processing
// Apply thresholding: ( pixel > threshold ) ? foreground : background
Mat mask;
switch (type)
{
case THRESH_BINARY: // dst = (src > thresh) ? maxval : 0
case THRESH_BINARY_INV: // dst = (src > thresh) ? 0 : maxval
compare(src, thresh, mask, (type == THRESH_BINARY ? CMP_GT : CMP_LE));
dst.setTo(0);
dst.setTo(maxValue, mask);
break;
case THRESH_TRUNC: // dst = (src > thresh) ? thresh : src
compare(src, thresh, mask, CMP_GT);
src.copyTo(dst);
thresh.copyTo(dst, mask);
break;
case THRESH_TOZERO: // dst = (src > thresh) ? src : 0
case THRESH_TOZERO_INV: // dst = (src > thresh) ? 0 : src
compare(src, thresh, mask, (type == THRESH_TOZERO ? CMP_GT : CMP_LE));
dst.setTo(0);
src.copyTo(dst, mask);
break;
default:
CV_Error( CV_StsBadArg, "Unknown threshold type" );
break;
}
}

#endif //SWIFTPR_NIBLACKTHRESHOLD_H
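
A minimal usage sketch, matching the parameters PlateSegmentation::refineRegion passes below (the input path is an assumption):

#include "niBlackThreshold.h"

int main() {
    cv::Mat gray = cv::imread("char.png", cv::IMREAD_GRAYSCALE);
    cv::Mat bin;
    // blockSize must be odd and > 1; k = 0.27 matches the call in PlateSegmentation.cpp.
    niBlackThreshold(gray, bin, 255, cv::THRESH_BINARY, 15, 0.27, BINARIZATION_NIBLACK);
    cv::imwrite("char_bin.png", bin);
    return 0;
}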

+ 19
- 0
Prj-iOS/lpr/lpr/Source/src/CNNRecognizer.cpp View File

@@ -0,0 +1,19 @@
//
// Created by Jack Yu on 21/10/2017.
//

#include "CNNRecognizer.h"

namespace pr{
CNNRecognizer::CNNRecognizer(std::string prototxt,std::string caffemodel){
net = cv::dnn::readNetFromCaffe(prototxt, caffemodel);
}

label CNNRecognizer::recognizeCharacter(cv::Mat charImage){
if(charImage.channels()== 3)
cv::cvtColor(charImage,charImage,cv::COLOR_BGR2GRAY);
cv::Mat inputBlob = cv::dnn::blobFromImage(charImage, 1/255.0, cv::Size(CHAR_INPUT_W,CHAR_INPUT_H), cv::Scalar(0,0,0),false);
net.setInput(inputBlob,"data");
return net.forward();
}
}
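
recognizeCharacter accepts a single-character crop (blobFromImage resizes it to 14x30 internally) and returns the raw score row; a hedged sketch of consuming it, with assumed file paths:

#include <iostream>
#include <algorithm>
#include "CNNRecognizer.h"

int main() {
    pr::CNNRecognizer rec("CharacterRecognization.prototxt", "CharacterRecognization.caffemodel");
    cv::Mat ch = cv::imread("char.png");        // one cropped character
    cv::Mat prob = rec.recognizeCharacter(ch);  // 1 x N row of class scores
    const float *p = prob.ptr<float>(0);
    std::cout << "class index: " << (std::max_element(p, p + prob.cols) - p) << std::endl;
    return 0;
}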

+ 108
- 0
Prj-iOS/lpr/lpr/Source/src/FastDeskew.cpp View File

@@ -0,0 +1,108 @@
//
// Created by Jack Yu on 02/10/2017.
//



#include "FastDeskew.h"

namespace pr{
const int ANGLE_MIN = 30 ;
const int ANGLE_MAX = 150 ;
const int PLATE_H = 36;
const int PLATE_W = 136;
int angle(float x,float y)
{
return atan2(x,y)*180/3.1415;
}

std::vector<float> avgfilter(std::vector<float> angle_list,int windowsSize) {
std::vector<float> angle_list_filtered(angle_list.size() - windowsSize + 1);
for (int i = 0; i < angle_list.size() - windowsSize + 1; i++) {
float avg = 0.00f;
for (int j = 0; j < windowsSize; j++) {
avg += angle_list[i + j];
}
avg = avg / windowsSize;
angle_list_filtered[i] = avg;
}

return angle_list_filtered;
}


void drawHist(std::vector<float> seq){
cv::Mat image(300,seq.size(),CV_8U);
image.setTo(0);

for(int i = 0;i<seq.size();i++)
{
float l = *std::max_element(seq.begin(),seq.end());

int p = int(float(seq[i])/l*300);

cv::line(image,cv::Point(i,300),cv::Point(i,300-p),cv::Scalar(255,255,255));
}
cv::imshow("vis",image);
}

cv::Mat correctPlateImage(cv::Mat skewPlate,float angle,float maxAngle)
{
cv::Mat dst;
cv::Size size_o(skewPlate.cols,skewPlate.rows);
int extend_padding = 0;
extend_padding = static_cast<int>(skewPlate.rows*tan(cv::abs(angle)/180* 3.14) );
cv::Size size(skewPlate.cols + extend_padding ,skewPlate.rows);
float interval = abs(sin((angle /180) * 3.14)* skewPlate.rows);
cv::Point2f pts1[4] = {cv::Point2f(0,0),cv::Point2f(0,size_o.height),cv::Point2f(size_o.width,0),cv::Point2f(size_o.width,size_o.height)};
if(angle>0) {
cv::Point2f pts2[4] = {cv::Point2f(interval, 0), cv::Point2f(0, size_o.height),
cv::Point2f(size_o.width, 0), cv::Point2f(size_o.width - interval, size_o.height)};
cv::Mat M = cv::getPerspectiveTransform(pts1,pts2);
cv::warpPerspective(skewPlate,dst,M,size);
}
else {
cv::Point2f pts2[4] = {cv::Point2f(0, 0), cv::Point2f(interval, size_o.height), cv::Point2f(size_o.width-interval, 0),
cv::Point2f(size_o.width, size_o.height)};
cv::Mat M = cv::getPerspectiveTransform(pts1,pts2);
cv::warpPerspective(skewPlate,dst,M,size,cv::INTER_CUBIC);
}
return dst;
}
cv::Mat fastdeskew(cv::Mat skewImage,int blockSize){
const int FILTER_WINDOWS_SIZE = 5;
std::vector<float> angle_list(180);
memset(angle_list.data(),0,angle_list.size()*sizeof(float));
cv::Mat bak;
skewImage.copyTo(bak);
if(skewImage.channels() == 3)
cv::cvtColor(skewImage,skewImage,cv::COLOR_RGB2GRAY);
if(skewImage.channels() == 1)
{
cv::Mat eigen;
cv::cornerEigenValsAndVecs(skewImage,eigen,blockSize,5);
for( int j = 0; j < skewImage.rows; j+=blockSize )
{ for( int i = 0; i < skewImage.cols; i+=blockSize )
{
float x2 = eigen.at<cv::Vec6f>(j, i)[4];
float y2 = eigen.at<cv::Vec6f>(j, i)[5];
int angle_cell = angle(x2,y2);
angle_list[(angle_cell + 180)%180]+=1.0;
}
}
}
std::vector<float> filtered = avgfilter(angle_list,5);
int maxPos = std::max_element(filtered.begin(),filtered.end()) - filtered.begin() + FILTER_WINDOWS_SIZE/2;
if(maxPos>ANGLE_MAX)
maxPos = (-maxPos+90+180)%180;
if(maxPos<ANGLE_MIN)
maxPos-=90;
maxPos=90-maxPos;
cv::Mat deskewed = correctPlateImage(bak, static_cast<float>(maxPos),60.0f);
return deskewed;
}



}//namespace pr
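
A quick usage sketch; blockSize 5 matches the call in Pipeline.cpp, and the image path is an assumption:

#include "FastDeskew.h"

int main() {
    cv::Mat plate = cv::imread("plate.png");
    cv::Mat deskewed = pr::fastdeskew(plate, 5);
    cv::imwrite("plate_deskewed.png", deskewed);
    return 0;
}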

+ 170
- 0
Prj-iOS/lpr/lpr/Source/src/FineMapping.cpp View File

@@ -0,0 +1,170 @@
#include "FineMapping.h"
namespace pr{

const int FINEMAPPING_H = 60 ;
const int FINEMAPPING_W = 140;
const int PADDING_UP_DOWN = 30;
void drawRect(cv::Mat image,cv::Rect rect)
{
cv::Point p1(rect.x,rect.y);
cv::Point p2(rect.x+rect.width,rect.y+rect.height);
cv::rectangle(image,p1,p2,cv::Scalar(0,255,0),1);
}


FineMapping::FineMapping(std::string prototxt,std::string caffemodel) {
net = cv::dnn::readNetFromCaffe(prototxt, caffemodel);

}

cv::Mat FineMapping::FineMappingHorizon(cv::Mat FinedVertical,int leftPadding,int rightPadding)
{

// if(FinedVertical.channels()==1)
// cv::cvtColor(FinedVertical,FinedVertical,cv::COLOR_GRAY2BGR);
cv::Mat inputBlob = cv::dnn::blobFromImage(FinedVertical, 1/255.0, cv::Size(66,16),
cv::Scalar(0,0,0),false);

net.setInput(inputBlob,"data");
cv::Mat prob = net.forward();
int front = static_cast<int>(prob.at<float>(0,0)*FinedVertical.cols);
int back = static_cast<int>(prob.at<float>(0,1)*FinedVertical.cols);
front -= leftPadding ;
if(front<0) front = 0;
back +=rightPadding;
if(back>FinedVertical.cols-1) back=FinedVertical.cols - 1;
cv::Mat cropped = FinedVertical.colRange(front,back).clone();
return cropped;


}
std::pair<int,int> FitLineRansac(std::vector<cv::Point> pts,int zeroadd = 0 )
{
std::pair<int,int> res;
if(pts.size()>2)
{
cv::Vec4f line;
cv::fitLine(pts,line,cv::DIST_HUBER,0,0.01,0.01);
float vx = line[0];
float vy = line[1];
float x = line[2];
float y = line[3];
int lefty = static_cast<int>((-x * vy / vx) + y);
int righty = static_cast<int>(((136- x) * vy / vx) + y);
res.first = lefty+PADDING_UP_DOWN+zeroadd;
res.second = righty+PADDING_UP_DOWN+zeroadd;
return res;
}
res.first = zeroadd;
res.second = zeroadd;
return res;
}

cv::Mat FineMapping::FineMappingVertical(cv::Mat InputProposal,int sliceNum,int upper,int lower,int windows_size){
cv::Mat PreInputProposal;
cv::Mat proposal;
cv::resize(InputProposal,PreInputProposal,cv::Size(FINEMAPPING_W,FINEMAPPING_H));
if(InputProposal.channels() == 3)
cv::cvtColor(PreInputProposal,proposal,cv::COLOR_BGR2GRAY);
else
PreInputProposal.copyTo(proposal);
// this will improve some sen
cv::Mat kernal = cv::getStructuringElement(cv::MORPH_ELLIPSE,cv::Size(1,3));
float diff = static_cast<float>(upper-lower);
diff/=static_cast<float>(sliceNum-1);
cv::Mat binary_adaptive;
std::vector<cv::Point> line_upper;
std::vector<cv::Point> line_lower;
int contours_nums=0;
for(int i = 0 ; i < sliceNum ; i++)
{
std::vector<std::vector<cv::Point> > contours;
float k =lower + i*diff;
cv::adaptiveThreshold(proposal,binary_adaptive,255,cv::ADAPTIVE_THRESH_MEAN_C,cv::THRESH_BINARY,windows_size,k);
cv::Mat draw;
binary_adaptive.copyTo(draw);
cv::findContours(binary_adaptive,contours,cv::RETR_EXTERNAL,cv::CHAIN_APPROX_SIMPLE);
for(auto contour: contours)
{
cv::Rect bdbox =cv::boundingRect(contour);
float lwRatio = bdbox.height/static_cast<float>(bdbox.width);
int bdboxAera = bdbox.width*bdbox.height;
if (( lwRatio>0.7&&bdbox.width*bdbox.height>100 && bdboxAera<300)
|| (lwRatio>3.0 && bdboxAera<100 && bdboxAera>10))
{
cv::Point p1(bdbox.x, bdbox.y);
cv::Point p2(bdbox.x + bdbox.width, bdbox.y + bdbox.height);
line_upper.push_back(p1);
line_lower.push_back(p2);
contours_nums+=1;
}
}
}
if(contours_nums<41)
{
cv::bitwise_not(InputProposal,InputProposal);
cv::Mat kernal = cv::getStructuringElement(cv::MORPH_ELLIPSE,cv::Size(1,5));
cv::Mat bak;
cv::resize(InputProposal,bak,cv::Size(FINEMAPPING_W,FINEMAPPING_H));
cv::erode(bak,bak,kernal);
if(InputProposal.channels() == 3)
cv::cvtColor(bak,proposal,cv::COLOR_BGR2GRAY);
else
proposal = bak;
int contours_nums=0;
for(int i = 0 ; i < sliceNum ; i++)
{
std::vector<std::vector<cv::Point> > contours;
float k =lower + i*diff;
cv::adaptiveThreshold(proposal,binary_adaptive,255,cv::ADAPTIVE_THRESH_MEAN_C,cv::THRESH_BINARY,windows_size,k);
cv::Mat draw;
binary_adaptive.copyTo(draw);
cv::findContours(binary_adaptive,contours,cv::RETR_EXTERNAL,cv::CHAIN_APPROX_SIMPLE);
for(auto contour: contours)
{
cv::Rect bdbox =cv::boundingRect(contour);
float lwRatio = bdbox.height/static_cast<float>(bdbox.width);
int bdboxAera = bdbox.width*bdbox.height;
if (( lwRatio>0.7&&bdbox.width*bdbox.height>120 && bdboxAera<300)
|| (lwRatio>3.0 && bdboxAera<100 && bdboxAera>10))
{

cv::Point p1(bdbox.x, bdbox.y);
cv::Point p2(bdbox.x + bdbox.width, bdbox.y + bdbox.height);
line_upper.push_back(p1);
line_lower.push_back(p2);
contours_nums+=1;
}
}
}
}
cv::Mat rgb;
cv::copyMakeBorder(PreInputProposal, rgb, PADDING_UP_DOWN, PADDING_UP_DOWN, 0, 0, cv::BORDER_REPLICATE);
std::pair<int, int> A;
std::pair<int, int> B;
A = FitLineRansac(line_upper, -1);
B = FitLineRansac(line_lower, 1);
int leftyB = A.first;
int rightyB = A.second;
int leftyA = B.first;
int rightyA = B.second;
int cols = rgb.cols;
int rows = rgb.rows;
std::vector<cv::Point2f> corners(4);
corners[0] = cv::Point2f(cols - 1, rightyA);
corners[1] = cv::Point2f(0, leftyA);
corners[2] = cv::Point2f(cols - 1, rightyB);
corners[3] = cv::Point2f(0, leftyB);
std::vector<cv::Point2f> corners_trans(4);
corners_trans[0] = cv::Point2f(136, 36);
corners_trans[1] = cv::Point2f(0, 36);
corners_trans[2] = cv::Point2f(136, 0);
corners_trans[3] = cv::Point2f(0, 0);
cv::Mat transform = cv::getPerspectiveTransform(corners, corners_trans);
cv::Mat quad = cv::Mat::zeros(36, 136, CV_8UC3);
cv::warpPerspective(rgb, quad, transform, quad.size());
return quad;
}
}



+ 103
- 0
Prj-iOS/lpr/lpr/Source/src/Pipeline.cpp View File

@@ -0,0 +1,103 @@
//
// Created by 庾金科 on 23/10/2017.
//

#include "Pipeline.h"


namespace pr {

std::string str_code[]={"京", "沪", "津", "渝", "冀", "晋", "蒙", "辽", "吉", "黑", "苏", "浙", "皖", "闽", "赣", "鲁", "豫", "鄂", "湘", "粤", "桂", "琼", "川", "贵", "云", "藏", "陕", "甘", "青", "宁", "新", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "A", "B", "C", "D", "E", "F", "G", "H", "J", "K", "L", "M", "N", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z","港","学","使","警","澳","挂","军","北","南","广","沈","兰","成","济","海","民","航","空"};
const std::vector<std::string> CH_PLATE_CODE(str_code, str_code+83);


const int HorizontalPadding = 4;
PipelinePR::PipelinePR(std::string detector_filename,
std::string finemapping_prototxt, std::string finemapping_caffemodel,
std::string segmentation_prototxt, std::string segmentation_caffemodel,
std::string charRecognization_proto, std::string charRecognization_caffemodel,
std::string segmentationfree_proto,std::string segmentationfree_caffemodel) {
plateDetection = new PlateDetection(detector_filename);
fineMapping = new FineMapping(finemapping_prototxt, finemapping_caffemodel);
plateSegmentation = new PlateSegmentation(segmentation_prototxt, segmentation_caffemodel);
generalRecognizer = new CNNRecognizer(charRecognization_proto, charRecognization_caffemodel);
segmentationFreeRecognizer = new SegmentationFreeRecognizer(segmentationfree_proto,segmentationfree_caffemodel);

}

PipelinePR::~PipelinePR() {

delete plateDetection;
delete fineMapping;
delete plateSegmentation;
delete (CNNRecognizer *)generalRecognizer;
delete segmentationFreeRecognizer;


}

std::vector<PlateInfo> PipelinePR:: RunPiplineAsImage(cv::Mat plateImage,int method) {
std::vector<PlateInfo> results;
std::vector<pr::PlateInfo> plates;
plateDetection->plateDetectionRough(plateImage,plates,36,700);

for (pr::PlateInfo plateinfo:plates) {

cv::Mat image_finemapping = plateinfo.getPlateImage();
image_finemapping = fineMapping->FineMappingVertical(image_finemapping);
image_finemapping = pr::fastdeskew(image_finemapping, 5);



//Segmentation-based

if(method==SEGMENTATION_BASED_METHOD)
{
image_finemapping = fineMapping->FineMappingHorizon(image_finemapping, 2, HorizontalPadding);
cv::resize(image_finemapping, image_finemapping, cv::Size(136+HorizontalPadding, 36));
// cv::imshow("image_finemapping",image_finemapping);
// cv::waitKey(0);
plateinfo.setPlateImage(image_finemapping);
std::vector<cv::Rect> rects;

plateSegmentation->segmentPlatePipline(plateinfo, 1, rects);
plateSegmentation->ExtractRegions(plateinfo, rects);
cv::copyMakeBorder(image_finemapping, image_finemapping, 0, 0, 0, 20, cv::BORDER_REPLICATE);
plateinfo.setPlateImage(image_finemapping);
generalRecognizer->SegmentBasedSequenceRecognition(plateinfo);
plateinfo.decodePlateNormal(pr::CH_PLATE_CODE);

}
//Segmentation-free
else if(method==SEGMENTATION_FREE_METHOD)
{

image_finemapping = fineMapping->FineMappingHorizon(image_finemapping, 4, HorizontalPadding+3);

cv::resize(image_finemapping, image_finemapping, cv::Size(136+HorizontalPadding, 36));
// cv::imwrite("./test.png",image_finemapping);
// cv::imshow("image_finemapping",image_finemapping);
// cv::waitKey(0);
plateinfo.setPlateImage(image_finemapping);
// std::vector<cv::Rect> rects;

std::pair<std::string,float> res = segmentationFreeRecognizer->SegmentationFreeForSinglePlate(plateinfo.getPlateImage(),pr::CH_PLATE_CODE);
plateinfo.confidence = res.second;
plateinfo.setPlateName(res.first);
}



results.push_back(plateinfo);
}

// for (auto str:results) {
// std::cout << str << std::endl;
// }
return results;

}



}//namespace pr

+ 32
- 0
Prj-iOS/lpr/lpr/Source/src/PlateDetection.cpp View File

@@ -0,0 +1,32 @@
#include "PlateDetection.h"
#include "util.h"
namespace pr{
PlateDetection::PlateDetection(std::string filename_cascade){
cascade.load(filename_cascade);

};
void PlateDetection::plateDetectionRough(cv::Mat InputImage,std::vector<pr::PlateInfo> &plateInfos,int min_w,int max_w){
cv::Mat processImage;
cv::cvtColor(InputImage,processImage,cv::COLOR_BGR2GRAY);
std::vector<cv::Rect> platesRegions;
cv::Size minSize(min_w,min_w/4);
cv::Size maxSize(max_w,max_w/4);
cascade.detectMultiScale( processImage, platesRegions,
1.1, 3, cv::CASCADE_SCALE_IMAGE,minSize,maxSize);
for(auto plate:platesRegions)
{
int zeroadd_w = static_cast<int>(plate.width*0.30);
int zeroadd_h = static_cast<int>(plate.height*2);
int zeroadd_x = static_cast<int>(plate.width*0.15);
int zeroadd_y = static_cast<int>(plate.height*1);
plate.x-=zeroadd_x;
plate.y-=zeroadd_y;
plate.height += zeroadd_h;
plate.width += zeroadd_w;
cv::Mat plateImage = util::cropFromImage(InputImage,plate);
PlateInfo plateInfo(plateImage,plate);
plateInfos.push_back(plateInfo);

}
}
}//namespace pr
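
A usage sketch of the rough detector on its own; the min/max widths 36 and 700 match the call in Pipeline.cpp, and the file paths are assumptions:

#include <iostream>
#include "PlateDetection.h"

int main() {
    pr::PlateDetection detector("cascade.xml");
    cv::Mat image = cv::imread("street.jpg");
    std::vector<pr::PlateInfo> candidates;
    detector.plateDetectionRough(image, candidates, 36, 700);
    for (pr::PlateInfo &p : candidates)
        std::cout << p.getPlateRect() << std::endl;  // padded ROI around each candidate
    return 0;
}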

+ 404
- 0
Prj-iOS/lpr/lpr/Source/src/PlateSegmentation.cpp View File

@@ -0,0 +1,404 @@
//
// Created by Jack Yu on 16/10/2017.
//

#include "PlateSegmentation.h"
#include "niBlackThreshold.h"


//#define DEBUG
namespace pr{

PlateSegmentation::PlateSegmentation(std::string prototxt,std::string caffemodel) {
net = cv::dnn::readNetFromCaffe(prototxt, caffemodel);
}
cv::Mat PlateSegmentation::classifyResponse(const cv::Mat &cropped){
cv::Mat inputBlob = cv::dnn::blobFromImage(cropped, 1/255.0, cv::Size(22,22), cv::Scalar(0,0,0),false);
net.setInput(inputBlob,"data");
return net.forward();
}

void drawHist(float* seq,int size,const char* name){
cv::Mat image(300,size,CV_8U);
image.setTo(0);
float* start =seq;
float* end = seq+size;
float l = *std::max_element(start,end);
for(int i = 0;i<size;i++)
{
int p = int(float(seq[i])/l*300);
cv::line(image,cv::Point(i,300),cv::Point(i,300-p),cv::Scalar(255,255,255));
}
cv::resize(image,image,cv::Size(600,100));
cv::imshow(name,image);
}

inline void computeSafeMargin(int &val,const int &rows){
val = std::min(val,rows);
val = std::max(val,0);
}

cv::Rect boxFromCenter(const cv::Point center,int left,int right,int top,int bottom,cv::Size bdSize)
{
cv::Point p1(center.x - left ,center.y - top);
cv::Point p2( center.x + right, center.y + bottom);
p1.x = std::max(0,p1.x);
p1.y = std::max(0,p1.y);
p2.x = std::min(p2.x,bdSize.width-1);
p2.y = std::min(p2.y,bdSize.height-1);
cv::Rect rect(p1,p2);
return rect;
}

cv::Rect boxPadding(cv::Rect rect,int left,int right,int top,int bottom,cv::Size bdSize)
{

cv::Point center(rect.x+(rect.width>>1),rect.y + (rect.height>>1));
int rebuildLeft = (rect.width>>1 )+ left;
int rebuildRight = (rect.width>>1 )+ right;
int rebuildTop = (rect.height>>1 )+ top;
int rebuildBottom = (rect.height>>1 )+ bottom;
return boxFromCenter(center,rebuildLeft,rebuildRight,rebuildTop,rebuildBottom,bdSize);

}



void PlateSegmentation:: refineRegion(cv::Mat &plateImage,const std::vector<int> &candidatePts,const int padding,std::vector<cv::Rect> &rects){
int w = candidatePts[5] - candidatePts[4];
int cols = plateImage.cols;
int rows = plateImage.rows;
for(int i = 0 ; i < candidatePts.size() ; i++)
{
int left = 0;
int right = 0 ;

if(i == 0 ){
left= candidatePts[i];
right = left+w+padding;
}
else {
left = candidatePts[i] - padding;
right = left + w + padding * 2;
}

computeSafeMargin(right,cols);
computeSafeMargin(left,cols);
cv::Rect roi(left,0,right - left,rows-1);
cv::Mat roiImage;
plateImage(roi).copyTo(roiImage);

if (i>=1)
{

cv::Mat roi_thres;
// cv::threshold(roiImage,roi_thres,0,255,cv::THRESH_OTSU|cv::THRESH_BINARY);

niBlackThreshold(roiImage,roi_thres,255,cv::THRESH_BINARY,15,0.27,BINARIZATION_NIBLACK);

std::vector<std::vector<cv::Point> > contours;
cv::findContours(roi_thres,contours,cv::RETR_LIST,cv::CHAIN_APPROX_SIMPLE);
cv::Point boxCenter(roiImage.cols>>1,roiImage.rows>>1);

cv::Rect final_bdbox;
cv::Point final_center;
int final_dist = INT_MAX;


for(auto contour:contours)
{
cv::Rect bdbox = cv::boundingRect(contour);
cv::Point center(bdbox.x+(bdbox.width>>1),bdbox.y + (bdbox.height>>1));
int dist = (center.x - boxCenter.x)*(center.x - boxCenter.x);
if(dist<final_dist && bdbox.height > rows>>1)
{ final_dist =dist;
final_center = center;
final_bdbox = bdbox;
}
}

//rebuild box
if(final_bdbox.height/ static_cast<float>(final_bdbox.width) > 3.5 && final_bdbox.width*final_bdbox.height<10)
final_bdbox = boxFromCenter(final_center,8,8,(rows>>1)-3 , (rows>>1) - 2,roiImage.size());
else {
if(i == candidatePts.size()-1)
final_bdbox = boxPadding(final_bdbox, padding/2, padding, padding/2, padding/2, roiImage.size());
else
final_bdbox = boxPadding(final_bdbox, padding, padding, padding, padding, roiImage.size());


// std::cout<<final_bdbox<<std::endl;
// std::cout<<roiImage.size()<<std::endl;
//#ifdef DEBUG
// cv::imshow("char_thres",roi_thres);
//
// cv::imshow("char",roiImage(final_bdbox));
// cv::waitKey(0);
//#endif


}


final_bdbox.x += left;

rects.push_back(final_bdbox);
//

}
else
{
rects.push_back(roi);
}

// else
// {
//
// }

// cv::GaussianBlur(roiImage,roiImage,cv::Size(7,7),3);
//
// cv::imshow("image",roiImage);
// cv::waitKey(0);


}



}
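// Note: niBlackThreshold with BINARIZATION_NIBLACK is the local-mean
// thresholding known from OpenCV's ximgproc module (a copy ships with this
// project); a local threshold copes better than a global Otsu threshold with
// the uneven illumination common on plate crops.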
// In-place Gaussian smoothing of a 1-D sequence; out-of-range taps are dropped.
void avgfilter(float *seq, int size, int windowSize) {
    float *source = new float[size];
    for (int i = 0; i < size; i++) source[i] = seq[i];

    cv::Mat kernel_gaussian = cv::getGaussianKernel(windowSize, 3, CV_32F);
    float *kernel = (float *)kernel_gaussian.data;
    int r = windowSize / 2;

    for (int i = 0; i < size; i++) {
        float avg = 0.0f;
        for (int j = 0; j < windowSize; j++) {
            int idx = i + j - r;
            if (idx >= 0 && idx < size) // skip taps that fall outside the sequence
                avg += source[idx] * kernel[j];
        }
        seq[i] = avg;
    }

    delete[] source;
}
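// Worked example (window size 5, radius r = 2): the smoothed value is
//   out[i] = k[0]*in[i-2] + k[1]*in[i-1] + k[2]*in[i] + k[3]*in[i+1] + k[4]*in[i+2],
// with taps falling outside [0, size) simply dropped near the borders.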

// Fit a seven-point plate template to the per-window class responses by
// exhaustive search over start offset, character width, and separator gap,
// keeping the placement with the best score.
void PlateSegmentation::templateMatchFinding(const cv::Mat &responses, int windowsWidth, std::pair<float, std::vector<int> > &candidatePts) {
    int cols = responses.cols;

    // Row layout after transposition: 0 = letter/digit, 1 = non-character, 2 = Chinese character.
    float *data = (float *)responses.data;
    float *engNum_prob = data;
    float *false_prob = data + cols;
    float *ch_prob = data + cols * 2;

    // Smooth the per-window probabilities (in place) before searching.
    avgfilter(engNum_prob, cols, 5);
    avgfilter(false_prob, cols, 5);

    std::vector<int> candidate_pts(7);
    int cp_list[7];
    float loss_selected = -10;

    for (int start = 0; start < 20; start += 2)
        for (int width = windowsWidth - 5; width < windowsWidth + 5; width++) {
            for (int interval = windowsWidth / 2; interval < windowsWidth; interval++) {
                int cp1_ch = start;
                int cp2_p0 = cp1_ch + width;
                int cp3_p1 = cp2_p0 + width + interval;
                int cp4_p2 = cp3_p1 + width;
                int cp5_p3 = cp4_p2 + width + 1;
                int cp6_p4 = cp5_p3 + width + 2;
                int cp7_p5 = cp6_p4 + width + 2;

                if (cp7_p5 >= cols)
                    continue;

                // Reward a strong Chinese-character response at the first slot and
                // penalize non-character responses at the remaining slots.
                float loss = ch_prob[cp1_ch] * 3 - (false_prob[cp3_p1] + false_prob[cp4_p2] + false_prob[cp5_p3] + false_prob[cp6_p4] + false_prob[cp7_p5]);

                if (loss > loss_selected) {
                    loss_selected = loss;
                    cp_list[0] = cp1_ch;
                    cp_list[1] = cp2_p0;
                    cp_list[2] = cp3_p1;
                    cp_list[3] = cp4_p2;
                    cp_list[4] = cp5_p3;
                    cp_list[5] = cp6_p4;
                    cp_list[6] = cp7_p5;
                }
            }
        }

    for (int i = 0; i < 7; i++)
        candidate_pts[i] = cp_list[i];

    candidatePts.first = loss_selected;
    candidatePts.second = candidate_pts;
}
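// The seven points delimit the standard mainland plate layout
//   [ch][p0]  [p1][p2][p3][p4][p5]
// i.e. a Chinese province character, one letter, the separator gap, then five
// more letters/digits; start offset, character width, and gap width are the
// searched template parameters.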


void PlateSegmentation::segmentPlateBySlidingWindows(cv::Mat &plateImage, int windowsWidth, int stride, cv::Mat &responses) {
    cv::Mat plateImageGray;
    cv::cvtColor(plateImage, plateImageGray, cv::COLOR_BGR2GRAY);
    int padding = plateImage.cols - 136; // ignore anything beyond the nominal 136-px plate width
    int height = plateImage.rows - 1;
    int width = plateImage.cols - 1 - padding;

    // Classify every sliding window and stack the responses row by row.
    for (int i = 0; i < width - windowsWidth + 1; i += stride) {
        cv::Rect roi(i, 0, windowsWidth, height);
        cv::Mat roiImage = plateImageGray(roi);
        cv::Mat response = classifyResponse(roiImage);
        responses.push_back(response);
    }

    // Transpose so that each row holds one class probability across all windows.
    responses = responses.t();
}
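// Usage sketch (illustrative): collect per-window responses for a plate crop;
// DEFAULT_WIDTH is the window width used by the pipeline below.
//
//   cv::Mat responses;
//   seg.segmentPlateBySlidingWindows(plate, DEFAULT_WIDTH, 1, responses);
//   // responses now has one row per class and one column per window position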



void PlateSegmentation::segmentPlatePipline(PlateInfo &plateInfo, int stride, std::vector<cv::Rect> &Char_rects) {
    cv::Mat plateImage = plateInfo.getPlateImage(); // source plate image
    cv::Mat plateImageGray;
    cv::cvtColor(plateImage, plateImageGray, cv::COLOR_BGR2GRAY);

    std::pair<float, std::vector<int> > sections; // best score and its seven segmentation points
    cv::Mat responses;                            // per-window class responses over the plate
    segmentPlateBySlidingWindows(plateImage, DEFAULT_WIDTH, 1, responses);
    templateMatchFinding(responses, DEFAULT_WIDTH / stride, sections);

    // Map the points back from response-column coordinates to pixel coordinates.
    for (size_t i = 0; i < sections.second.size(); i++)
        sections.second[i] *= stride;

    refineRegion(plateImageGray, sections.second, 5, Char_rects);
}

void PlateSegmentation::ExtractRegions(PlateInfo &plateInfo, std::vector<cv::Rect> &rects) {
    cv::Mat plateImage = plateInfo.getPlateImage();
    for (size_t i = 0; i < rects.size(); i++) {
        cv::Mat charImage;
        plateImage(rects[i]).copyTo(charImage);
        if (charImage.channels() == 3)
            cv::cvtColor(charImage, charImage, cv::COLOR_BGR2GRAY);
        cv::equalizeHist(charImage, charImage);

        std::pair<CharType, cv::Mat> char_instance;
        if (i == 0) {
            char_instance.first = CHINESE;     // first slot: province character
        } else if (i == 1) {
            char_instance.first = LETTER;      // second slot: letter only
        } else {
            char_instance.first = LETTER_NUMS; // remaining slots: letter or digit
        }
        char_instance.second = charImage;
        plateInfo.appendPlateChar(char_instance);
    }
}
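// End-to-end sketch of the segmentation stage (illustrative; assumes a
// PlateInfo produced by the upstream detector):
//
//   pr::PlateSegmentation seg("Segmentation.prototxt", "Segmentation.caffemodel");
//   std::vector<cv::Rect> charBoxes;
//   seg.segmentPlatePipline(plateInfo, 1, charBoxes); // find the 7 character boxes
//   seg.ExtractRegions(plateInfo, charBoxes);         // append equalized char crops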

}//namespace pr

+ 23
- 0
Prj-iOS/lpr/lpr/Source/src/Recognizer.cpp View File

@@ -0,0 +1,23 @@
//
// Created by Jack Yu on 22/10/2017.
//

#include "Recognizer.h"

namespace pr{
    void GeneralRecognizer::SegmentBasedSequenceRecognition(PlateInfo &plateinfo) {
        for (auto &char_instance : plateinfo.plateChars) {
            std::pair<CharType, cv::Mat> res;
            // Crops under ~40 px of area are too small to hold a legible character.
            if (char_instance.second.rows * char_instance.second.cols > 40) {
                label code_table = recognizeCharacter(char_instance.second);
                res.first = char_instance.first;
                code_table.copyTo(res.second);
                plateinfo.appendPlateCoding(res);
            } else {
                res.first = INVALID;
                plateinfo.appendPlateCoding(res);
            }
        }
    }
}
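// Usage sketch (illustrative): with a concrete GeneralRecognizer subclass (the
// character CNN), recognition appends one coding row per character; turning
// the rows into text is left to PlateInfo (method and table names assumed here).
//
//   recognizer.SegmentBasedSequenceRecognition(plateInfo);
//   std::string text = plateInfo.decodePlateNormal(mappingTable);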

+ 89
- 0
Prj-iOS/lpr/lpr/Source/src/SegmentationFreeRecognizer.cpp View File

@@ -0,0 +1,89 @@
//
// Created by Jack Yu on 28/11/2017.
//
#include "SegmentationFreeRecognizer.h"

namespace pr {
SegmentationFreeRecognizer::SegmentationFreeRecognizer(std::string prototxt, std::string caffemodel) {
net = cv::dnn::readNetFromCaffe(prototxt, caffemodel);
}
// True when the label id lies outside [31, 63]; used below to detect two
// consecutive head-character candidates produced by the greedy decoder.
inline bool judgeCharRange(int id) {
    return id < 31 || id > 63;
}
// Greedy CTC-style decoding with confidence scoring: take the argmax label at
// every time step, drop the blank label (the last class), collapse consecutive
// repeats, and average the per-character confidences. (thres is currently unused.)
std::pair<std::string, float> decodeResults(cv::Mat code_table, std::vector<std::string> mapping_table, float thres)
{
    cv::MatSize mtsize = code_table.size;
    int sequencelength = mtsize[2];
    int labellength = mtsize[1];
    cv::transpose(code_table.reshape(1, 1).reshape(1, labellength), code_table);
    std::string name = "";
    std::vector<int> seq(sequencelength);
    std::vector<std::pair<int, float> > seq_decode_res;
    for (int i = 0; i < sequencelength; i++) {
        float *fstart = (float *)code_table.data + i * labellength;
        int id = std::max_element(fstart, fstart + labellength) - fstart;
        seq[i] = id;
    }

    float sum_confidence = 0;
    int plate_length = 0;
    for (int i = 0; i < sequencelength; i++) {
        if (seq[i] != labellength - 1 && (i == 0 || seq[i] != seq[i - 1])) {
            float *fstart = (float *)code_table.data + i * labellength;
            float confidence = *(fstart + seq[i]);
            seq_decode_res.push_back(std::pair<int, float>(seq[i], confidence));
        }
    }

    int i = 0;
    if (seq_decode_res.size() > 1 && judgeCharRange(seq_decode_res[0].first) && judgeCharRange(seq_decode_res[1].first)) {
        // Two head-character candidates decoded back to back: keep only the
        // more confident one.
        i = 2;
        int c = seq_decode_res[0].second < seq_decode_res[1].second;
        name += mapping_table[seq_decode_res[c].first];
        sum_confidence += seq_decode_res[c].second;
        plate_length++;
    }

    for (; i < (int)seq_decode_res.size(); i++) {
        name += mapping_table[seq_decode_res[i].first];
        sum_confidence += seq_decode_res[i].second;
        plate_length++;
    }

    std::pair<std::string, float> res;
    res.second = sum_confidence / plate_length;
    res.first = name;
    return res;
}
// Same greedy decoding, without confidence bookkeeping.
std::string decodeResults(cv::Mat code_table, std::vector<std::string> mapping_table)
{
    cv::MatSize mtsize = code_table.size;
    int sequencelength = mtsize[2];
    int labellength = mtsize[1];
    cv::transpose(code_table.reshape(1, 1).reshape(1, labellength), code_table);
    std::string name = "";
    std::vector<int> seq(sequencelength);
    for (int i = 0; i < sequencelength; i++) {
        float *fstart = (float *)code_table.data + i * labellength;
        int id = std::max_element(fstart, fstart + labellength) - fstart;
        seq[i] = id;
    }
    for (int i = 0; i < sequencelength; i++) {
        if (seq[i] != labellength - 1 && (i == 0 || seq[i] != seq[i - 1]))
            name += mapping_table[seq[i]];
    }
    return name;
}
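// Worked trace of the greedy decoding (illustrative): with blank as the last
// label, the per-frame argmax sequence
//   blank 京 京 blank A blank 8 8 blank ...
// collapses to "京A8...": repeated labels merge, blanks are dropped.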
std::pair<std::string, float> SegmentationFreeRecognizer::SegmentationFreeForSinglePlate(cv::Mat Image, std::vector<std::string> mapping_table) {
    // The network consumes the plate rotated by 90 degrees (40x160 input).
    cv::transpose(Image, Image);
    cv::Mat inputBlob = cv::dnn::blobFromImage(Image, 1 / 255.0, cv::Size(40, 160));
    net.setInput(inputBlob, "data");
    cv::Mat char_prob_mat = net.forward();
    return decodeResults(char_prob_mat, mapping_table, 0.00);
}
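// Usage sketch (illustrative; file names come from the bundled Resource
// directory, and mappingTable must match the model's output classes):
//
//   pr::SegmentationFreeRecognizer rec("SegmenationFree-Inception.prototxt",
//                                      "SegmenationFree-Inception.caffemodel");
//   std::pair<std::string, float> res =
//       rec.SegmentationFreeForSinglePlate(plateMat, mappingTable);
//   // res.first: plate text, res.second: mean per-character confidence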
}

+ 67
- 0
Prj-iOS/lpr/lpr/Source/src/util.h View File

@@ -0,0 +1,67 @@
//
// Created by Jack Yu on 04/04/2017.
//

#pragma once
#include <opencv2/opencv.hpp>
namespace util{
    template <class T> void swap(T &a, T &b)
    {
        T c(a); a = b; b = c;
    }
    template <class T> T min(T &a, T &b)
    {
        return a > b ? b : a;
    }

    // Crop a rectangle from an image, clamping it to the image bounds first.
    inline cv::Mat cropFromImage(const cv::Mat &image, cv::Rect rect){
        int w = image.cols - 1;
        int h = image.rows - 1;
        rect.x = std::max(rect.x, 0);
        rect.y = std::max(rect.y, 0);
        rect.height = std::min(rect.height, h - rect.y);
        rect.width = std::min(rect.width, w - rect.x);
        cv::Mat cropped;
        image(rect).copyTo(cropped);
        return cropped;
    }

    // Deskew and crop a rotated rectangle: rotate the whole image so the rect
    // becomes axis-aligned, then cut it out with sub-pixel accuracy.
    inline cv::Mat cropBox2dFromImage(const cv::Mat &image, cv::RotatedRect rect)
    {
        cv::Mat M, rotated, cropped;
        float angle = rect.angle;
        cv::Size rect_size(rect.size.width, rect.size.height);
        if (rect.angle < -45.) {
            angle += 90.0;
            swap(rect_size.width, rect_size.height);
        }
        M = cv::getRotationMatrix2D(rect.center, angle, 1.0);
        cv::warpAffine(image, rotated, M, image.size(), cv::INTER_CUBIC);
        cv::getRectSubPix(rotated, rect_size, rect.center, cropped);
        return cropped;
    }
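    // A RotatedRect with angle in [-90, -45) describes the same region as one
    // rotated by angle + 90 with width and height swapped, so the branch above
    // keeps every deskew rotation under 45 degrees before the sub-pixel crop.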

    // Histogram of the hue channel (HSV), 256 bins.
    inline cv::Mat calcHist(const cv::Mat &image)
    {
        cv::Mat hsv;
        std::vector<cv::Mat> hsv_planes;
        cv::cvtColor(image, hsv, cv::COLOR_BGR2HSV);
        cv::split(hsv, hsv_planes);
        cv::Mat hist;
        int histSize = 256;
        float range[] = {0, 255};
        const float *histRange = {range};
        cv::calcHist(&hsv_planes[0], 1, 0, cv::Mat(), hist, 1, &histSize, &histRange, true, true);
        return hist;
    }
    // Similarity of two images as the correlation of their hue histograms.
    inline float computeSimilir(const cv::Mat &A, const cv::Mat &B)
    {
        cv::Mat histA, histB;
        histA = calcHist(A);
        histB = calcHist(B);
        return cv::compareHist(histA, histB, cv::HISTCMP_CORREL);
    }
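    // Usage sketch (illustrative): histogram correlation is 1.0 for identical
    // hue distributions and drops toward 0 as they diverge.
    //
    //   float sim = util::computeSimilir(plateA, plateB);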
}//namespace util

+ 30
- 0
Prj-iOS/lpr/lpr/Utility.h View File

@@ -0,0 +1,30 @@
//
// Utility.h
// lpr
//
// Created by baotim on 2018/10/26.
// Copyright © 2018 lprSample. All rights reserved.
//
#ifdef __cplusplus
#import <opencv2/opencv.hpp>
#endif
#ifdef __OBJC__
#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
#endif
using namespace cv;

NS_ASSUME_NONNULL_BEGIN

@interface Utility : NSObject

+ (cv::Mat)cvMatFromUIImage:(UIImage *)image;
+ (UIImage *)UIImageFromCVMat:(cv::Mat)image;
+ (UIImage *)scaleAndRotateImageFrontCamera:(UIImage *)image;
+ (UIImage *)scaleAndRotateImageBackCamera:(UIImage *)image;
+ (UIImage *)imageWithMat:(const cv::Mat&) image andImageOrientation: (UIImageOrientation) orientation;
+ (UIImage *)imageWithMat:(const cv::Mat&) image andDeviceOrientation: (UIDeviceOrientation) orientation;

@end

NS_ASSUME_NONNULL_END

+ 320
- 0
Prj-iOS/lpr/lpr/Utility.mm View File

@@ -0,0 +1,320 @@
//
// Utility.m
// lpr
//
// Created by baotim on 2018/10/26.
// Copyright © 2018 lprSample. All rights reserved.
//

#import "Utility.h"

@implementation Utility

+ (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat
{
NSData *data = [NSData dataWithBytes:cvMat.data length:cvMat.elemSize()*cvMat.total()];
CGColorSpaceRef colorSpace;
if (cvMat.elemSize() == 1) {
colorSpace = CGColorSpaceCreateDeviceGray();
} else {
colorSpace = CGColorSpaceCreateDeviceRGB();
}
CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
// Creating CGImage from cv::Mat
CGImageRef imageRef = CGImageCreate(cvMat.cols, //width
cvMat.rows, //height
8, //bits per component
8 * cvMat.elemSize(), //bits per pixel
cvMat.step[0], //bytesPerRow
colorSpace, //colorspace
kCGImageAlphaNone|kCGBitmapByteOrderDefault,// bitmap info
provider, //CGDataProviderRef
NULL, //decode
false, //should interpolate
kCGRenderingIntentDefault //intent
);
// Getting UIImage from CGImage
UIImage *finalImage = [UIImage imageWithCGImage:imageRef scale:1.0 orientation:UIImageOrientationUp];
CGImageRelease(imageRef);
CGDataProviderRelease(provider);
CGColorSpaceRelease(colorSpace);
return finalImage;
}

// Scale an image down to the working resolution and normalize its orientation (back camera).
+ (UIImage *)scaleAndRotateImageBackCamera:(UIImage *)image
{
static int kMaxResolution = 480;
CGImageRef imgRef = image.CGImage;
CGFloat width = CGImageGetWidth(imgRef);
CGFloat height = CGImageGetHeight(imgRef);
CGAffineTransform transform = CGAffineTransformIdentity;
CGRect bounds = CGRectMake(0, 0, width, height);
if (width > kMaxResolution || height > kMaxResolution) {
CGFloat ratio = width/height;
if (ratio > 1) {
bounds.size.width = kMaxResolution;
bounds.size.height = bounds.size.width / ratio;
} else {
bounds.size.height = kMaxResolution;
bounds.size.width = bounds.size.height * ratio;
}
}
CGFloat scaleRatio = bounds.size.width / width;
CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef));
CGFloat boundHeight;
UIImageOrientation orient = image.imageOrientation;
switch(orient) {
case UIImageOrientationUp:
transform = CGAffineTransformIdentity;
break;
case UIImageOrientationUpMirrored:
transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
break;
case UIImageOrientationDown:
transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationDownMirrored:
transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
transform = CGAffineTransformScale(transform, 1.0, -1.0);
break;
case UIImageOrientationLeftMirrored:
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationLeft:
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationRightMirrored:
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeScale(-1.0, 1.0);
transform = CGAffineTransformRotate(transform, M_PI / 2.0);
break;
case UIImageOrientationRight:
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0);
transform = CGAffineTransformRotate(transform, M_PI / 2.0);
break;
default:
[NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
}
UIGraphicsBeginImageContext(bounds.size);
CGContextRef context = UIGraphicsGetCurrentContext();
if (orient == UIImageOrientationRight || orient == UIImageOrientationLeft) {
CGContextScaleCTM(context, -scaleRatio, scaleRatio);
CGContextTranslateCTM(context, -height, 0);
} else {
CGContextScaleCTM(context, scaleRatio, -scaleRatio);
CGContextTranslateCTM(context, 0, -height);
}
CGContextConcatCTM(context, transform);
CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef);
UIImage *returnImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
NSLog(@"resize w%f,H%f",returnImage.size.width,returnImage.size.height);
return returnImage;
}

+ (UIImage*)imageWithMat:(const cv::Mat&) image andDeviceOrientation: (UIDeviceOrientation)orientation
{
UIImageOrientation imgOrientation = UIImageOrientationUp;
switch (orientation)
{
case UIDeviceOrientationLandscapeLeft:
imgOrientation =UIImageOrientationLeftMirrored; break;
case UIDeviceOrientationLandscapeRight:
imgOrientation = UIImageOrientationDown; break;
case UIDeviceOrientationPortraitUpsideDown:
imgOrientation = UIImageOrientationRightMirrored; break;
case UIDeviceOrientationFaceUp:
imgOrientation = UIImageOrientationRightMirrored; break;
default:
case UIDeviceOrientationPortrait:
imgOrientation = UIImageOrientationRight; break;
};
return [Utility imageWithMat:image andImageOrientation:imgOrientation];
}

+ (UIImage*)imageWithMat:(const cv::Mat&)image andImageOrientation:(UIImageOrientation)orientation
{
    cv::Mat rgbaView;
    if (image.channels() == 3)
    {
        cv::cvtColor(image, rgbaView, COLOR_BGR2BGRA);
    }
    else if (image.channels() == 4)
    {
        image.copyTo(rgbaView); // already 4-channel; a BGR2BGRA conversion would reject this input
    }
    else if (image.channels() == 1)
    {
        cv::cvtColor(image, rgbaView, COLOR_GRAY2RGBA);
    }
NSData *data = [NSData dataWithBytes:rgbaView.data length:rgbaView.elemSize() * rgbaView.total()];
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
CGBitmapInfo bmInfo = kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big;
// Creating CGImage from cv::Mat
CGImageRef imageRef = CGImageCreate(rgbaView.cols, //width
rgbaView.rows, //height
8, //bits per component
8 * rgbaView.elemSize(), //bits per pixel
rgbaView.step.p[0], //bytesPerRow
colorSpace, //colorspace
bmInfo,// bitmap info
provider, //CGDataProviderRef
NULL, //decode
false, //should interpolate
kCGRenderingIntentDefault //intent
);
// Getting UIImage from CGImage
UIImage *finalImage = [UIImage imageWithCGImage:imageRef scale:1 orientation:orientation];
CGImageRelease(imageRef);
CGDataProviderRelease(provider);
CGColorSpaceRelease(colorSpace);
return finalImage;
}

+ (cv::Mat)cvMatFromUIImage:(UIImage *)image
{
CGColorSpaceRef colorSpace = CGImageGetColorSpace(image.CGImage);
CGFloat cols = image.size.width;
CGFloat rows = image.size.height;
cv::Mat cvMat(rows, cols, CV_8UC4); // 8 bits per component, 4 channels
CGContextRef contextRef = CGBitmapContextCreate(cvMat.data, // Pointer to data
cols, // Width of bitmap
rows, // Height of bitmap
8, // Bits per component
cvMat.step[0], // Bytes per row
colorSpace, // Colorspace
kCGImageAlphaNoneSkipLast |
kCGBitmapByteOrderDefault); // Bitmap info flags
CGContextDrawImage(contextRef, CGRectMake(0, 0, cols, rows), image.CGImage);
CGContextRelease(contextRef);
CGColorSpaceRelease(colorSpace);
cv::Mat cvMat3(rows, cols, CV_8UC3); // 8 bits per component, 3 channels
cv::cvtColor(cvMat, cvMat3, COLOR_RGBA2RGB);
return cvMat3;
}
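// Round-trip sketch (illustrative): CoreGraphics renders the UIImage into a
// 4-channel buffer, which is reduced to a 3-channel cv::Mat above; the result
// can be converted back for display with UIImageFromCVMat:.
//
//   cv::Mat mat = [Utility cvMatFromUIImage:photo];
//   UIImage *roundTrip = [Utility UIImageFromCVMat:mat];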

+ (UIImage *)scaleAndRotateImageFrontCamera:(UIImage *)image
{
static int kMaxResolution = 640;
CGImageRef imgRef = image.CGImage;
CGFloat width = CGImageGetWidth(imgRef);
CGFloat height = CGImageGetHeight(imgRef);
CGAffineTransform transform = CGAffineTransformIdentity;
CGRect bounds = CGRectMake( 0, 0, width, height);
if (width > kMaxResolution || height > kMaxResolution) {
CGFloat ratio = width/height;
if (ratio > 1) {
bounds.size.width = kMaxResolution;
bounds.size.height = bounds.size.width / ratio;
} else {
bounds.size.height = kMaxResolution;
bounds.size.width = bounds.size.height * ratio;
}
}
CGFloat scaleRatio = bounds.size.width / width;
CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef));
CGFloat boundHeight;
UIImageOrientation orient = image.imageOrientation;
switch(orient) {
case UIImageOrientationUp:
transform = CGAffineTransformIdentity;
break;
case UIImageOrientationUpMirrored:
transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
break;
case UIImageOrientationDown:
transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationDownMirrored:
transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
transform = CGAffineTransformScale(transform, 1.0, -1.0);
break;
case UIImageOrientationLeftMirrored:
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationLeft:
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeScale(-1.0, 1.0);
transform = CGAffineTransformRotate(transform, M_PI / 2.0);
break;
default:
[NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
}
UIGraphicsBeginImageContext( bounds.size );
CGContextRef context = UIGraphicsGetCurrentContext();
if ( orient == UIImageOrientationRight || orient == UIImageOrientationLeft ) {
CGContextScaleCTM(context, -scaleRatio, scaleRatio);
CGContextTranslateCTM(context, -height, 0);
}
else {
CGContextScaleCTM(context, scaleRatio, -scaleRatio);
CGContextTranslateCTM(context, 0, -height);
}
CGContextConcatCTM( context, transform );
CGContextDrawImage( UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef );
UIImage *returnImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return returnImage;
}

@end

+ 16
- 0
Prj-iOS/lpr/lpr/main.m View File

@@ -0,0 +1,16 @@
//
// main.m
// lpr
//
// Created by baotim on 2018/10/26.
// Copyright © 2018 lprSample. All rights reserved.
//

#import <UIKit/UIKit.h>
#import "AppDelegate.h"

int main(int argc, char * argv[]) {
@autoreleasepool {
return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
}
}
