Browse Source

up e2e cpp

pull/1/MERGE
jackyu 6 years ago
parent
commit
e004f4e4e5
57 changed files with 501 additions and 27099 deletions
  1. +0
    -24
      Prj-IOS/include/CNNRecognizer.h
  2. +0
    -18
      Prj-IOS/include/FastDeskew.h
  3. +0
    -32
      Prj-IOS/include/FineMapping.h
  4. +0
    -48
      Prj-IOS/include/Pipeline.h
  5. +0
    -33
      Prj-IOS/include/PlateDetection.h
  6. +0
    -127
      Prj-IOS/include/PlateInfo.h
  7. +0
    -39
      Prj-IOS/include/PlateSegmentation.h
  8. +0
    -21
      Prj-IOS/include/Recognizer.h
  9. +0
    -107
      Prj-IOS/include/niBlackThreshold.h
  10. +0
    -435
      Prj-IOS/lp.xcodeproj/project.pbxproj
  11. +0
    -7
      Prj-IOS/lp.xcodeproj/project.xcworkspace/contents.xcworkspacedata
  12. BIN
      Prj-IOS/lp.xcodeproj/project.xcworkspace/xcuserdata/apple.xcuserdatad/UserInterfaceState.xcuserstate
  13. +0
    -5
      Prj-IOS/lp.xcodeproj/xcuserdata/apple.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist
  14. +0
    -14
      Prj-IOS/lp.xcodeproj/xcuserdata/apple.xcuserdatad/xcschemes/xcschememanagement.plist
  15. +0
    -17
      Prj-IOS/lp/AppDelegate.h
  16. +0
    -51
      Prj-IOS/lp/AppDelegate.m
  17. +0
    -98
      Prj-IOS/lp/Assets.xcassets/AppIcon.appiconset/Contents.json
  18. +0
    -25
      Prj-IOS/lp/Base.lproj/LaunchScreen.storyboard
  19. +0
    -28
      Prj-IOS/lp/Base.lproj/Main.storyboard
  20. +0
    -45
      Prj-IOS/lp/Info.plist
  21. +0
    -32
      Prj-IOS/lp/UIImageCVMatConverter.h
  22. +0
    -317
      Prj-IOS/lp/UIImageCVMatConverter.mm
  23. +0
    -40
      Prj-IOS/lp/ViewController.h
  24. +0
    -216
      Prj-IOS/lp/ViewController.mm
  25. +0
    -16
      Prj-IOS/lp/main.m
  26. +0
    -12117
      Prj-IOS/model.bundle/cascade.xml
  27. +0
    -19
      Prj-IOS/src/CNNRecognizer.cpp
  28. +0
    -133
      Prj-IOS/src/FastDeskew.cpp
  29. +0
    -205
      Prj-IOS/src/FineMapping.cpp
  30. +0
    -69
      Prj-IOS/src/Pipeline.cpp
  31. +0
    -61
      Prj-IOS/src/PlateDetection.cpp
  32. +0
    -402
      Prj-IOS/src/PlateSegmentation.cpp
  33. +0
    -26
      Prj-IOS/src/Recognizer.cpp
  34. +0
    -79
      Prj-IOS/src/util.h
  35. +6
    -2
      Prj-Linux/lpr/CMakeLists.txt
  36. BIN
      Prj-Linux/lpr/TRST_PIPLINE
  37. +17
    -2
      Prj-Linux/lpr/include/Pipeline.h
  38. +9
    -10
      Prj-Linux/lpr/include/PlateInfo.h
  39. +2
    -0
      Prj-Linux/lpr/include/Recognizer.h
  40. +28
    -0
      Prj-Linux/lpr/include/SegmentationFreeRecognizer.h
  41. BIN
      Prj-Linux/lpr/model/Archive.zip
  42. BIN
      Prj-Linux/lpr/res/cache/2.png
  43. BIN
      Prj-Linux/lpr/res/cache/chars_segment.jpg
  44. BIN
      Prj-Linux/lpr/res/cache/finemapping.jpg
  45. +7
    -7
      Prj-Linux/lpr/src/FineMapping.cpp
  46. +50
    -18
      Prj-Linux/lpr/src/Pipeline.cpp
  47. +4
    -4
      Prj-Linux/lpr/src/PlateDetection.cpp
  48. +16
    -14
      Prj-Linux/lpr/src/PlateSegmentation.cpp
  49. +10
    -7
      Prj-Linux/lpr/src/Recognizer.cpp
  50. +118
    -0
      Prj-Linux/lpr/src/SegmentationFreeRecognizer.cpp
  51. BIN
      Prj-Linux/lpr/test.png
  52. +169
    -11
      Prj-Linux/lpr/tests/test_pipeline.cpp
  53. +1
    -0
      Prj-Linux/lpr/tests/test_recognization.cpp
  54. +54
    -0
      Prj-Linux/lpr/tests/test_segmentationFree.cpp
  55. +6
    -0
      Prj-Linux/main.cpp
  56. +0
    -12117
      Prj-Linux/model/cascade.xml
  57. +4
    -1
      README.md

+ 0
- 24
Prj-IOS/include/CNNRecognizer.h View File

@@ -1,24 +0,0 @@
//
// Created by 庾金科 on 21/10/2017.
//

#ifndef SWIFTPR_CNNRECOGNIZER_H
#define SWIFTPR_CNNRECOGNIZER_H

#include "Recognizer.h"
namespace pr{
class CNNRecognizer: public GeneralRecognizer{
public:
const int CHAR_INPUT_W = 14;
const int CHAR_INPUT_H = 30;

CNNRecognizer(std::string prototxt,std::string caffemodel);
label recognizeCharacter(cv::Mat character);
private:
cv::dnn::Net net;

};

}

#endif //SWIFTPR_CNNRECOGNIZER_H

+ 0
- 18
Prj-IOS/include/FastDeskew.h View File

@@ -1,18 +0,0 @@
//
// Created by 庾金科 on 22/09/2017.
//

#ifndef SWIFTPR_FASTDESKEW_H
#define SWIFTPR_FASTDESKEW_H

#include <math.h>
#include <opencv2/opencv.hpp>
namespace pr{

cv::Mat fastdeskew(cv::Mat skewImage,int blockSize);
// cv::Mat spatialTransformer(cv::Mat skewImage);

}//namepace pr


#endif //SWIFTPR_FASTDESKEW_H

+ 0
- 32
Prj-IOS/include/FineMapping.h View File

@@ -1,32 +0,0 @@
//
// Created by 庾金科 on 22/09/2017.
//

#ifndef SWIFTPR_FINEMAPPING_H
#define SWIFTPR_FINEMAPPING_H

#include <opencv2/opencv.hpp>
#include <opencv2/dnn.hpp>

#include <string>
namespace pr{
class FineMapping{
public:
FineMapping();


FineMapping(std::string prototxt,std::string caffemodel);
static cv::Mat FineMappingVertical(cv::Mat InputProposal,int sliceNum=15,int upper=0,int lower=-50,int windows_size=17);
cv::Mat FineMappingHorizon(cv::Mat FinedVertical,int leftPadding,int rightPadding);


private:
cv::dnn::Net net;

};




}
#endif //SWIFTPR_FINEMAPPING_H

+ 0
- 48
Prj-IOS/include/Pipeline.h View File

@@ -1,48 +0,0 @@
//
// Created by 庾金科 on 22/10/2017.
//

#ifndef SWIFTPR_PIPLINE_H
#define SWIFTPR_PIPLINE_H

#include "PlateDetection.h"
#include "PlateSegmentation.h"
#include "CNNRecognizer.h"
#include "PlateInfo.h"
#include "FastDeskew.h"
#include "FineMapping.h"
#include "Recognizer.h"

namespace pr{
class PipelinePR{
public:
GeneralRecognizer *generalRecognizer;
PlateDetection *plateDetection;
PlateSegmentation *plateSegmentation;
FineMapping *fineMapping;
PipelinePR(std::string detector_filename,
std::string finemapping_prototxt,std::string finemapping_caffemodel,
std::string segmentation_prototxt,std::string segmentation_caffemodel,
std::string charRecognization_proto,std::string charRecognization_caffemodel
);
~PipelinePR();



std::vector<std::string> plateRes;
std::vector<PlateInfo> RunPiplineAsImage(cv::Mat plateImage);







};


}
#endif //SWIFTPR_PIPLINE_H

+ 0
- 33
Prj-IOS/include/PlateDetection.h View File

@@ -1,33 +0,0 @@
//
// Created by 庾金科 on 20/09/2017.
//

#ifndef SWIFTPR_PLATEDETECTION_H
#define SWIFTPR_PLATEDETECTION_H

#include <opencv2/opencv.hpp>
#include <PlateInfo.h>
#include <vector>
namespace pr{
class PlateDetection{
public:
PlateDetection(std::string filename_cascade);
PlateDetection();
void LoadModel(std::string filename_cascade);
void plateDetectionRough(cv::Mat InputImage,std::vector<pr::PlateInfo> &plateInfos,int min_w=36,int max_w=800);
// std::vector<pr::PlateInfo> plateDetectionRough(cv::Mat InputImage,int min_w= 60,int max_h = 400);


// std::vector<pr::PlateInfo> plateDetectionRoughByMultiScaleEdge(cv::Mat InputImage);



private:
cv::CascadeClassifier cascade;


};

}// namespace pr

#endif //SWIFTPR_PLATEDETECTION_H

+ 0
- 127
Prj-IOS/include/PlateInfo.h View File

@@ -1,127 +0,0 @@
//
// Created by 庾金科 on 20/09/2017.
//

#ifndef SWIFTPR_PLATEINFO_H
#define SWIFTPR_PLATEINFO_H
#include <opencv2/opencv.hpp>
namespace pr {

typedef std::vector<cv::Mat> Character;

enum PlateColor { BLUE, YELLOW, WHITE, GREEN, BLACK,UNKNOWN};
enum CharType {CHINESE,LETTER,LETTER_NUMS};


class PlateInfo {
public:
std::vector<std::pair<CharType,cv::Mat>> plateChars;
std::vector<std::pair<CharType,cv::Mat>> plateCoding;

float confidence = 0;


PlateInfo(const cv::Mat &plateData, std::string plateName, cv::Rect plateRect, PlateColor plateType) {
licensePlate = plateData;
name = plateName;
ROI = plateRect;
Type = plateType;
}
PlateInfo(const cv::Mat &plateData, cv::Rect plateRect, PlateColor plateType) {
licensePlate = plateData;
ROI = plateRect;
Type = plateType;
}
PlateInfo(const cv::Mat &plateData, cv::Rect plateRect) {
licensePlate = plateData;
ROI = plateRect;
}
PlateInfo() {

}

cv::Mat getPlateImage() {
return licensePlate;
}

void setPlateImage(cv::Mat plateImage){
licensePlate = plateImage;
}

cv::Rect getPlateRect() {
return ROI;
}

void setPlateRect(cv::Rect plateRect) {
ROI = plateRect;
}
cv::String getPlateName() {
return name;

}
void setPlateName(cv::String plateName) {
name = plateName;
}
int getPlateType() {
return Type;
}

void appendPlateChar(const std::pair<CharType,cv::Mat> &plateChar)
{
plateChars.push_back(plateChar);
}

void appendPlateCoding(const std::pair<CharType,cv::Mat> &charProb){
plateCoding.push_back(charProb);
}

// cv::Mat getPlateChars(int id) {
// if(id<PlateChars.size())
// return PlateChars[id];
// }
std::string decodePlateNormal(std::vector<std::string> mappingTable) {
std::string decode;
for(auto plate:plateCoding) {
float *prob = (float *)plate.second.data;
if(plate.first == CHINESE) {

decode += mappingTable[std::max_element(prob,prob+31) - prob];
confidence+=*std::max_element(prob,prob+31);


// std::cout<<*std::max_element(prob,prob+31)<<std::endl;

}

if(plate.first == LETTER) {
decode += mappingTable[std::max_element(prob+41,prob+65)- prob];
confidence+=*std::max_element(prob+41,prob+65);
}

if(plate.first == LETTER_NUMS) {
decode += mappingTable[std::max_element(prob+31,prob+65)- prob];
confidence+=*std::max_element(prob+31,prob+65);
// std::cout<<*std::max_element(prob+31,prob+65)<<std::endl;

}

}
name = decode;

confidence/=7;

return decode;
}



private:
cv::Mat licensePlate;
cv::Rect ROI;
std::string name;
PlateColor Type;
};
}


#endif //SWIFTPR_PLATEINFO_H

+ 0
- 39
Prj-IOS/include/PlateSegmentation.h View File

@@ -1,39 +0,0 @@
//
// Created by 庾金科 on 16/10/2017.
//

#ifndef SWIFTPR_PLATESEGMENTATION_H
#define SWIFTPR_PLATESEGMENTATION_H

#include "opencv2/opencv.hpp"
#include <opencv2/dnn.hpp>
#include "PlateInfo.h"

namespace pr{


class PlateSegmentation{
public:
const int PLATE_NORMAL = 6;
const int PLATE_NORMAL_GREEN = 7;
const int DEFAULT_WIDTH = 20;
PlateSegmentation(std::string phototxt,std::string caffemodel);
PlateSegmentation(){}
void segmentPlatePipline(PlateInfo &plateInfo,int stride,std::vector<cv::Rect> &Char_rects);

void segmentPlateBySlidingWindows(cv::Mat &plateImage,int windowsWidth,int stride,cv::Mat &respones);
void templateMatchFinding(const cv::Mat &respones,int windowsWidth,std::pair<float,std::vector<int>> &candidatePts);
void refineRegion(cv::Mat &plateImage,const std::vector<int> &candidatePts,const int padding,std::vector<cv::Rect> &rects);
void ExtractRegions(PlateInfo &plateInfo,std::vector<cv::Rect> &rects);
cv::Mat classifyResponse(const cv::Mat &cropped);
private:
cv::dnn::Net net;


// RefineRegion()

};

}//namespace pr

#endif //SWIFTPR_PLATESEGMENTATION_H

+ 0
- 21
Prj-IOS/include/Recognizer.h View File

@@ -1,21 +0,0 @@
//
// Created by 庾金科 on 20/10/2017.
//


#ifndef SWIFTPR_RECOGNIZER_H
#define SWIFTPR_RECOGNIZER_H

#include "PlateInfo.h"
#include "opencv2/dnn.hpp"
namespace pr{
typedef cv::Mat label;
class GeneralRecognizer{
public:
virtual label recognizeCharacter(cv::Mat character) = 0;
void SegmentBasedSequenceRecognition(PlateInfo &plateinfo);

};

}
#endif //SWIFTPR_RECOGNIZER_H

+ 0
- 107
Prj-IOS/include/niBlackThreshold.h View File

@@ -1,107 +0,0 @@
//
// Created by 庾金科 on 26/10/2017.
//

#ifndef SWIFTPR_NIBLACKTHRESHOLD_H
#define SWIFTPR_NIBLACKTHRESHOLD_H


#include <opencv2/opencv.hpp>
using namespace cv;

enum LocalBinarizationMethods{
BINARIZATION_NIBLACK = 0, //!< Classic Niblack binarization. See @cite Niblack1985 .
BINARIZATION_SAUVOLA = 1, //!< Sauvola's technique. See @cite Sauvola1997 .
BINARIZATION_WOLF = 2, //!< Wolf's technique. See @cite Wolf2004 .
BINARIZATION_NICK = 3 //!< NICK technique. See @cite Khurshid2009 .
};


void niBlackThreshold( InputArray _src, OutputArray _dst, double maxValue,
int type, int blockSize, double k, int binarizationMethod )
{
// Input grayscale image
Mat src = _src.getMat();
CV_Assert(src.channels() == 1);
CV_Assert(blockSize % 2 == 1 && blockSize > 1);
if (binarizationMethod == BINARIZATION_SAUVOLA) {
CV_Assert(src.depth() == CV_8U);
}
type &= THRESH_MASK;
// Compute local threshold (T = mean + k * stddev)
// using mean and standard deviation in the neighborhood of each pixel
// (intermediate calculations are done with floating-point precision)
Mat test;
Mat thresh;
{
// note that: Var[X] = E[X^2] - E[X]^2
Mat mean, sqmean, variance, stddev, sqrtVarianceMeanSum;
double srcMin, stddevMax;
boxFilter(src, mean, CV_32F, Size(blockSize, blockSize),
Point(-1,-1), true, BORDER_REPLICATE);
sqrBoxFilter(src, sqmean, CV_32F, Size(blockSize, blockSize),
Point(-1,-1), true, BORDER_REPLICATE);
variance = sqmean - mean.mul(mean);
sqrt(variance, stddev);
switch (binarizationMethod)
{
case BINARIZATION_NIBLACK:
thresh = mean + stddev * static_cast<float>(k);

break;
case BINARIZATION_SAUVOLA:
thresh = mean.mul(1. + static_cast<float>(k) * (stddev / 128.0 - 1.));
break;
case BINARIZATION_WOLF:
minMaxIdx(src, &srcMin,NULL);
minMaxIdx(stddev, NULL, &stddevMax);
thresh = mean - static_cast<float>(k) * (mean - srcMin - stddev.mul(mean - srcMin) / stddevMax);
break;
case BINARIZATION_NICK:
sqrt(variance + sqmean, sqrtVarianceMeanSum);
thresh = mean + static_cast<float>(k) * sqrtVarianceMeanSum;
break;
default:
CV_Error( CV_StsBadArg, "Unknown binarization method" );
break;
}
thresh.convertTo(thresh, src.depth());

thresh.convertTo(test, src.depth());
//
// cv::imshow("imagex",test);
// cv::waitKey(0);

}
// Prepare output image
_dst.create(src.size(), src.type());
Mat dst = _dst.getMat();
CV_Assert(src.data != dst.data); // no inplace processing
// Apply thresholding: ( pixel > threshold ) ? foreground : background
Mat mask;
switch (type)
{
case THRESH_BINARY: // dst = (src > thresh) ? maxval : 0
case THRESH_BINARY_INV: // dst = (src > thresh) ? 0 : maxval
compare(src, thresh, mask, (type == THRESH_BINARY ? CMP_GT : CMP_LE));
dst.setTo(0);
dst.setTo(maxValue, mask);
break;
case THRESH_TRUNC: // dst = (src > thresh) ? thresh : src
compare(src, thresh, mask, CMP_GT);
src.copyTo(dst);
thresh.copyTo(dst, mask);
break;
case THRESH_TOZERO: // dst = (src > thresh) ? src : 0
case THRESH_TOZERO_INV: // dst = (src > thresh) ? 0 : src
compare(src, thresh, mask, (type == THRESH_TOZERO ? CMP_GT : CMP_LE));
dst.setTo(0);
src.copyTo(dst, mask);
break;
default:
CV_Error( CV_StsBadArg, "Unknown threshold type" );
break;
}
}

#endif //SWIFTPR_NIBLACKTHRESHOLD_H

+ 0
- 435
Prj-IOS/lp.xcodeproj/project.pbxproj View File

@@ -1,435 +0,0 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 48;
objects = {

/* Begin PBXBuildFile section */
18034F491FD28DF500787983 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 18034F481FD28DF500787983 /* AppDelegate.m */; };
18034F4C1FD28DF500787983 /* ViewController.mm in Sources */ = {isa = PBXBuildFile; fileRef = 18034F4B1FD28DF500787983 /* ViewController.mm */; };
18034F4F1FD28DF500787983 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 18034F4D1FD28DF500787983 /* Main.storyboard */; };
18034F511FD28DF500787983 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 18034F501FD28DF500787983 /* Assets.xcassets */; };
18034F541FD28DF500787983 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 18034F521FD28DF500787983 /* LaunchScreen.storyboard */; };
18034F571FD28DF500787983 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 18034F561FD28DF500787983 /* main.m */; };
18034F5E1FD299EE00787983 /* opencv2.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 18034F5D1FD299EE00787983 /* opencv2.framework */; };
1820B1D81FD300D3003CFE6C /* UIImageCVMatConverter.mm in Sources */ = {isa = PBXBuildFile; fileRef = 1820B1D71FD300D3003CFE6C /* UIImageCVMatConverter.mm */; };
18CEE3D51FD5AA8300CC138A /* CNNRecognizer.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 18CEE3D41FD5AA8300CC138A /* CNNRecognizer.cpp */; };
18CEE3DC1FD5AA9600CC138A /* FastDeskew.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 18CEE3D61FD5AA9500CC138A /* FastDeskew.cpp */; };
18CEE3DD1FD5AA9600CC138A /* PlateSegmentation.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 18CEE3D71FD5AA9600CC138A /* PlateSegmentation.cpp */; };
18CEE3DE1FD5AA9600CC138A /* Recognizer.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 18CEE3D81FD5AA9600CC138A /* Recognizer.cpp */; };
18CEE3DF1FD5AA9600CC138A /* FineMapping.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 18CEE3D91FD5AA9600CC138A /* FineMapping.cpp */; };
18CEE3E01FD5AA9600CC138A /* Pipeline.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 18CEE3DA1FD5AA9600CC138A /* Pipeline.cpp */; };
18CEE3E11FD5AA9600CC138A /* PlateDetection.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 18CEE3DB1FD5AA9600CC138A /* PlateDetection.cpp */; };
18CEE3EE1FD5ACFA00CC138A /* model.bundle in Resources */ = {isa = PBXBuildFile; fileRef = 18CEE3ED1FD5ACFA00CC138A /* model.bundle */; };
/* End PBXBuildFile section */

/* Begin PBXFileReference section */
18034F441FD28DF500787983 /* lp.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = lp.app; sourceTree = BUILT_PRODUCTS_DIR; };
18034F471FD28DF500787983 /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; };
18034F481FD28DF500787983 /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = "<group>"; };
18034F4A1FD28DF500787983 /* ViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = "<group>"; };
18034F4B1FD28DF500787983 /* ViewController.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = ViewController.mm; sourceTree = "<group>"; };
18034F4E1FD28DF500787983 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
18034F501FD28DF500787983 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
18034F531FD28DF500787983 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
18034F551FD28DF500787983 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
18034F561FD28DF500787983 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = "<group>"; };
18034F5D1FD299EE00787983 /* opencv2.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; path = opencv2.framework; sourceTree = "<group>"; };
1820B1D61FD300D3003CFE6C /* UIImageCVMatConverter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = UIImageCVMatConverter.h; sourceTree = "<group>"; };
1820B1D71FD300D3003CFE6C /* UIImageCVMatConverter.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = UIImageCVMatConverter.mm; sourceTree = "<group>"; };
18CEE3D41FD5AA8300CC138A /* CNNRecognizer.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = CNNRecognizer.cpp; path = src/CNNRecognizer.cpp; sourceTree = SOURCE_ROOT; };
18CEE3D61FD5AA9500CC138A /* FastDeskew.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = FastDeskew.cpp; path = src/FastDeskew.cpp; sourceTree = SOURCE_ROOT; };
18CEE3D71FD5AA9600CC138A /* PlateSegmentation.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = PlateSegmentation.cpp; path = src/PlateSegmentation.cpp; sourceTree = SOURCE_ROOT; };
18CEE3D81FD5AA9600CC138A /* Recognizer.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = Recognizer.cpp; path = src/Recognizer.cpp; sourceTree = SOURCE_ROOT; };
18CEE3D91FD5AA9600CC138A /* FineMapping.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = FineMapping.cpp; path = src/FineMapping.cpp; sourceTree = SOURCE_ROOT; };
18CEE3DA1FD5AA9600CC138A /* Pipeline.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = Pipeline.cpp; path = src/Pipeline.cpp; sourceTree = SOURCE_ROOT; };
18CEE3DB1FD5AA9600CC138A /* PlateDetection.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = PlateDetection.cpp; path = src/PlateDetection.cpp; sourceTree = SOURCE_ROOT; };
18CEE3E21FD5AAE900CC138A /* util.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = util.h; path = src/util.h; sourceTree = SOURCE_ROOT; };
18CEE3E41FD5AB3800CC138A /* FastDeskew.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = FastDeskew.h; path = include/FastDeskew.h; sourceTree = SOURCE_ROOT; };
18CEE3E51FD5AB3800CC138A /* PlateDetection.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = PlateDetection.h; path = include/PlateDetection.h; sourceTree = SOURCE_ROOT; };
18CEE3E61FD5AB3800CC138A /* niBlackThreshold.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = niBlackThreshold.h; path = include/niBlackThreshold.h; sourceTree = SOURCE_ROOT; };
18CEE3E71FD5AB3800CC138A /* Pipeline.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = Pipeline.h; path = include/Pipeline.h; sourceTree = SOURCE_ROOT; };
18CEE3E81FD5AB3800CC138A /* PlateInfo.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = PlateInfo.h; path = include/PlateInfo.h; sourceTree = SOURCE_ROOT; };
18CEE3E91FD5AB3800CC138A /* PlateSegmentation.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = PlateSegmentation.h; path = include/PlateSegmentation.h; sourceTree = SOURCE_ROOT; };
18CEE3EA1FD5AB3800CC138A /* CNNRecognizer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = CNNRecognizer.h; path = include/CNNRecognizer.h; sourceTree = SOURCE_ROOT; };
18CEE3EB1FD5AB3800CC138A /* FineMapping.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = FineMapping.h; path = include/FineMapping.h; sourceTree = SOURCE_ROOT; };
18CEE3EC1FD5AB3900CC138A /* Recognizer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = Recognizer.h; path = include/Recognizer.h; sourceTree = SOURCE_ROOT; };
18CEE3ED1FD5ACFA00CC138A /* model.bundle */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.plug-in"; path = model.bundle; sourceTree = "<group>"; };
/* End PBXFileReference section */

/* Begin PBXFrameworksBuildPhase section */
18034F411FD28DF500787983 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
18034F5E1FD299EE00787983 /* opencv2.framework in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */

/* Begin PBXGroup section */
18034F3B1FD28DF500787983 = {
isa = PBXGroup;
children = (
18CEE3ED1FD5ACFA00CC138A /* model.bundle */,
18CEE3E31FD5AB2600CC138A /* include */,
18CEE3D31FD5AA5C00CC138A /* src */,
18034F5D1FD299EE00787983 /* opencv2.framework */,
18034F461FD28DF500787983 /* lp */,
18034F451FD28DF500787983 /* Products */,
1820B1D01FD2F9EB003CFE6C /* Frameworks */,
);
sourceTree = "<group>";
};
18034F451FD28DF500787983 /* Products */ = {
isa = PBXGroup;
children = (
18034F441FD28DF500787983 /* lp.app */,
);
name = Products;
sourceTree = "<group>";
};
18034F461FD28DF500787983 /* lp */ = {
isa = PBXGroup;
children = (
1820B1D61FD300D3003CFE6C /* UIImageCVMatConverter.h */,
1820B1D71FD300D3003CFE6C /* UIImageCVMatConverter.mm */,
18034F471FD28DF500787983 /* AppDelegate.h */,
18034F481FD28DF500787983 /* AppDelegate.m */,
18034F4A1FD28DF500787983 /* ViewController.h */,
18034F4B1FD28DF500787983 /* ViewController.mm */,
18034F4D1FD28DF500787983 /* Main.storyboard */,
18034F501FD28DF500787983 /* Assets.xcassets */,
18034F521FD28DF500787983 /* LaunchScreen.storyboard */,
18034F551FD28DF500787983 /* Info.plist */,
18034F561FD28DF500787983 /* main.m */,
);
path = lp;
sourceTree = "<group>";
};
1820B1D01FD2F9EB003CFE6C /* Frameworks */ = {
isa = PBXGroup;
children = (
);
name = Frameworks;
sourceTree = "<group>";
};
18CEE3D31FD5AA5C00CC138A /* src */ = {
isa = PBXGroup;
children = (
18CEE3E21FD5AAE900CC138A /* util.h */,
18CEE3D61FD5AA9500CC138A /* FastDeskew.cpp */,
18CEE3D91FD5AA9600CC138A /* FineMapping.cpp */,
18CEE3DA1FD5AA9600CC138A /* Pipeline.cpp */,
18CEE3DB1FD5AA9600CC138A /* PlateDetection.cpp */,
18CEE3D71FD5AA9600CC138A /* PlateSegmentation.cpp */,
18CEE3D81FD5AA9600CC138A /* Recognizer.cpp */,
18CEE3D41FD5AA8300CC138A /* CNNRecognizer.cpp */,
);
name = src;
path = "New Group";
sourceTree = "<group>";
};
18CEE3E31FD5AB2600CC138A /* include */ = {
isa = PBXGroup;
children = (
18CEE3EA1FD5AB3800CC138A /* CNNRecognizer.h */,
18CEE3E41FD5AB3800CC138A /* FastDeskew.h */,
18CEE3EB1FD5AB3800CC138A /* FineMapping.h */,
18CEE3E61FD5AB3800CC138A /* niBlackThreshold.h */,
18CEE3E71FD5AB3800CC138A /* Pipeline.h */,
18CEE3E51FD5AB3800CC138A /* PlateDetection.h */,
18CEE3E81FD5AB3800CC138A /* PlateInfo.h */,
18CEE3E91FD5AB3800CC138A /* PlateSegmentation.h */,
18CEE3EC1FD5AB3900CC138A /* Recognizer.h */,
);
name = include;
path = "New Group1";
sourceTree = "<group>";
};
/* End PBXGroup section */

/* Begin PBXNativeTarget section */
18034F431FD28DF500787983 /* lp */ = {
isa = PBXNativeTarget;
buildConfigurationList = 18034F5A1FD28DF500787983 /* Build configuration list for PBXNativeTarget "lp" */;
buildPhases = (
18034F401FD28DF500787983 /* Sources */,
18034F411FD28DF500787983 /* Frameworks */,
18034F421FD28DF500787983 /* Resources */,
);
buildRules = (
);
dependencies = (
);
name = lp;
productName = lp;
productReference = 18034F441FD28DF500787983 /* lp.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */

/* Begin PBXProject section */
18034F3C1FD28DF500787983 /* Project object */ = {
isa = PBXProject;
attributes = {
LastUpgradeCheck = 0910;
ORGANIZATIONNAME = xiaojun;
TargetAttributes = {
18034F431FD28DF500787983 = {
CreatedOnToolsVersion = 9.1;
ProvisioningStyle = Automatic;
};
};
};
buildConfigurationList = 18034F3F1FD28DF500787983 /* Build configuration list for PBXProject "lp" */;
compatibilityVersion = "Xcode 8.0";
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = 18034F3B1FD28DF500787983;
productRefGroup = 18034F451FD28DF500787983 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
18034F431FD28DF500787983 /* lp */,
);
};
/* End PBXProject section */

/* Begin PBXResourcesBuildPhase section */
18034F421FD28DF500787983 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
18034F541FD28DF500787983 /* LaunchScreen.storyboard in Resources */,
18CEE3EE1FD5ACFA00CC138A /* model.bundle in Resources */,
18034F511FD28DF500787983 /* Assets.xcassets in Resources */,
18034F4F1FD28DF500787983 /* Main.storyboard in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */

/* Begin PBXSourcesBuildPhase section */
18034F401FD28DF500787983 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
18CEE3DF1FD5AA9600CC138A /* FineMapping.cpp in Sources */,
1820B1D81FD300D3003CFE6C /* UIImageCVMatConverter.mm in Sources */,
18034F4C1FD28DF500787983 /* ViewController.mm in Sources */,
18CEE3DD1FD5AA9600CC138A /* PlateSegmentation.cpp in Sources */,
18CEE3DC1FD5AA9600CC138A /* FastDeskew.cpp in Sources */,
18CEE3D51FD5AA8300CC138A /* CNNRecognizer.cpp in Sources */,
18034F571FD28DF500787983 /* main.m in Sources */,
18CEE3E11FD5AA9600CC138A /* PlateDetection.cpp in Sources */,
18CEE3E01FD5AA9600CC138A /* Pipeline.cpp in Sources */,
18034F491FD28DF500787983 /* AppDelegate.m in Sources */,
18CEE3DE1FD5AA9600CC138A /* Recognizer.cpp in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */

/* Begin PBXVariantGroup section */
18034F4D1FD28DF500787983 /* Main.storyboard */ = {
isa = PBXVariantGroup;
children = (
18034F4E1FD28DF500787983 /* Base */,
);
name = Main.storyboard;
sourceTree = "<group>";
};
18034F521FD28DF500787983 /* LaunchScreen.storyboard */ = {
isa = PBXVariantGroup;
children = (
18034F531FD28DF500787983 /* Base */,
);
name = LaunchScreen.storyboard;
sourceTree = "<group>";
};
/* End PBXVariantGroup section */

/* Begin XCBuildConfiguration section */
18034F581FD28DF500787983 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
CODE_SIGN_IDENTITY = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_DYNAMIC_NO_PIC = NO;
GCC_INPUT_FILETYPE = automatic;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
"HEADER_SEARCH_PATHS[arch=*]" = (
"\"$(SRCROOT)/include/\"",
"\"$(SRCROOT)\"",
);
IPHONEOS_DEPLOYMENT_TARGET = 11.1;
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
};
name = Debug;
};
18034F591FD28DF500787983 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
CODE_SIGN_IDENTITY = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_INPUT_FILETYPE = automatic;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
"HEADER_SEARCH_PATHS[arch=*]" = (
"\"$(SRCROOT)/include/\"",
"\"$(SRCROOT)\"",
);
IPHONEOS_DEPLOYMENT_TARGET = 11.1;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
VALIDATE_PRODUCT = YES;
};
name = Release;
};
18034F5B1FD28DF500787983 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_STYLE = Automatic;
DEVELOPMENT_TEAM = V74HML5NEB;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)",
);
INFOPLIST_FILE = lp/Info.plist;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = tjs.lp;
PRODUCT_NAME = "$(TARGET_NAME)";
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
18034F5C1FD28DF500787983 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_STYLE = Automatic;
DEVELOPMENT_TEAM = V74HML5NEB;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)",
);
INFOPLIST_FILE = lp/Info.plist;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = tjs.lp;
PRODUCT_NAME = "$(TARGET_NAME)";
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Release;
};
/* End XCBuildConfiguration section */

/* Begin XCConfigurationList section */
18034F3F1FD28DF500787983 /* Build configuration list for PBXProject "lp" */ = {
isa = XCConfigurationList;
buildConfigurations = (
18034F581FD28DF500787983 /* Debug */,
18034F591FD28DF500787983 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
18034F5A1FD28DF500787983 /* Build configuration list for PBXNativeTarget "lp" */ = {
isa = XCConfigurationList;
buildConfigurations = (
18034F5B1FD28DF500787983 /* Debug */,
18034F5C1FD28DF500787983 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 18034F3C1FD28DF500787983 /* Project object */;
}

+ 0
- 7
Prj-IOS/lp.xcodeproj/project.xcworkspace/contents.xcworkspacedata View File

@@ -1,7 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:lp.xcodeproj">
</FileRef>
</Workspace>

BIN
Prj-IOS/lp.xcodeproj/project.xcworkspace/xcuserdata/apple.xcuserdatad/UserInterfaceState.xcuserstate View File


+ 0
- 5
Prj-IOS/lp.xcodeproj/xcuserdata/apple.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist View File

@@ -1,5 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<Bucket
type = "1"
version = "2.0">
</Bucket>

+ 0
- 14
Prj-IOS/lp.xcodeproj/xcuserdata/apple.xcuserdatad/xcschemes/xcschememanagement.plist View File

@@ -1,14 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>lp.xcscheme</key>
<dict>
<key>orderHint</key>
<integer>0</integer>
</dict>
</dict>
</dict>
</plist>

+ 0
- 17
Prj-IOS/lp/AppDelegate.h View File

@@ -1,17 +0,0 @@
//
// AppDelegate.h
// lp
//
// Created by xiaojun on 2017/12/2.
// Copyright © 2017年 xiaojun. All rights reserved.
//

#import <UIKit/UIKit.h>

@interface AppDelegate : UIResponder <UIApplicationDelegate>

@property (strong, nonatomic) UIWindow *window;


@end


+ 0
- 51
Prj-IOS/lp/AppDelegate.m View File

@@ -1,51 +0,0 @@
//
// AppDelegate.m
// lp
//
// Created by xiaojun on 2017/12/2.
// Copyright © 2017年 xiaojun. All rights reserved.
//

#import "AppDelegate.h"

@interface AppDelegate ()

@end

@implementation AppDelegate


- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
// Override point for customization after application launch.
return YES;
}


- (void)applicationWillResignActive:(UIApplication *)application {
// Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
// Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game.
}


- (void)applicationDidEnterBackground:(UIApplication *)application {
// Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
// If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
}


- (void)applicationWillEnterForeground:(UIApplication *)application {
// Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background.
}


- (void)applicationDidBecomeActive:(UIApplication *)application {
// Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
}


- (void)applicationWillTerminate:(UIApplication *)application {
// Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
}


@end

+ 0
- 98
Prj-IOS/lp/Assets.xcassets/AppIcon.appiconset/Contents.json View File

@@ -1,98 +0,0 @@
{
"images" : [
{
"idiom" : "iphone",
"size" : "20x20",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "20x20",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "29x29",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "29x29",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "40x40",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "40x40",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "60x60",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "60x60",
"scale" : "3x"
},
{
"idiom" : "ipad",
"size" : "20x20",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "20x20",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "29x29",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "29x29",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "40x40",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "40x40",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "76x76",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "76x76",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "83.5x83.5",
"scale" : "2x"
},
{
"idiom" : "ios-marketing",
"size" : "1024x1024",
"scale" : "1x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

+ 0
- 25
Prj-IOS/lp/Base.lproj/LaunchScreen.storyboard View File

@@ -1,25 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="13122.16" systemVersion="17A277" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13104.12"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="EHf-IW-A2E">
<objects>
<viewController id="01J-lp-oVM" sceneMemberID="viewController">
<view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="53" y="375"/>
</scene>
</scenes>
</document>

+ 0
- 28
Prj-IOS/lp/Base.lproj/Main.storyboard View File

@@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="13529" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
<device id="retina4_0" orientation="portrait">
<adaptation id="fullscreen"/>
</device>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13527"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="tne-QT-ifu">
<objects>
<viewController id="BYZ-38-t0r" customClass="ViewController" sceneMemberID="viewController">
<view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
<rect key="frame" x="0.0" y="0.0" width="320" height="568"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
</objects>
</scene>
</scenes>
</document>

+ 0
- 45
Prj-IOS/lp/Info.plist View File

@@ -1,45 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>$(DEVELOPMENT_LANGUAGE)</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>UILaunchStoryboardName</key>
<string>LaunchScreen</string>
<key>UIMainStoryboardFile</key>
<string>Main</string>
<key>UIRequiredDeviceCapabilities</key>
<array>
<string>armv7</string>
</array>
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UISupportedInterfaceOrientations~ipad</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationPortraitUpsideDown</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
</dict>
</plist>

+ 0
- 32
Prj-IOS/lp/UIImageCVMatConverter.h View File

@@ -1,32 +0,0 @@
//
// UIImageCVMatConverter.h
// lp
//
// Created by xiaojun on 2017/12/2.
// Copyright © 2017年 xiaojun. All rights reserved.
//

#ifndef UIImageCVMatConverter_h
#define UIImageCVMatConverter_h

#ifdef __cplusplus
#import <opencv2/opencv.hpp>
#endif
#ifdef __OBJC__
#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
#endif
using namespace cv;

@interface UIImageCVMatConverter : NSObject

+ (cv::Mat)cvMatFromUIImage:(UIImage *)image;
+ (UIImage *)UIImageFromCVMat:(cv::Mat)image;
+ (UIImage *)scaleAndRotateImageFrontCamera:(UIImage *)image;
+ (UIImage *)scaleAndRotateImageBackCamera:(UIImage *)image;
+(UIImage*) imageWithMat:(const cv::Mat&) image andImageOrientation: (UIImageOrientation) orientation;
+(UIImage*) imageWithMat:(const cv::Mat&) image andDeviceOrientation: (UIDeviceOrientation) orientation;

@end

#endif /* UIImageCVMatConverter_h */

+ 0
- 317
Prj-IOS/lp/UIImageCVMatConverter.mm View File

@@ -1,317 +0,0 @@
//
// UIImageCVMatConverter.m
// lp
//
// Created by xiaojun on 2017/12/2.
// Copyright © 2017年 xiaojun. All rights reserved.
//


#include "UIImageCVMatConverter.h"
@implementation UIImageCVMatConverter

+(UIImage *)UIImageFromCVMat:(cv::Mat)cvMat{
NSData *data = [NSData dataWithBytes:cvMat.data length:cvMat.elemSize()*cvMat.total()];
CGColorSpaceRef colorSpace;
if (cvMat.elemSize() == 1) {
colorSpace = CGColorSpaceCreateDeviceGray();
} else {
colorSpace = CGColorSpaceCreateDeviceRGB();
}
CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
// Creating CGImage from cv::Mat
CGImageRef imageRef = CGImageCreate(cvMat.cols, //width
cvMat.rows, //height
8, //bits per component
8 * cvMat.elemSize(), //bits per pixel
cvMat.step[0], //bytesPerRow
colorSpace, //colorspace
kCGImageAlphaNone|kCGBitmapByteOrderDefault,// bitmap info
provider, //CGDataProviderRef
NULL, //decode
false, //should interpolate
kCGRenderingIntentDefault //intent
);
// Getting UIImage from CGImage
UIImage *finalImage = [UIImage imageWithCGImage:imageRef scale:1.0 orientation:UIImageOrientationUp];
CGImageRelease(imageRef);
CGDataProviderRelease(provider);
CGColorSpaceRelease(colorSpace);
return finalImage;
}
//缩放调整图片
+ (UIImage *)scaleAndRotateImageBackCamera:(UIImage *)image
{
static int kMaxResolution = 480;
CGImageRef imgRef = image.CGImage;
CGFloat width = CGImageGetWidth(imgRef);
CGFloat height = CGImageGetHeight(imgRef);
CGAffineTransform transform = CGAffineTransformIdentity;
CGRect bounds = CGRectMake(0, 0, width, height);
if (width > kMaxResolution || height > kMaxResolution) {
CGFloat ratio = width/height;
if (ratio > 1) {
bounds.size.width = kMaxResolution;
bounds.size.height = bounds.size.width / ratio;
} else {
bounds.size.height = kMaxResolution;
bounds.size.width = bounds.size.height * ratio;
}
}
CGFloat scaleRatio = bounds.size.width / width;
CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef));
CGFloat boundHeight;
UIImageOrientation orient = image.imageOrientation;
switch(orient) {
case UIImageOrientationUp:
transform = CGAffineTransformIdentity;
break;
case UIImageOrientationUpMirrored:
transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
break;
case UIImageOrientationDown:
transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationDownMirrored:
transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
transform = CGAffineTransformScale(transform, 1.0, -1.0);
break;
case UIImageOrientationLeftMirrored:
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationLeft:
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationRightMirrored:
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeScale(-1.0, 1.0);
transform = CGAffineTransformRotate(transform, M_PI / 2.0);
break;
case UIImageOrientationRight:
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0);
transform = CGAffineTransformRotate(transform, M_PI / 2.0);
break;
default:
[NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
}
UIGraphicsBeginImageContext(bounds.size);
CGContextRef context = UIGraphicsGetCurrentContext();
if (orient == UIImageOrientationRight || orient == UIImageOrientationLeft) {
CGContextScaleCTM(context, -scaleRatio, scaleRatio);
CGContextTranslateCTM(context, -height, 0);
} else {
CGContextScaleCTM(context, scaleRatio, -scaleRatio);
CGContextTranslateCTM(context, 0, -height);
}
CGContextConcatCTM(context, transform);
CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef);
UIImage *returnImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
NSLog(@"resize w%f,H%f",returnImage.size.width,returnImage.size.height);
return returnImage;
}

+(UIImage*) imageWithMat:(const cv::Mat&) image andDeviceOrientation: (UIDeviceOrientation) orientation
{
UIImageOrientation imgOrientation = UIImageOrientationUp;
switch (orientation)
{
case UIDeviceOrientationLandscapeLeft:
imgOrientation =UIImageOrientationLeftMirrored; break;
case UIDeviceOrientationLandscapeRight:
imgOrientation = UIImageOrientationDown; break;
case UIDeviceOrientationPortraitUpsideDown:
imgOrientation = UIImageOrientationRightMirrored; break;
case UIDeviceOrientationFaceUp:
imgOrientation = UIImageOrientationRightMirrored; break;
default:
case UIDeviceOrientationPortrait:
imgOrientation = UIImageOrientationRight; break;
};
return [UIImageCVMatConverter imageWithMat:image andImageOrientation:imgOrientation];
}

+(UIImage*) imageWithMat:(const cv::Mat&) image andImageOrientation: (UIImageOrientation) orientation;
{
cv::Mat rgbaView;
if (image.channels() == 3)
{
cv::cvtColor(image, rgbaView, COLOR_BGR2BGRA);
}
else if (image.channels() == 4)
{
cv::cvtColor(image, rgbaView, COLOR_BGR2BGRA);
}
else if (image.channels() == 1)
{
cv::cvtColor(image, rgbaView, COLOR_GRAY2RGBA);
}
NSData *data = [NSData dataWithBytes:rgbaView.data length:rgbaView.elemSize() * rgbaView.total()];
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
CGBitmapInfo bmInfo = kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big;
// Creating CGImage from cv::Mat
CGImageRef imageRef = CGImageCreate(rgbaView.cols, //width
rgbaView.rows, //height
8, //bits per component
8 * rgbaView.elemSize(), //bits per pixel
rgbaView.step.p[0], //bytesPerRow
colorSpace, //colorspace
bmInfo,// bitmap info
provider, //CGDataProviderRef
NULL, //decode
false, //should interpolate
kCGRenderingIntentDefault //intent
);
// Getting UIImage from CGImage
UIImage *finalImage = [UIImage imageWithCGImage:imageRef scale:1 orientation:orientation];
CGImageRelease(imageRef);
CGDataProviderRelease(provider);
CGColorSpaceRelease(colorSpace);
return finalImage;
}


+ (cv::Mat)cvMatFromUIImage:(UIImage *)image
{
CGColorSpaceRef colorSpace = CGImageGetColorSpace(image.CGImage);
CGFloat cols = image.size.width;
CGFloat rows = image.size.height;
cv::Mat cvMat(rows, cols, CV_8UC4); // 8 bits per component, 4 channels
CGContextRef contextRef = CGBitmapContextCreate(cvMat.data, // Pointer to data
cols, // Width of bitmap
rows, // Height of bitmap
8, // Bits per component
cvMat.step[0], // Bytes per row
colorSpace, // Colorspace
kCGImageAlphaNoneSkipLast |
kCGBitmapByteOrderDefault); // Bitmap info flags
CGContextDrawImage(contextRef, CGRectMake(0, 0, cols, rows), image.CGImage);
CGContextRelease(contextRef);
CGColorSpaceRelease(colorSpace);
cv::Mat cvMat3(rows, cols, CV_8UC3); // 8 bits per component, 4 channels
cv::cvtColor(cvMat, cvMat3,COLOR_RGBA2RGB);
return cvMat3;
}
+ (UIImage *)scaleAndRotateImageFrontCamera:(UIImage *)image
{
static int kMaxResolution = 640;
CGImageRef imgRef = image.CGImage;
CGFloat width = CGImageGetWidth(imgRef);
CGFloat height = CGImageGetHeight(imgRef);
CGAffineTransform transform = CGAffineTransformIdentity;
CGRect bounds = CGRectMake( 0, 0, width, height);
if (width > kMaxResolution || height > kMaxResolution) {
CGFloat ratio = width/height;
if (ratio > 1) {
bounds.size.width = kMaxResolution;
bounds.size.height = bounds.size.width / ratio;
} else {
bounds.size.height = kMaxResolution;
bounds.size.width = bounds.size.height * ratio;
}
}
CGFloat scaleRatio = bounds.size.width / width;
CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef));
CGFloat boundHeight;
UIImageOrientation orient = image.imageOrientation;
switch(orient) {
case UIImageOrientationUp:
transform = CGAffineTransformIdentity;
break;
case UIImageOrientationUpMirrored:
transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
break;
case UIImageOrientationDown:
transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationDownMirrored:
transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
transform = CGAffineTransformScale(transform, 1.0, -1.0);
break;
case UIImageOrientationLeftMirrored:
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationLeft:
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeScale(-1.0, 1.0);
transform = CGAffineTransformRotate(transform, M_PI / 2.0);
break;
default:
[NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
}
UIGraphicsBeginImageContext( bounds.size );
CGContextRef context = UIGraphicsGetCurrentContext();
if ( orient == UIImageOrientationRight || orient == UIImageOrientationLeft ) {
CGContextScaleCTM(context, -scaleRatio, scaleRatio);
CGContextTranslateCTM(context, -height, 0);
}
else {
CGContextScaleCTM(context, scaleRatio, -scaleRatio);
CGContextTranslateCTM(context, 0, -height);
}
CGContextConcatCTM( context, transform );
CGContextDrawImage( UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef );
UIImage *returnImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return returnImage;
}@end

+ 0
- 40
Prj-IOS/lp/ViewController.h View File

@@ -1,40 +0,0 @@
//
// ViewController.h
// lp
//
// Created by xiaojun on 2017/12/2.
// Copyright © 2017年 xiaojun. All rights reserved.
//

//#import <UIKit/UIKit.h>

#ifdef __cplusplus
#import <opencv2/opencv.hpp>

//#import <opencv2/imgproc/types_c.h>
#import <opencv2/imgcodecs/ios.h>
#endif

#ifdef __OBJC__
#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#endif

@interface ViewController : UIViewController<UIImagePickerControllerDelegate,
UINavigationControllerDelegate,UIToolbarDelegate> {
cv::Mat source_image;
}

@property (nonatomic, retain) UILabel *textLabel;
@property (nonatomic, retain) UIImageView *imageView;
@property (nonatomic, retain) UIImageView *textView;
@property (nonatomic, retain) UIToolbar *toolbar;

-(void)loadButtonPressed:(id)sender;
-(void)simpleRecognition:(cv::Mat&)src;
-(NSString *)getPath:(NSString *)fileName;

@end


+ 0
- 216
Prj-IOS/lp/ViewController.mm View File

@@ -1,216 +0,0 @@
//
// ViewController.m
// lp
//
// Created by xiaojun on 2017/12/2.
// Copyright © 2017年 xiaojun. All rights reserved.
//



#import "ViewController.h"
#import "UIImageCVMatConverter.h"

#import "Pipeline.h"

using namespace pr;


@interface ViewController ()

@end

@implementation ViewController

@synthesize imageView;
@synthesize textView;
@synthesize toolbar;
@synthesize textLabel;

-(NSString *)getPath:(NSString*)fileName{
NSString *bundlePath = [[NSBundle mainBundle].resourcePath stringByAppendingPathComponent:@"model.bundle"];
NSString *path = [bundlePath stringByAppendingPathComponent:fileName];
return path;
}

-(void)simpleRecognition:(cv::Mat&)src{
NSString *path_1 = [self getPath:@"cascade.xml"];
NSString *path_7 = [self getPath:@"CharacterRecognization.caffemodel"];
NSString *path_6 = [self getPath:@"CharacterRecognization.prototxt"];
NSString *path_3 = [self getPath:@"HorizonalFinemapping.caffemodel"];
NSString *path_2 = [self getPath:@"HorizonalFinemapping.prototxt"];
NSString *path_5 = [self getPath:@"Segmentation.caffemodel"];
NSString *path_4 = [self getPath:@"Segmentation.prototxt"];
std::string *cpath_1 = new std::string([path_1 UTF8String]);
std::string *cpath_2 = new std::string([path_2 UTF8String]);
std::string *cpath_3 = new std::string([path_3 UTF8String]);
std::string *cpath_4 = new std::string([path_4 UTF8String]);
std::string *cpath_5 = new std::string([path_5 UTF8String]);
std::string *cpath_6 = new std::string([path_6 UTF8String]);
std::string *cpath_7 = new std::string([path_7 UTF8String]);
PipelinePR pr2 = PipelinePR::PipelinePR(*cpath_1,*cpath_2,*cpath_3,*cpath_4,*cpath_5,*cpath_6,*cpath_7);
std::vector<pr::PlateInfo> list_res = pr2.RunPiplineAsImage(src);
std::string concat_results="";
for(auto one:list_res){
if(one.confidence>0.7){
concat_results += one.getPlateName()+",";
//std::cout<<"1-----"+one.getPlateName()+"----1";
}
}
//std::cout<<"2--"+concat_results+"--2";
NSString *str=[NSString stringWithCString:concat_results.c_str() encoding:NSUTF8StringEncoding];
if(str.length > 0){
str = [str substringToIndex:str.length-1];
str = [NSString stringWithFormat:@"result:%@",str];
}else{
str = [NSString stringWithFormat:@"result:null"];
}
[self.textLabel setText:str];
}


- (void)imagePickerController:(UIImagePickerController*)picker didFinishPickingMediaWithInfo:(NSDictionary *)info
{
[picker dismissViewControllerAnimated:YES completion:nil];
UIImage* temp = [info objectForKey:@"UIImagePickerControllerOriginalImage"];
UIImage *temp_image=[UIImageCVMatConverter scaleAndRotateImageBackCamera:temp];
source_image=[UIImageCVMatConverter cvMatFromUIImage:temp_image];
[self simpleRecognition:source_image];
imageView.image = temp;
//cv::Mat cvImage,cv2;
//UIImageToMat(temp, cvImage);
// if(!cvImage.empty()){
// cv::Mat gray;
// // 将图像转换为灰度显示
// cv::cvtColor(cvImage,gray,CV_RGB2GRAY);
// // 应用高斯滤波器去除小的边缘
// cv::GaussianBlur(gray, gray, cv::Size(5,5), 1.2,1.2);
// // 计算与画布边缘
// cv::Mat edges;
// cv::Canny(gray, edges, 0, 50);
// // 使用白色填充
// cvImage.setTo(cv::Scalar::all(225));
// // 修改边缘颜色
// cvImage.setTo(cv::Scalar(0,128,255,255),edges);
// // 将Mat转换为Xcode的UIImageView显示
// self.imageView.image = MatToUIImage(cvImage);
// }
}

-(void)imagePickerControllerDidCancel:(UIImagePickerController *)picker
{
[picker dismissViewControllerAnimated:YES completion:nil];
}


- (void)loadButtonPressed:(id)sender
{
UIImagePickerController* picker = [[UIImagePickerController alloc] init];
picker.delegate = self;
if (![UIImagePickerController isSourceTypeAvailable:
UIImagePickerControllerSourceTypePhotoLibrary])
return;
picker.sourceType = UIImagePickerControllerSourceTypePhotoLibrary;
[self presentViewController:picker animated:YES completion:nil];
}

- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
CGRect bounds = [UIScreen mainScreen].bounds;
imageView = [[UIImageView alloc] init];
imageView.frame = CGRectMake(0, 160, bounds.size.width, bounds.size.height-210);
imageView.contentMode=UIViewContentModeScaleAspectFit;
imageView.backgroundColor = [UIColor clearColor];
[self.view addSubview:imageView];
/* Add the fps Label */
UILabel *fps = [[UILabel alloc] initWithFrame:CGRectMake(20, 120, 180, 20)];
fps.font=[UIFont fontWithName:@"华文细黑" size:14.0f];
fps.backgroundColor=[UIColor clearColor];
fps.textColor=[UIColor redColor];
fps.textAlignment=NSTextAlignmentLeft;
// fps.transform = CGAffineTransformMakeRotation(90);
fps.text=@"result";
self.textLabel = fps;
[self.view addSubview:self.textLabel];
[self.view bringSubviewToFront:self.textLabel];
toolbar=[[UIToolbar alloc] initWithFrame:CGRectMake(0, bounds.size.height- 44, bounds.size.width, 44)];
[toolbar setBackgroundColor:[UIColor clearColor]];
// toolbar.barStyle=UIBarStyleDefault;
toolbar.tintColor=[UIColor blackColor];
toolbar.translucent=YES;
// [toolbar setTranslucent:YES];
[self.toolbar setBackgroundImage:[UIImage new]
forToolbarPosition:UIBarPositionAny
barMetrics:UIBarMetricsDefault];
toolbar.delegate=self;
UIBarButtonItem*flexitem=[[UIBarButtonItem alloc] initWithBarButtonSystemItem:UIBarButtonSystemItemFlexibleSpace target:nil action:nil];
UIBarButtonItem*albumitem=[[UIBarButtonItem alloc]
initWithTitle:@"相册"
style:UIBarButtonItemStylePlain
target:self
action:@selector(loadButtonPressed:)];
[toolbar setItems:[NSArray arrayWithObjects:albumitem,flexitem,nil]];
[self.view addSubview:toolbar];
// Do any additional setup after loading the view, typically from a nib
toolbar.autoresizingMask = UIViewAutoresizingNone;
}


- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}


@end

+ 0
- 16
Prj-IOS/lp/main.m View File

@@ -1,16 +0,0 @@
//
// main.m
// lp
//
// Created by xiaojun on 2017/12/2.
// Copyright © 2017年 xiaojun. All rights reserved.
//

#import <UIKit/UIKit.h>
#import "AppDelegate.h"

int main(int argc, char * argv[]) {
@autoreleasepool {
return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
}
}

+ 0
- 12117
Prj-IOS/model.bundle/cascade.xml
File diff suppressed because it is too large
View File


+ 0
- 19
Prj-IOS/src/CNNRecognizer.cpp View File

@@ -1,19 +0,0 @@
//
// Created by 庾金科 on 21/10/2017.
//

#include "../include/CNNRecognizer.h"

namespace pr{
CNNRecognizer::CNNRecognizer(std::string prototxt,std::string caffemodel){
net = cv::dnn::readNetFromCaffe(prototxt, caffemodel);
}

label CNNRecognizer::recognizeCharacter(cv::Mat charImage){
if(charImage.channels()== 3)
cv::cvtColor(charImage,charImage,cv::COLOR_BGR2GRAY);
cv::Mat inputBlob = cv::dnn::blobFromImage(charImage, 1/255.0, cv::Size(CHAR_INPUT_W,CHAR_INPUT_H), cv::Scalar(0,0,0),false);
net.setInput(inputBlob,"data");
return net.forward();
}
}

+ 0
- 133
Prj-IOS/src/FastDeskew.cpp View File

@@ -1,133 +0,0 @@
//
// Created by 庾金科 on 02/10/2017.
//



#include <../include/FastDeskew.h>

namespace pr{


const int ANGLE_MIN = 30 ;
const int ANGLE_MAX = 150 ;
const int PLATE_H = 36;
const int PLATE_W = 136;

int angle(float x,float y)
{
return atan2(x,y)*180/3.1415;
}

std::vector<float> avgfilter(std::vector<float> angle_list,int windowsSize) {
std::vector<float> angle_list_filtered(angle_list.size() - windowsSize + 1);
for (int i = 0; i < angle_list.size() - windowsSize + 1; i++) {
float avg = 0.00f;
for (int j = 0; j < windowsSize; j++) {
avg += angle_list[i + j];
}
avg = avg / windowsSize;
angle_list_filtered[i] = avg;
}

return angle_list_filtered;
}


void drawHist(std::vector<float> seq){
cv::Mat image(300,seq.size(),CV_8U);
image.setTo(0);

for(int i = 0;i<seq.size();i++)
{
float l = *std::max_element(seq.begin(),seq.end());

int p = int(float(seq[i])/l*300);

cv::line(image,cv::Point(i,300),cv::Point(i,300-p),cv::Scalar(255,255,255));
}
//cv::imshow("vis",image);
}

cv::Mat correctPlateImage(cv::Mat skewPlate,float angle,float maxAngle)
{

cv::Mat dst;

cv::Size size_o(skewPlate.cols,skewPlate.rows);


int extend_padding = 0;
// if(angle<0)
extend_padding = static_cast<int>(skewPlate.rows*tan(cv::abs(angle)/180* 3.14) );
// else
// extend_padding = static_cast<int>(skewPlate.rows/tan(cv::abs(angle)/180* 3.14) );

// std::cout<<"extend:"<<extend_padding<<std::endl;

cv::Size size(skewPlate.cols + extend_padding ,skewPlate.rows);

float interval = abs(sin((angle /180) * 3.14)* skewPlate.rows);
// std::cout<<interval<<std::endl;

cv::Point2f pts1[4] = {cv::Point2f(0,0),cv::Point2f(0,size_o.height),cv::Point2f(size_o.width,0),cv::Point2f(size_o.width,size_o.height)};
if(angle>0) {
cv::Point2f pts2[4] = {cv::Point2f(interval, 0), cv::Point2f(0, size_o.height),
cv::Point2f(size_o.width, 0), cv::Point2f(size_o.width - interval, size_o.height)};
cv::Mat M = cv::getPerspectiveTransform(pts1,pts2);
cv::warpPerspective(skewPlate,dst,M,size);


}
else {
cv::Point2f pts2[4] = {cv::Point2f(0, 0), cv::Point2f(interval, size_o.height), cv::Point2f(size_o.width-interval, 0),
cv::Point2f(size_o.width, size_o.height)};
cv::Mat M = cv::getPerspectiveTransform(pts1,pts2);
cv::warpPerspective(skewPlate,dst,M,size,cv::INTER_CUBIC);

}
return dst;
}
cv::Mat fastdeskew(cv::Mat skewImage,int blockSize){


const int FILTER_WINDOWS_SIZE = 5;
std::vector<float> angle_list(180);
memset(angle_list.data(),0,angle_list.size()*sizeof(int));

cv::Mat bak;
skewImage.copyTo(bak);
if(skewImage.channels() == 3)
cv::cvtColor(skewImage,skewImage,cv::COLOR_RGB2GRAY);

if(skewImage.channels() == 1)
{
cv::Mat eigen;

cv::cornerEigenValsAndVecs(skewImage,eigen,blockSize,5);
for( int j = 0; j < skewImage.rows; j+=blockSize )
{ for( int i = 0; i < skewImage.cols; i+=blockSize )
{
float x2 = eigen.at<cv::Vec6f>(j, i)[4];
float y2 = eigen.at<cv::Vec6f>(j, i)[5];
int angle_cell = angle(x2,y2);
angle_list[(angle_cell + 180)%180]+=1.0;

}
}
}
std::vector<float> filtered = avgfilter(angle_list,5);

int maxPos = std::max_element(filtered.begin(),filtered.end()) - filtered.begin() + FILTER_WINDOWS_SIZE/2;
if(maxPos>ANGLE_MAX)
maxPos = (-maxPos+90+180)%180;
if(maxPos<ANGLE_MIN)
maxPos-=90;
maxPos=90-maxPos;
cv::Mat deskewed = correctPlateImage(bak, static_cast<float>(maxPos),60.0f);
return deskewed;
}



}//namespace pr

+ 0
- 205
Prj-IOS/src/FineMapping.cpp View File

@@ -1,205 +0,0 @@
//
// Created by 庾金科 on 22/09/2017.
//

#include "FineMapping.h"
namespace pr{

const int FINEMAPPING_H = 50;
const int FINEMAPPING_W = 120;
const int PADDING_UP_DOWN = 30;
// Debug helper: outline `rect` on `image` with a 1-pixel green border.
void drawRect(cv::Mat image,cv::Rect rect)
{
    const cv::Point topLeft(rect.x, rect.y);
    const cv::Point bottomRight(rect.x + rect.width, rect.y + rect.height);
    cv::rectangle(image, topLeft, bottomRight, cv::Scalar(0, 255, 0), 1);
}


// Load the horizontal-bound regression network from its Caffe
// prototxt/weights pair; used by FineMappingHorizon().
FineMapping::FineMapping(std::string prototxt,std::string caffemodel) {
net = cv::dnn::readNetFromCaffe(prototxt, caffemodel);

}

cv::Mat FineMapping::FineMappingHorizon(cv::Mat FinedVertical,int leftPadding,int rightPadding)
{
    // Predict the left/right character bounds with the regression net and crop
    // the plate to them, widened by the caller-supplied paddings.
    //
    // FinedVertical : vertically rectified plate image.
    // leftPadding   : extra columns kept left of the predicted front bound.
    // rightPadding  : extra columns kept right of the predicted back bound.
    // returns       : horizontally cropped deep copy of FinedVertical.
    cv::Mat inputBlob = cv::dnn::blobFromImage(FinedVertical, 1/255.0, cv::Size(66,16),
                                               cv::Scalar(0,0,0),false);

    net.setInput(inputBlob,"data");
    cv::Mat prob = net.forward();
    // The net outputs both bounds as fractions of the plate width.
    int front = static_cast<int>(prob.at<float>(0,0)*FinedVertical.cols);
    int back = static_cast<int>(prob.at<float>(0,1)*FinedVertical.cols);
    front -= leftPadding ;
    if(front<0) front = 0;
    back +=rightPadding;
    if(back>FinedVertical.cols-1) back=FinedVertical.cols - 1;
    // Robustness fix: a degenerate prediction (front >= back) would make
    // colRange() throw; fall back to the full width in that case.
    if(front >= back){
        front = 0;
        back = FinedVertical.cols - 1;
    }
    cv::Mat cropped = FinedVertical.colRange(front,back).clone();
    return cropped;
}
// Robustly fit a line through the candidate points (Huber loss) and return its
// y values at x = 0 and x = 136 (the nominal plate width), each shifted by the
// vertical padding plus the caller's offset. With two points or fewer, the
// offset alone is returned for both ends.
std::pair<int,int> FitLineRansac(std::vector<cv::Point> pts,int zeroadd = 0 )
{
    if (pts.size() <= 2)
        return std::make_pair(zeroadd, zeroadd);

    cv::Vec4f fitted;
    cv::fitLine(pts, fitted, cv::DIST_HUBER, 0, 0.01, 0.01);
    const float dx = fitted[0];
    const float dy = fitted[1];
    const float px = fitted[2];
    const float py = fitted[3];
    const int leftY  = static_cast<int>((-px * dy / dx) + py);
    const int rightY = static_cast<int>(((136 - px) * dy / dx) + py);
    return std::make_pair(leftY + PADDING_UP_DOWN + zeroadd,
                          rightY + PADDING_UP_DOWN + zeroadd);
}

// Straighten a plate proposal vertically: sweep a range of adaptive-threshold
// offsets to collect character-like blobs, fit lines through their top and
// bottom corners, and warp the plate so those lines become horizontal.
//
// InputProposal : rough plate crop (BGR or grayscale).
// sliceNum      : number of threshold offsets swept between lower..upper.
// upper, lower  : range of adaptiveThreshold constants C.
// windows_size  : adaptiveThreshold neighborhood size.
// Returns a 136x36 image (size fixed by the perspective target below).
cv::Mat FineMapping::FineMappingVertical(cv::Mat InputProposal,int sliceNum,int upper,int lower,int windows_size){


cv::Mat PreInputProposal;
cv::Mat proposal;

cv::resize(InputProposal,PreInputProposal,cv::Size(FINEMAPPING_W,FINEMAPPING_H));
if(InputProposal.channels() == 3)
cv::cvtColor(PreInputProposal,proposal,cv::COLOR_BGR2GRAY);
else
PreInputProposal.copyTo(proposal);

// proposal = PreInputProposal;

// this will improve some sen
// NOTE(review): `kernal` is built but the erode below is commented out, so it
// is currently unused in this first pass.
cv::Mat kernal = cv::getStructuringElement(cv::MORPH_ELLIPSE,cv::Size(1,3));
// cv::erode(proposal,proposal,kernal);

// Step size through the [lower, upper] threshold-offset range.
float diff = static_cast<float>(upper-lower);
diff/=static_cast<float>(sliceNum-1);
cv::Mat binary_adaptive;
std::vector<cv::Point> line_upper;
std::vector<cv::Point> line_lower;
int contours_nums=0;

// First pass: at each threshold offset, keep bounding boxes whose aspect
// ratio / area look like characters; their top-left corners feed the upper
// line fit and bottom-right corners the lower line fit.
for(int i = 0 ; i < sliceNum ; i++)
{
std::vector<std::vector<cv::Point> > contours;
float k =lower + i*diff;
cv::adaptiveThreshold(proposal,binary_adaptive,255,cv::ADAPTIVE_THRESH_MEAN_C,cv::THRESH_BINARY,windows_size,k);
cv::Mat draw;
binary_adaptive.copyTo(draw);
cv::findContours(binary_adaptive,contours,cv::RETR_EXTERNAL,cv::CHAIN_APPROX_SIMPLE);
for(auto contour: contours)
{
cv::Rect bdbox =cv::boundingRect(contour);
float lwRatio = bdbox.height/static_cast<float>(bdbox.width);
int bdboxAera = bdbox.width*bdbox.height;
if (( lwRatio>0.7&&bdbox.width*bdbox.height>100 && bdboxAera<300)
|| (lwRatio>3.0 && bdboxAera<100 && bdboxAera>10))
{

cv::Point p1(bdbox.x, bdbox.y);
cv::Point p2(bdbox.x + bdbox.width, bdbox.y + bdbox.height);
line_upper.push_back(p1);
line_lower.push_back(p2);
contours_nums+=1;
}
}
}

std:: cout<<"contours_nums "<<contours_nums<<std::endl;

// Retry pass: too few blobs usually means an inverted (dark-on-light vs
// light-on-dark) plate — invert, erode, and repeat the sweep. The retry's
// blobs are appended to the same line_upper/line_lower sets.
if(contours_nums<41)
{
cv::bitwise_not(InputProposal,InputProposal);
cv::Mat kernal = cv::getStructuringElement(cv::MORPH_ELLIPSE,cv::Size(1,5));
cv::Mat bak;
cv::resize(InputProposal,bak,cv::Size(FINEMAPPING_W,FINEMAPPING_H));
cv::erode(bak,bak,kernal);
if(InputProposal.channels() == 3)
cv::cvtColor(bak,proposal,cv::COLOR_BGR2GRAY);
else
proposal = bak;
// NOTE(review): this inner counter shadows the outer contours_nums and its
// value is never read afterwards.
int contours_nums=0;

for(int i = 0 ; i < sliceNum ; i++)
{
std::vector<std::vector<cv::Point> > contours;
float k =lower + i*diff;
cv::adaptiveThreshold(proposal,binary_adaptive,255,cv::ADAPTIVE_THRESH_MEAN_C,cv::THRESH_BINARY,windows_size,k);
// cv::imshow("image",binary_adaptive);
// cv::waitKey(0);
cv::Mat draw;
binary_adaptive.copyTo(draw);
cv::findContours(binary_adaptive,contours,cv::RETR_EXTERNAL,cv::CHAIN_APPROX_SIMPLE);
for(auto contour: contours)
{
cv::Rect bdbox =cv::boundingRect(contour);
float lwRatio = bdbox.height/static_cast<float>(bdbox.width);
int bdboxAera = bdbox.width*bdbox.height;
if (( lwRatio>0.7&&bdbox.width*bdbox.height>120 && bdboxAera<300)
|| (lwRatio>3.0 && bdboxAera<100 && bdboxAera>10))
{

cv::Point p1(bdbox.x, bdbox.y);
cv::Point p2(bdbox.x + bdbox.width, bdbox.y + bdbox.height);
line_upper.push_back(p1);
line_lower.push_back(p2);
contours_nums+=1;
}
}
}
// std:: cout<<"contours_nums "<<contours_nums<<std::endl;
}

// Pad the (color) proposal vertically so the fitted lines — offset by
// PADDING_UP_DOWN inside FitLineRansac — stay inside the image.
cv::Mat rgb;
cv::copyMakeBorder(PreInputProposal, rgb, 30, 30, 0, 0, cv::BORDER_REPLICATE);
// cv::imshow("rgb",rgb);
// cv::waitKey(0);
//


// Fit the top (A) and bottom (B) borders, then map the quad they bound onto
// an axis-aligned 136x36 target.
std::pair<int, int> A;
std::pair<int, int> B;
A = FitLineRansac(line_upper, -2);
B = FitLineRansac(line_lower, 2);
int leftyB = A.first;
int rightyB = A.second;
int leftyA = B.first;
int rightyA = B.second;
int cols = rgb.cols;
int rows = rgb.rows;
// pts_map1 = np.float32([[cols - 1, rightyA], [0, leftyA],[cols - 1, rightyB], [0, leftyB]])
// pts_map2 = np.float32([[136,36],[0,36],[136,0],[0,0]])
// mat = cv2.getPerspectiveTransform(pts_map1,pts_map2)
// image = cv2.warpPerspective(rgb,mat,(136,36),flags=cv2.INTER_CUBIC)
std::vector<cv::Point2f> corners(4);
corners[0] = cv::Point2f(cols - 1, rightyA);
corners[1] = cv::Point2f(0, leftyA);
corners[2] = cv::Point2f(cols - 1, rightyB);
corners[3] = cv::Point2f(0, leftyB);
std::vector<cv::Point2f> corners_trans(4);
corners_trans[0] = cv::Point2f(136, 36);
corners_trans[1] = cv::Point2f(0, 36);
corners_trans[2] = cv::Point2f(136, 0);
corners_trans[3] = cv::Point2f(0, 0);
cv::Mat transform = cv::getPerspectiveTransform(corners, corners_trans);
cv::Mat quad = cv::Mat::zeros(36, 136, CV_8UC3);
cv::warpPerspective(rgb, quad, transform, quad.size());
return quad;

}


}



+ 0
- 69
Prj-IOS/src/Pipeline.cpp View File

@@ -1,69 +0,0 @@
//
// Created by 庾金科 on 23/10/2017.
//

#include "../include/Pipeline.h"


namespace pr {

// Plate character code table: 31 province abbreviations followed by digits and
// letters (I and O excluded, as on Chinese plates). Index order must match the
// character recognizer's output layer.
std::vector<std::string> chars_code{"京","沪","津","渝","冀","晋","蒙","辽","吉","黑","苏","浙","皖","闽","赣","鲁","豫","鄂","湘","粤","桂","琼","川","贵","云","藏","陕","甘","青","宁","新","0","1","2","3","4","5","6","7","8","9","A","B","C","D","E","F","G","H","J","K","L","M","N","P","Q","R","S","T","U","V","W","X","Y","Z"};



// Build the full recognition pipeline by loading its four models:
// cascade plate detector, fine-mapping net, segmentation net, character CNN.
// Each argument is a file path; ownership of the stages is released in the
// destructor.
PipelinePR::PipelinePR(std::string detector_filename,
std::string finemapping_prototxt, std::string finemapping_caffemodel,
std::string segmentation_prototxt, std::string segmentation_caffemodel,
std::string charRecognization_proto, std::string charRecognization_caffemodel) {
plateDetection = new PlateDetection(detector_filename);
fineMapping = new FineMapping(finemapping_prototxt, finemapping_caffemodel);
plateSegmentation = new PlateSegmentation(segmentation_prototxt, segmentation_caffemodel);
generalRecognizer = new CNNRecognizer(charRecognization_proto, charRecognization_caffemodel);
}

// Release the four pipeline stages allocated in the constructor.
PipelinePR::~PipelinePR() {

delete plateDetection;
delete fineMapping;
delete plateSegmentation;
delete generalRecognizer;

}

// Run the whole pipeline on one image: detect rough plate regions, rectify
// each (vertical fine-mapping, deskew, horizontal fine-mapping), segment it
// into characters, recognize them, and decode the plate string.
// Returns one PlateInfo per detected plate, with name/confidence filled in.
std::vector<PlateInfo> PipelinePR:: RunPiplineAsImage(cv::Mat plateImage) {
std::vector<PlateInfo> results;
std::vector<pr::PlateInfo> plates;
plateDetection->plateDetectionRough(plateImage,plates);

for (pr::PlateInfo plateinfo:plates) {

cv::Mat image_finemapping = plateinfo.getPlateImage();
image_finemapping = fineMapping->FineMappingVertical(image_finemapping);
image_finemapping = pr::fastdeskew(image_finemapping, 5);
image_finemapping = fineMapping->FineMappingHorizon(image_finemapping, 2, 5);
cv::resize(image_finemapping, image_finemapping, cv::Size(136, 36));
plateinfo.setPlateImage(image_finemapping);
std::vector<cv::Rect> rects;
plateSegmentation->segmentPlatePipline(plateinfo, 1, rects);
plateSegmentation->ExtractRegions(plateinfo, rects);
// Extra right border so the last character's crop cannot fall outside.
cv::copyMakeBorder(image_finemapping, image_finemapping, 0, 0, 0, 20, cv::BORDER_REPLICATE);

plateinfo.setPlateImage(image_finemapping);
generalRecognizer->SegmentBasedSequenceRecognition(plateinfo);
plateinfo.decodePlateNormal(chars_code);
results.push_back(plateinfo);
std::cout << plateinfo.getPlateName() << std::endl;


}

// for (auto str:results) {
// std::cout << str << std::endl;
// }
return results;

} // end RunPiplineAsImage (the previous "//namespace pr" comment here was wrong)



}

+ 0
- 61
Prj-IOS/src/PlateDetection.cpp View File

@@ -1,61 +0,0 @@
//
// Created by 庾金科 on 20/09/2017.
//
#include "../include/PlateDetection.h"

#include <iostream>

#include "util.h"

namespace pr{


// Load the cascade classifier used for rough plate localisation.
// cv::CascadeClassifier::load() returns false on failure; the original
// silently discarded that result, which made every later detection quietly
// find nothing — report the failure so a bad model path is visible.
PlateDetection::PlateDetection(std::string filename_cascade){
    if (!cascade.load(filename_cascade))
        std::cerr << "PlateDetection: failed to load cascade file: "
                  << filename_cascade << std::endl;
}


// Detect candidate plates with the cascade classifier, expand each hit to
// include context around the plate, crop it, and append a PlateInfo per hit.
//
// InputImage : BGR frame to search.
// plateInfos : output; one entry per (expanded) detection.
// min_w/max_w: detector size bounds; height is assumed w/4 (plate aspect).
void PlateDetection::plateDetectionRough(cv::Mat InputImage,std::vector<pr::PlateInfo> &plateInfos,int min_w,int max_w){

cv::Mat processImage;

cv::cvtColor(InputImage,processImage,cv::COLOR_BGR2GRAY);


std::vector<cv::Rect> platesRegions;
// std::vector<PlateInfo> plates;
cv::Size minSize(min_w,min_w/4);
cv::Size maxSize(max_w,max_w/4);
// cv::imshow("input",InputImage);
// cv::waitKey(0);
cascade.detectMultiScale( processImage, platesRegions,
1.1, 3, cv::CASCADE_SCALE_IMAGE,minSize,maxSize);
for(auto plate:platesRegions)
{
// extend rects
// x -= w * 0.14
// w += w * 0.28
// y -= h * 0.6
// h += h * 1.1;
// NOTE(review): code below grows height by 1.2, not the 1.1 the sketch
// above says — presumably intentional tuning; verify against upstream.
int zeroadd_w = static_cast<int>(plate.width*0.28);
int zeroadd_h = static_cast<int>(plate.height*1.2);
int zeroadd_x = static_cast<int>(plate.width*0.14);
int zeroadd_y = static_cast<int>(plate.height*0.6);
plate.x-=zeroadd_x;
plate.y-=zeroadd_y;
plate.height += zeroadd_h;
plate.width += zeroadd_w;
// cropFromImage clamps the expanded rect back inside the image bounds.
cv::Mat plateImage = util::cropFromImage(InputImage,plate);
PlateInfo plateInfo(plateImage,plate);
plateInfos.push_back(plateInfo);

}
}
// std::vector<pr::PlateInfo> PlateDetection::plateDetectionRough(cv::Mat InputImage,cv::Rect roi,int min_w,int max_w){
// cv::Mat roi_region = util::cropFromImage(InputImage,roi);
// return plateDetectionRough(roi_region,min_w,max_w);
// }




}//namespace pr

+ 0
- 402
Prj-IOS/src/PlateSegmentation.cpp View File

@@ -1,402 +0,0 @@
//
// Created by 庾金科 on 16/10/2017.
//

#include "../include/PlateSegmentation.h"
#include "../include/niBlackThreshold.h"


//#define DEBUG
namespace pr{

// Load the sliding-window character/background classifier net (Caffe).
PlateSegmentation::PlateSegmentation(std::string prototxt,std::string caffemodel) {
net = cv::dnn::readNetFromCaffe(prototxt, caffemodel);
}
// Run one 22x22 grayscale window through the classifier net and return its
// raw forward-pass output (class scores for this window).
cv::Mat PlateSegmentation::classifyResponse(const cv::Mat &cropped){
cv::Mat inputBlob = cv::dnn::blobFromImage(cropped, 1/255.0, cv::Size(22,22), cv::Scalar(0,0,0),false);
net.setInput(inputBlob,"data");
return net.forward();
}

// Debug helper: render `seq` (size floats) as a bar chart, normalized to the
// sequence maximum. `name` was the imshow window title; with the imshow call
// commented out the parameter is currently unused.
void drawHist(float* seq,int size,const char* name){
cv::Mat image(300,size,CV_8U);
image.setTo(0);
float* start =seq;
float* end = seq+size;
float l = *std::max_element(start,end);
for(int i = 0;i<size;i++)
{
int p = int(float(seq[i])/l*300);
cv::line(image,cv::Point(i,300),cv::Point(i,300-p),cv::Scalar(255,255,255));
}
cv::resize(image,image,cv::Size(600,100));
//cv::imshow(name,image);
}

// Clamp `val` into the inclusive range [0, rows] in place; used to keep crop
// coordinates inside the image.
inline void computeSafeMargin(int &val,const int &rows){
    val = std::max(std::min(val, rows), 0);
}

// Build a rectangle extending left/right/top/bottom pixels from `center`,
// clipped to stay inside an image of size `bdSize`.
cv::Rect boxFromCenter(const cv::Point center,int left,int right,int top,int bottom,cv::Size bdSize)
{
cv::Point p1(center.x - left ,center.y - top);
cv::Point p2( center.x + right, center.y + bottom);
p1.x = std::max(0,p1.x);
p1.y = std::max(0,p1.y);
p2.x = std::min(p2.x,bdSize.width-1);
p2.y = std::min(p2.y,bdSize.height-1);
cv::Rect rect(p1,p2);
return rect;
}

// Grow `rect` by the given per-side paddings (re-expressed around its center
// so boxFromCenter can clip the result to `bdSize`).
cv::Rect boxPadding(cv::Rect rect,int left,int right,int top,int bottom,cv::Size bdSize)
{

cv::Point center(rect.x+(rect.width>>1),rect.y + (rect.height>>1));
int rebuildLeft = (rect.width>>1 )+ left;
int rebuildRight = (rect.width>>1 )+ right;
int rebuildTop = (rect.height>>1 )+ top;
int rebuildBottom = (rect.height>>1 )+ bottom;
return boxFromCenter(center,rebuildLeft,rebuildRight,rebuildTop,rebuildBottom,bdSize);

}



// Refine the coarse segment points into per-character bounding boxes.
// For each candidate point, cut a character-wide strip, binarize it
// (Niblack), and keep the tall contour closest to the strip's center; the
// first point (the Chinese character) keeps the raw strip unrefined.
//
// plateImage   : grayscale plate (rows x cols).
// candidatePts : 7 segment x-positions; [5]-[4] defines the character width.
// padding      : horizontal slack added around each strip/box.
// rects        : output character boxes in plate coordinates.
void PlateSegmentation:: refineRegion(cv::Mat &plateImage,const std::vector<int> &candidatePts,const int padding,std::vector<cv::Rect> &rects){
int w = candidatePts[5] - candidatePts[4];
int cols = plateImage.cols;
int rows = plateImage.rows;
for(int i = 0 ; i < candidatePts.size() ; i++)
{
int left = 0;
int right = 0 ;

if(i == 0 ){
left= candidatePts[i];
right = left+w+padding;
}
else {
left = candidatePts[i] - padding;
right = left + w + padding * 2;
}

computeSafeMargin(right,cols);
computeSafeMargin(left,cols);
cv::Rect roi(left,0,right - left,rows-1);
cv::Mat roiImage;
plateImage(roi).copyTo(roiImage);

if (i>=1)
{

cv::Mat roi_thres;
// cv::threshold(roiImage,roi_thres,0,255,cv::THRESH_OTSU|cv::THRESH_BINARY);

niBlackThreshold(roiImage,roi_thres,255,cv::THRESH_BINARY,15,0.3,BINARIZATION_NIBLACK);

std::vector<std::vector<cv::Point>> contours;
cv::findContours(roi_thres,contours,cv::RETR_LIST,cv::CHAIN_APPROX_SIMPLE);
cv::Point boxCenter(roiImage.cols>>1,roiImage.rows>>1);

cv::Rect final_bdbox;
cv::Point final_center;
int final_dist = INT_MAX;


// Among contours at least half the plate height, keep the one whose
// center is horizontally closest to the strip center.
// (`and` is the ISO alternative token for &&.)
for(auto contour:contours)
{
cv::Rect bdbox = cv::boundingRect(contour);
cv::Point center(bdbox.x+(bdbox.width>>1),bdbox.y + (bdbox.height>>1));
int dist = (center.x - boxCenter.x)*(center.x - boxCenter.x);
if(dist<final_dist and bdbox.height > rows>>1)
{ final_dist =dist;
final_center = center;
final_bdbox = bdbox;
}
}

//rebuild box
// Very thin boxes (likely the digit "1") are rebuilt as a fixed-width box
// around the found center; everything else just gets padding.
if(final_bdbox.height/ static_cast<float>(final_bdbox.width) > 3.5 && final_bdbox.width*final_bdbox.height<10)
final_bdbox = boxFromCenter(final_center,8,8,(rows>>1)-3 , (rows>>1) - 2,roiImage.size());
else {
if(i == candidatePts.size()-1)
final_bdbox = boxPadding(final_bdbox, padding/2, padding, padding/2, padding/2, roiImage.size());
else
final_bdbox = boxPadding(final_bdbox, padding, padding, padding, padding, roiImage.size());


// std::cout<<final_bdbox<<std::endl;
// std::cout<<roiImage.size()<<std::endl;
#ifdef DEBUG
//cv::imshow("char_thres",roi_thres);

//cv::imshow("char",roiImage(final_bdbox));
//cv::waitKey(0);
#endif


}


// Translate from strip-local back to plate coordinates.
final_bdbox.x += left;

rects.push_back(final_bdbox);
//

}
else
{
rects.push_back(roi);
}

// else
// {
//
// }

// cv::GaussianBlur(roiImage,roiImage,cv::Size(7,7),3);
//
// cv::imshow("image",roiImage);
// cv::waitKey(0);


}



}
// In-place Gaussian smoothing of a float sequence.
//
// angle_list  : buffer of `size` floats, overwritten with filtered values.
// size        : number of elements in angle_list.
// windowsSize : width of the Gaussian kernel (sigma fixed at 3).
//
// NOTE(review): the window test `i+j-r>0 && i+j+r<size-1` skips index 0 and
// narrows the window near both ends; kept as-is to preserve the original
// output exactly.
void avgfilter(float *angle_list,int size,int windowsSize) {
    // Work on a copy so reads are not polluted by already-written outputs.
    float *filterd = new float[size];
    for(int i = 0 ; i < size ; i++) filterd[i] = angle_list[i];

    cv::Mat kernal_gaussian = cv::getGaussianKernel(windowsSize,3,CV_32F);
    float *kernal = (float*)kernal_gaussian.data;
    int r = windowsSize/2;

    for (int i = 0; i < size; i++) {
        float avg = 0.00f;
        for (int j = 0; j < windowsSize; j++) {
            if(i+j-r>0&&i+j+r<size-1)
                avg += filterd[i + j-r]*kernal[j];
        }
        angle_list[i] = avg;
    }

    // BUG FIX: the buffer was allocated with new[], so it must be released
    // with delete[]; plain `delete` on a new[] array is undefined behavior.
    delete[] filterd;
}

// Find the 7 character segment points by brute-force template matching over
// the sliding-window responses.
//
// respones     : 3 x cols response matrix — row 0: P(letter/digit),
//                row 1: P(non-character), row 2: P(Chinese char).
// windowsWidth : nominal character width in response columns.
// candidatePts : output — best score and the 7 chosen x-positions.
void PlateSegmentation::templateMatchFinding(const cv::Mat &respones,int windowsWidth,std::pair<float,std::vector<int>> &candidatePts){
// NOTE(review): `rows` is computed but never used below.
int rows = respones.rows;
int cols = respones.cols;



float *data = (float*)respones.data;
float *engNum_prob = data;
float *false_prob = data+cols;
float *ch_prob = data+cols*2;

// Smooth the letter/digit and background curves before scoring.
avgfilter(engNum_prob,cols,5);
avgfilter(false_prob,cols,5);
// avgfilter(ch_prob,cols,5);
std::vector<int> candidate_pts(7);
#ifdef DEBUG
drawHist(engNum_prob,cols,"engNum_prob");
drawHist(false_prob,cols,"false_prob");
drawHist(ch_prob,cols,"ch_prob");
//cv::waitKey(0);
#endif




int cp_list[7];
float loss_selected = -1;

// Exhaustively try (start offset, char width, gap after the 2nd character)
// and keep the layout maximizing the score below.
for(int start = 0 ; start < 20 ; start+=2)
for(int width = windowsWidth-5; width < windowsWidth+5 ; width++ ){
for(int interval = windowsWidth/2; interval < windowsWidth; interval++)
{
int cp1_ch = start;
int cp2_p0 = cp1_ch+ width;
int cp3_p1 = cp2_p0+ width + interval;
int cp4_p2 = cp3_p1 + width;
int cp5_p3 = cp4_p2 + width+1;
int cp6_p4 = cp5_p3 + width+2;
int cp7_p5= cp6_p4+ width+2;

// Midpoints between consecutive segment points (expected gaps).
int md1 = (cp1_ch+cp2_p0)>>1;
int md2 = (cp2_p0+cp3_p1)>>1;
int md3 = (cp3_p1+cp4_p2)>>1;
int md4 = (cp4_p2+cp5_p3)>>1;
int md5 = (cp5_p3+cp6_p4)>>1;
int md6 = (cp6_p4+cp7_p5)>>1;




if(cp7_p5>=cols)
continue;
// Score: Chinese prob at point 1, letter/digit prob at the other six,
// plus background prob at the gap midpoints.
// NOTE(review): false_prob[md5] appears twice and md1 is never used —
// possibly intentional weighting, but looks like a typo; verify.
float loss = ch_prob[cp1_ch]+
engNum_prob[cp2_p0] +engNum_prob[cp3_p1]+engNum_prob[cp4_p2]+engNum_prob[cp5_p3]+engNum_prob[cp6_p4] +engNum_prob[cp7_p5]
+ (false_prob[md2]+false_prob[md3]+false_prob[md4]+false_prob[md5]+false_prob[md5] + false_prob[md6]);
// float loss = ch_prob[cp1_ch]*3 -(false_prob[cp3_p1]+false_prob[cp4_p2]+false_prob[cp5_p3]+false_prob[cp6_p4]+false_prob[cp7_p5]);




if(loss>loss_selected)
{
loss_selected = loss;
cp_list[0]= cp1_ch;
cp_list[1]= cp2_p0;
cp_list[2]= cp3_p1;
cp_list[3]= cp4_p2;
cp_list[4]= cp5_p3;
cp_list[5]= cp6_p4;
cp_list[6]= cp7_p5;
}
}
}
candidate_pts[0] = cp_list[0];
candidate_pts[1] = cp_list[1];
candidate_pts[2] = cp_list[2];
candidate_pts[3] = cp_list[3];
candidate_pts[4] = cp_list[4];
candidate_pts[5] = cp_list[5];
candidate_pts[6] = cp_list[6];

candidatePts.first = loss_selected;
candidatePts.second = candidate_pts;

};


// Slide a window across the (resized, 136x36) plate, classify each window,
// and stack the per-window scores; the result is transposed so each class
// becomes a row of `respones` for templateMatchFinding().
//
// plateImage   : modified in place (resized to 136x36).
// windowsWidth : window width in pixels; stride: step between windows.
// respones     : output, classes x positions response matrix.
void PlateSegmentation::segmentPlateBySlidingWindows(cv::Mat &plateImage,int windowsWidth,int stride,cv::Mat &respones){


cv::resize(plateImage,plateImage,cv::Size(136,36));

cv::Mat plateImageGray;
cv::cvtColor(plateImage,plateImageGray,cv::COLOR_BGR2GRAY);

// NOTE(review): rows-1/cols-1 drop the last row/column of the plate —
// presumably to stay inside bounds; verify this is intentional.
int height = plateImage.rows - 1;
int width = plateImage.cols - 1;

for(int i = 0 ; i < plateImage.cols - windowsWidth +1 ; i +=stride)
{
cv::Rect roi(i,0,windowsWidth,height);
cv::Mat roiImage = plateImageGray(roi);
cv::Mat response = classifyResponse(roiImage);
respones.push_back(response);
}




respones = respones.t();
// std::pair<float,std::vector<int>> images ;
//
//
// std::cout<<images.first<<" ";
// for(int i = 0 ; i < images.second.size() ; i++)
// {
// std::cout<<images.second[i]<<" ";
//// cv::line(plateImageGray,cv::Point(images.second[i],0),cv::Point(images.second[i],36),cv::Scalar(255,255,255),1); //DEBUG
// }

// int w = images.second[5] - images.second[4];

// cv::line(plateImageGray,cv::Point(images.second[5]+w,0),cv::Point(images.second[5]+w,36),cv::Scalar(255,255,255),1); //DEBUG
// cv::line(plateImageGray,cv::Point(images.second[5]+2*w,0),cv::Point(images.second[5]+2*w,36),cv::Scalar(255,255,255),1); //DEBUG


// RefineRegion(plateImageGray,images.second,5);

// std::cout<<w<<std::endl;

// std::cout<<<<std::endl;

// cv::resize(plateImageGray,plateImageGray,cv::Size(600,100));



}

// void filterGaussian(cv::Mat &respones,float sigma){
//
// }


// Full segmentation pass for one plate: sliding-window responses, template
// matching for the 7 segment points, then per-character box refinement.
//
// plateInfo  : plate whose image is segmented.
// stride     : divides the template window width passed to matching.
//              NOTE(review): the sliding-window call itself uses a fixed
//              stride of 1 regardless of this parameter — verify intent.
// Char_rects : output character rectangles.
void PlateSegmentation::segmentPlatePipline(PlateInfo &plateInfo,int stride,std::vector<cv::Rect> &Char_rects){
cv::Mat plateImage = plateInfo.getPlateImage(); // get src image .
cv::Mat plateImageGray;
cv::cvtColor(plateImage,plateImageGray,cv::COLOR_BGR2GRAY);
//do binarzation
//
std::pair<float,std::vector<int>> sections ; // segment points variables .

cv::Mat respones; //three response of every sub region from origin image .
segmentPlateBySlidingWindows(plateImage,DEFAULT_WIDTH,1,respones);
templateMatchFinding(respones,DEFAULT_WIDTH/stride,sections);

// std::cout<<sections<<std::endl;

refineRegion(plateImageGray,sections.second,5,Char_rects);
#ifdef DEBUG
for(int i = 0 ; i < sections.second.size() ; i++)
{
std::cout<<sections.second[i]<<" ";
cv::line(plateImageGray,cv::Point(sections.second[i],0),cv::Point(sections.second[i],36),cv::Scalar(255,255,255),1); //DEBUG
}
//cv::imshow("plate",plateImageGray);
//cv::waitKey(0);
#endif
// cv::waitKey(0);

}

// Crop each character rectangle out of the plate, convert to grayscale,
// equalize, tag it (index 0 = Chinese char, 1 = letter, rest = letter/digit),
// and append it to the plate's character list for recognition.
void PlateSegmentation::ExtractRegions(PlateInfo &plateInfo,std::vector<cv::Rect> &rects){
cv::Mat plateImage = plateInfo.getPlateImage();
for(int i = 0 ; i < rects.size() ; i++){
cv::Mat charImage;
plateImage(rects[i]).copyTo(charImage);
if(charImage.channels())
cv::cvtColor(charImage,charImage,cv::COLOR_BGR2GRAY);
// cv::imshow("image",charImage);
// cv::waitKey(0);
cv::equalizeHist(charImage,charImage);
//

//


std::pair<CharType,cv::Mat> char_instance;
if(i == 0 ){

char_instance.first = CHINESE;


} else if(i == 1){
char_instance.first = LETTER;
}
else{
char_instance.first = LETTER_NUMS;
}
char_instance.second = charImage;
plateInfo.appendPlateChar(char_instance);

}

}

}//namespace pr

+ 0
- 26
Prj-IOS/src/Recognizer.cpp View File

@@ -1,26 +0,0 @@
//
// Created by 庾金科 on 22/10/2017.
//

#include "../include/Recognizer.h"

namespace pr{
// Run the character recognizer over every segmented glyph of the plate and
// append each resulting score row, tagged with its character class, to the
// plate's coding sequence.
void GeneralRecognizer::SegmentBasedSequenceRecognition(PlateInfo &plateinfo){
    for (const auto &char_instance : plateinfo.plateChars)
    {
        std::pair<CharType, cv::Mat> coded;
        cv::Mat scores = recognizeCharacter(char_instance.second);
        coded.first = char_instance.first;
        scores.copyTo(coded.second);
        plateinfo.appendPlateCoding(coded);
    }
}
}

+ 0
- 79
Prj-IOS/src/util.h View File

@@ -1,79 +0,0 @@
//
// Created by 庾金科 on 04/04/2017.
//

#include <opencv2/opencv.hpp>

namespace util{

    // Legacy generic swap kept for existing callers; prefer std::swap.
    template <class T> void swap ( T& a, T& b )
    {
        T c(a); a=b; b=c;
    }

    // Legacy generic min kept for existing callers; prefer std::min.
    template <class T> T min(T& a,T& b )
    {
        return a>b?b:a;
    }

    // Crop `rect` out of `image`, clamping the rectangle to the image bounds;
    // returns a deep copy.
    // BUG FIX (linkage): these non-template functions live in a header, so
    // they must be `inline` — otherwise including this header from more than
    // one translation unit causes multiple-definition link errors.
    inline cv::Mat cropFromImage(const cv::Mat &image,cv::Rect rect){
        int w = image.cols-1;
        int h = image.rows-1;
        rect.x = std::max(rect.x,0);
        rect.y = std::max(rect.y,0);
        rect.height = std::min(rect.height,h-rect.y);
        rect.width = std::min(rect.width,w-rect.x);
        cv::Mat temp(rect.size(), image.type());
        cv::Mat cropped;
        temp = image(rect);
        temp.copyTo(cropped);
        return cropped;
    }

    // Crop a rotated rectangle: rotate the whole image so the box becomes
    // axis-aligned, then cut it out with getRectSubPix.
    inline cv::Mat cropBox2dFromImage(const cv::Mat &image,cv::RotatedRect rect)
    {
        cv::Mat M, rotated, cropped;
        float angle = rect.angle;
        cv::Size rect_size(rect.size.width,rect.size.height);
        if (rect.angle < -45.) {
            angle += 90.0;
            swap(rect_size.width, rect_size.height);
        }
        M = cv::getRotationMatrix2D(rect.center, angle, 1.0);
        cv::warpAffine(image, rotated, M, image.size(), cv::INTER_CUBIC);
        cv::getRectSubPix(rotated, rect_size, rect.center, cropped);
        return cropped;
    }

    // 256-bin histogram of the hue plane of `image` (converted BGR -> HSV).
    inline cv::Mat calcHist(const cv::Mat &image)
    {
        cv::Mat hsv;
        std::vector<cv::Mat> hsv_planes;
        cv::cvtColor(image,hsv,cv::COLOR_BGR2HSV);
        cv::split(hsv,hsv_planes);
        cv::Mat hist;
        int histSize = 256;
        float range[] = {0,255};
        const float* histRange = {range};

        cv::calcHist( &hsv_planes[0], 1, 0, cv::Mat(), hist, 1, &histSize, &histRange,true, true);
        return hist;
    }

    // Correlation between the hue histograms of A and B (1.0 = identical).
    // NOTE(review): CV_COMP_CORREL is the legacy C constant; OpenCV 3 also
    // offers cv::HISTCMP_CORREL — left as-is to avoid a behavioral question.
    inline float computeSimilir(const cv::Mat &A,const cv::Mat &B)
    {
        cv::Mat histA,histB;
        histA = calcHist(A);
        histB = calcHist(B);
        return cv::compareHist(histA,histB,CV_COMP_CORREL);
    }

}//namespace util

+ 6
- 2
Prj-Linux/lpr/CMakeLists.txt View File

@@ -3,7 +3,7 @@ project(SwiftPR)


set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}) set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})
find_package(OpenCV 3.3.0 REQUIRED)
find_package(OPENCV 3.3.0 REQUIRED)
include_directories( ${OpenCV_INCLUDE_DIRS}) include_directories( ${OpenCV_INCLUDE_DIRS})
include_directories(include) include_directories(include)


@@ -19,6 +19,7 @@ set(SRC_RECOGNIZE src/Recognizer.cpp src/CNNRecognizer.cpp)


set(SRC_PIPLINE src/Pipeline.cpp) set(SRC_PIPLINE src/Pipeline.cpp)


set(SRC_SEGMENTATIONFREE src/SegmentationFreeRecognizer.cpp )


#set(SOURCE_FILES main.cpp) #set(SOURCE_FILES main.cpp)
#add_executable(HyperLPR_cpp ${SOURCE_FILES}) #add_executable(HyperLPR_cpp ${SOURCE_FILES})
@@ -46,8 +47,11 @@ target_link_libraries(TEST_SEGMENTATION ${OpenCV_LIBS})
add_executable(TEST_RECOGNIZATION ${SRC_RECOGNIZE} tests/test_recognization.cpp) add_executable(TEST_RECOGNIZATION ${SRC_RECOGNIZE} tests/test_recognization.cpp)
target_link_libraries(TEST_RECOGNIZATION ${OpenCV_LIBS}) target_link_libraries(TEST_RECOGNIZATION ${OpenCV_LIBS})


#TEST_SEGMENTATIONFREE
add_executable(TEST_SEGMENTATIONFREE ${SRC_SEGMENTATIONFREE} tests/test_segmentationFree.cpp)
target_link_libraries(TEST_SEGMENTATIONFREE ${OpenCV_LIBS})


#TEST_PIPELINE #TEST_PIPELINE


add_executable(TRST_PIPLINE ${SRC_DETECTION} ${SRC_FINEMAPPING} ${SRC_FASTDESKEW} ${SRC_SEGMENTATION} ${SRC_RECOGNIZE} ${SRC_PIPLINE} tests/test_pipeline.cpp)
add_executable(TRST_PIPLINE ${SRC_DETECTION} ${SRC_FINEMAPPING} ${SRC_FASTDESKEW} ${SRC_SEGMENTATION} ${SRC_RECOGNIZE} ${SRC_PIPLINE} ${SRC_SEGMENTATIONFREE} tests/test_pipeline.cpp)
target_link_libraries(TRST_PIPLINE ${OpenCV_LIBS}) target_link_libraries(TRST_PIPLINE ${OpenCV_LIBS})

BIN
Prj-Linux/lpr/TRST_PIPLINE View File


+ 17
- 2
Prj-Linux/lpr/include/Pipeline.h View File

@@ -12,25 +12,40 @@
#include "FastDeskew.h" #include "FastDeskew.h"
#include "FineMapping.h" #include "FineMapping.h"
#include "Recognizer.h" #include "Recognizer.h"
#include "SegmentationFreeRecognizer.h"


namespace pr{ namespace pr{

const std::vector<std::string> CH_PLATE_CODE{"京", "沪", "津", "渝", "冀", "晋", "蒙", "辽", "吉", "黑", "苏", "浙", "皖", "闽", "赣", "鲁", "豫", "鄂", "湘", "粤", "桂",
"琼", "川", "贵", "云", "藏", "陕", "甘", "青", "宁", "新", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "A",
"B", "C", "D", "E", "F", "G", "H", "J", "K", "L", "M", "N", "P", "Q", "R", "S", "T", "U", "V", "W", "X",
"Y", "Z","港","学","使","警","澳","挂","军","北","南","广","沈","兰","成","济","海","民","航","空"};



const int SEGMENTATION_FREE_METHOD = 0;
const int SEGMENTATION_BASED_METHOD = 1;

class PipelinePR{ class PipelinePR{
public: public:
GeneralRecognizer *generalRecognizer; GeneralRecognizer *generalRecognizer;
PlateDetection *plateDetection; PlateDetection *plateDetection;
PlateSegmentation *plateSegmentation; PlateSegmentation *plateSegmentation;
FineMapping *fineMapping; FineMapping *fineMapping;
SegmentationFreeRecognizer *segmentationFreeRecognizer;

PipelinePR(std::string detector_filename, PipelinePR(std::string detector_filename,
std::string finemapping_prototxt,std::string finemapping_caffemodel, std::string finemapping_prototxt,std::string finemapping_caffemodel,
std::string segmentation_prototxt,std::string segmentation_caffemodel, std::string segmentation_prototxt,std::string segmentation_caffemodel,
std::string charRecognization_proto,std::string charRecognization_caffemodel
std::string charRecognization_proto,std::string charRecognization_caffemodel,
std::string segmentationfree_proto,std::string segmentationfree_caffemodel
); );
~PipelinePR(); ~PipelinePR();






std::vector<std::string> plateRes; std::vector<std::string> plateRes;
std::vector<PlateInfo> RunPiplineAsImage(cv::Mat plateImage);
std::vector<PlateInfo> RunPiplineAsImage(cv::Mat plateImage,int method);








+ 9
- 10
Prj-Linux/lpr/include/PlateInfo.h View File

@@ -10,17 +10,14 @@ namespace pr {
typedef std::vector<cv::Mat> Character; typedef std::vector<cv::Mat> Character;


enum PlateColor { BLUE, YELLOW, WHITE, GREEN, BLACK,UNKNOWN}; enum PlateColor { BLUE, YELLOW, WHITE, GREEN, BLACK,UNKNOWN};
enum CharType {CHINESE,LETTER,LETTER_NUMS};
enum CharType {CHINESE,LETTER,LETTER_NUMS,INVALID};




class PlateInfo { class PlateInfo {
public: public:
std::vector<std::pair<CharType,cv::Mat>> plateChars;
std::vector<std::pair<CharType,cv::Mat>> plateChars;
std::vector<std::pair<CharType,cv::Mat>> plateCoding; std::vector<std::pair<CharType,cv::Mat>> plateCoding;

float confidence = 0; float confidence = 0;


PlateInfo(const cv::Mat &plateData, std::string plateName, cv::Rect plateRect, PlateColor plateType) { PlateInfo(const cv::Mat &plateData, std::string plateName, cv::Rect plateRect, PlateColor plateType) {
licensePlate = plateData; licensePlate = plateData;
name = plateName; name = plateName;
@@ -93,17 +90,21 @@ namespace pr {


} }


if(plate.first == LETTER) {
else if(plate.first == LETTER) {
decode += mappingTable[std::max_element(prob+41,prob+65)- prob]; decode += mappingTable[std::max_element(prob+41,prob+65)- prob];
confidence+=*std::max_element(prob+41,prob+65); confidence+=*std::max_element(prob+41,prob+65);
} }


if(plate.first == LETTER_NUMS) {
else if(plate.first == LETTER_NUMS) {
decode += mappingTable[std::max_element(prob+31,prob+65)- prob]; decode += mappingTable[std::max_element(prob+31,prob+65)- prob];
confidence+=*std::max_element(prob+31,prob+65); confidence+=*std::max_element(prob+31,prob+65);
// std::cout<<*std::max_element(prob+31,prob+65)<<std::endl; // std::cout<<*std::max_element(prob+31,prob+65)<<std::endl;


} }
else if(plate.first == INVALID)
{
decode+='*';
}


} }
name = decode; name = decode;
@@ -113,12 +114,10 @@ namespace pr {
return decode; return decode;
} }




private: private:
cv::Mat licensePlate; cv::Mat licensePlate;
cv::Rect ROI; cv::Rect ROI;
std::string name;
std::string name ;
PlateColor Type; PlateColor Type;
}; };
} }


+ 2
- 0
Prj-Linux/lpr/include/Recognizer.h View File

@@ -13,7 +13,9 @@ namespace pr{
class GeneralRecognizer{ class GeneralRecognizer{
public: public:
virtual label recognizeCharacter(cv::Mat character) = 0; virtual label recognizeCharacter(cv::Mat character) = 0;
// virtual cv::Mat SegmentationFreeForSinglePlate(cv::Mat plate) = 0;
void SegmentBasedSequenceRecognition(PlateInfo &plateinfo); void SegmentBasedSequenceRecognition(PlateInfo &plateinfo);
void SegmentationFreeSequenceRecognition(PlateInfo &plateInfo);


}; };




+ 28
- 0
Prj-Linux/lpr/include/SegmentationFreeRecognizer.h View File

@@ -0,0 +1,28 @@
//
// Created by 庾金科 on 28/11/2017.
//

#ifndef SWIFTPR_SEGMENTATIONFREERECOGNIZER_H
#define SWIFTPR_SEGMENTATIONFREERECOGNIZER_H

#include "Recognizer.h"
namespace pr{


// End-to-end (segmentation-free) plate recognizer: a single CNN consumes the
// whole plate image and emits a per-position character distribution.
class SegmentationFreeRecognizer{
public:
// Per-character input slice width/height and output alphabet size.
const int CHAR_INPUT_W = 14;
const int CHAR_INPUT_H = 30;
const int CHAR_LEN = 84;

// Load the recognition net from a Caffe prototxt/weights pair.
SegmentationFreeRecognizer(std::string prototxt,std::string caffemodel);
// Recognize one plate image; returns the decoded string (via mapping_table)
// and its confidence score.
std::pair<std::string,float> SegmentationFreeForSinglePlate(cv::Mat plate,std::vector<std::string> mapping_table);


private:
cv::dnn::Net net;

};

}
#endif //SWIFTPR_SEGMENTATIONFREERECOGNIZER_H

BIN
Prj-Linux/lpr/model/Archive.zip View File


BIN
Prj-Linux/lpr/res/cache/2.png View File

Before After
Width: 40  |  Height: 160  |  Size: 14 kB

BIN
Prj-Linux/lpr/res/cache/chars_segment.jpg View File

Before After
Width: 136  |  Height: 36  |  Size: 3.3 kB

BIN
Prj-Linux/lpr/res/cache/finemapping.jpg View File

Before After
Width: 140  |  Height: 60  |  Size: 5.2 kB

+ 7
- 7
Prj-Linux/lpr/src/FineMapping.cpp View File

@@ -5,8 +5,8 @@
#include "FineMapping.h" #include "FineMapping.h"
namespace pr{ namespace pr{


const int FINEMAPPING_H = 50;
const int FINEMAPPING_W = 120;
const int FINEMAPPING_H = 60 ;
const int FINEMAPPING_W = 140;
const int PADDING_UP_DOWN = 30; const int PADDING_UP_DOWN = 30;
void drawRect(cv::Mat image,cv::Rect rect) void drawRect(cv::Mat image,cv::Rect rect)
{ {
@@ -71,6 +71,8 @@ namespace pr{
cv::Mat proposal; cv::Mat proposal;


cv::resize(InputProposal,PreInputProposal,cv::Size(FINEMAPPING_W,FINEMAPPING_H)); cv::resize(InputProposal,PreInputProposal,cv::Size(FINEMAPPING_W,FINEMAPPING_H));
// cv::imwrite("res/cache/finemapping.jpg",PreInputProposal);

if(InputProposal.channels() == 3) if(InputProposal.channels() == 3)
cv::cvtColor(PreInputProposal,proposal,cv::COLOR_BGR2GRAY); cv::cvtColor(PreInputProposal,proposal,cv::COLOR_BGR2GRAY);
else else
@@ -106,7 +108,6 @@ namespace pr{
if (( lwRatio>0.7&&bdbox.width*bdbox.height>100 && bdboxAera<300) if (( lwRatio>0.7&&bdbox.width*bdbox.height>100 && bdboxAera<300)
|| (lwRatio>3.0 && bdboxAera<100 && bdboxAera>10)) || (lwRatio>3.0 && bdboxAera<100 && bdboxAera>10))
{ {

cv::Point p1(bdbox.x, bdbox.y); cv::Point p1(bdbox.x, bdbox.y);
cv::Point p2(bdbox.x + bdbox.width, bdbox.y + bdbox.height); cv::Point p2(bdbox.x + bdbox.width, bdbox.y + bdbox.height);
line_upper.push_back(p1); line_upper.push_back(p1);
@@ -116,7 +117,6 @@ namespace pr{
} }
} }


std:: cout<<"contours_nums "<<contours_nums<<std::endl;


if(contours_nums<41) if(contours_nums<41)
{ {
@@ -162,7 +162,7 @@ namespace pr{
} }


cv::Mat rgb; cv::Mat rgb;
cv::copyMakeBorder(PreInputProposal, rgb, 30, 30, 0, 0, cv::BORDER_REPLICATE);
cv::copyMakeBorder(PreInputProposal, rgb, PADDING_UP_DOWN, PADDING_UP_DOWN, 0, 0, cv::BORDER_REPLICATE);
// cv::imshow("rgb",rgb); // cv::imshow("rgb",rgb);
// cv::waitKey(0); // cv::waitKey(0);
// //
@@ -170,8 +170,8 @@ namespace pr{


std::pair<int, int> A; std::pair<int, int> A;
std::pair<int, int> B; std::pair<int, int> B;
A = FitLineRansac(line_upper, -2);
B = FitLineRansac(line_lower, 2);
A = FitLineRansac(line_upper, -1);
B = FitLineRansac(line_lower, 1);
int leftyB = A.first; int leftyB = A.first;
int rightyB = A.second; int rightyB = A.second;
int leftyA = B.first; int leftyA = B.first;


+ 50
- 18
Prj-Linux/lpr/src/Pipeline.cpp View File

@@ -7,18 +7,20 @@


namespace pr { namespace pr {


std::vector<std::string> chars_code{"京","沪","津","渝","冀","晋","蒙","辽","吉","黑","苏","浙","皖","闽","赣","鲁","豫","鄂","湘","粤","桂","琼","川","贵","云","藏","陕","甘","青","宁","新","0","1","2","3","4","5","6","7","8","9","A","B","C","D","E","F","G","H","J","K","L","M","N","P","Q","R","S","T","U","V","W","X","Y","Z"};





const int HorizontalPadding = 4;
PipelinePR::PipelinePR(std::string detector_filename, PipelinePR::PipelinePR(std::string detector_filename,
std::string finemapping_prototxt, std::string finemapping_caffemodel, std::string finemapping_prototxt, std::string finemapping_caffemodel,
std::string segmentation_prototxt, std::string segmentation_caffemodel, std::string segmentation_prototxt, std::string segmentation_caffemodel,
std::string charRecognization_proto, std::string charRecognization_caffemodel) {
std::string charRecognization_proto, std::string charRecognization_caffemodel,
std::string segmentationfree_proto,std::string segmentationfree_caffemodel) {
plateDetection = new PlateDetection(detector_filename); plateDetection = new PlateDetection(detector_filename);
fineMapping = new FineMapping(finemapping_prototxt, finemapping_caffemodel); fineMapping = new FineMapping(finemapping_prototxt, finemapping_caffemodel);
plateSegmentation = new PlateSegmentation(segmentation_prototxt, segmentation_caffemodel); plateSegmentation = new PlateSegmentation(segmentation_prototxt, segmentation_caffemodel);
generalRecognizer = new CNNRecognizer(charRecognization_proto, charRecognization_caffemodel); generalRecognizer = new CNNRecognizer(charRecognization_proto, charRecognization_caffemodel);
segmentationFreeRecognizer = new SegmentationFreeRecognizer(segmentationfree_proto,segmentationfree_caffemodel);

} }


PipelinePR::~PipelinePR() { PipelinePR::~PipelinePR() {
@@ -27,34 +29,64 @@ namespace pr {
delete fineMapping; delete fineMapping;
delete plateSegmentation; delete plateSegmentation;
delete generalRecognizer; delete generalRecognizer;
delete segmentationFreeRecognizer;



} }


std::vector<PlateInfo> PipelinePR:: RunPiplineAsImage(cv::Mat plateImage) {
std::vector<PlateInfo> PipelinePR:: RunPiplineAsImage(cv::Mat plateImage,int method) {
std::vector<PlateInfo> results; std::vector<PlateInfo> results;
std::vector<pr::PlateInfo> plates; std::vector<pr::PlateInfo> plates;
plateDetection->plateDetectionRough(plateImage,plates);
plateDetection->plateDetectionRough(plateImage,plates,36,700);


for (pr::PlateInfo plateinfo:plates) { for (pr::PlateInfo plateinfo:plates) {


cv::Mat image_finemapping = plateinfo.getPlateImage(); cv::Mat image_finemapping = plateinfo.getPlateImage();
image_finemapping = fineMapping->FineMappingVertical(image_finemapping); image_finemapping = fineMapping->FineMappingVertical(image_finemapping);
image_finemapping = pr::fastdeskew(image_finemapping, 5); image_finemapping = pr::fastdeskew(image_finemapping, 5);
image_finemapping = fineMapping->FineMappingHorizon(image_finemapping, 2, 5);
cv::resize(image_finemapping, image_finemapping, cv::Size(136, 36));
plateinfo.setPlateImage(image_finemapping);
std::vector<cv::Rect> rects;
plateSegmentation->segmentPlatePipline(plateinfo, 1, rects);
plateSegmentation->ExtractRegions(plateinfo, rects);
cv::copyMakeBorder(image_finemapping, image_finemapping, 0, 0, 0, 20, cv::BORDER_REPLICATE);

plateinfo.setPlateImage(image_finemapping);
generalRecognizer->SegmentBasedSequenceRecognition(plateinfo);
plateinfo.decodePlateNormal(chars_code);
results.push_back(plateinfo);
std::cout << plateinfo.getPlateName() << std::endl;





//Segmentation-based

if(method==SEGMENTATION_BASED_METHOD)
{
image_finemapping = fineMapping->FineMappingHorizon(image_finemapping, 2, HorizontalPadding);
cv::resize(image_finemapping, image_finemapping, cv::Size(136+HorizontalPadding, 36));
// cv::imshow("image_finemapping",image_finemapping);
// cv::waitKey(0);
plateinfo.setPlateImage(image_finemapping);
std::vector<cv::Rect> rects;

plateSegmentation->segmentPlatePipline(plateinfo, 1, rects);
plateSegmentation->ExtractRegions(plateinfo, rects);
cv::copyMakeBorder(image_finemapping, image_finemapping, 0, 0, 0, 20, cv::BORDER_REPLICATE);
plateinfo.setPlateImage(image_finemapping);
generalRecognizer->SegmentBasedSequenceRecognition(plateinfo);
plateinfo.decodePlateNormal(pr::CH_PLATE_CODE);

}
//Segmentation-free
else if(method==SEGMENTATION_FREE_METHOD)
{

image_finemapping = fineMapping->FineMappingHorizon(image_finemapping, 4, HorizontalPadding+3);

cv::resize(image_finemapping, image_finemapping, cv::Size(136+HorizontalPadding, 36));
// cv::imwrite("./test.png",image_finemapping);
// cv::imshow("image_finemapping",image_finemapping);
// cv::waitKey(0);
plateinfo.setPlateImage(image_finemapping);
// std::vector<cv::Rect> rects;

std::pair<std::string,float> res = segmentationFreeRecognizer->SegmentationFreeForSinglePlate(plateinfo.getPlateImage(),pr::CH_PLATE_CODE);
plateinfo.confidence = res.second;
plateinfo.setPlateName(res.first);
}



results.push_back(plateinfo);
} }


// for (auto str:results) { // for (auto str:results) {


+ 4
- 4
Prj-Linux/lpr/src/PlateDetection.cpp View File

@@ -36,10 +36,10 @@ namespace pr{
// w += w * 0.28 // w += w * 0.28
// y -= h * 0.6 // y -= h * 0.6
// h += h * 1.1; // h += h * 1.1;
int zeroadd_w = static_cast<int>(plate.width*0.28);
int zeroadd_h = static_cast<int>(plate.height*1.2);
int zeroadd_x = static_cast<int>(plate.width*0.14);
int zeroadd_y = static_cast<int>(plate.height*0.6);
int zeroadd_w = static_cast<int>(plate.width*0.30);
int zeroadd_h = static_cast<int>(plate.height*2);
int zeroadd_x = static_cast<int>(plate.width*0.15);
int zeroadd_y = static_cast<int>(plate.height*1);
plate.x-=zeroadd_x; plate.x-=zeroadd_x;
plate.y-=zeroadd_y; plate.y-=zeroadd_y;
plate.height += zeroadd_h; plate.height += zeroadd_h;


+ 16
- 14
Prj-Linux/lpr/src/PlateSegmentation.cpp View File

@@ -94,7 +94,7 @@ namespace pr{
cv::Mat roi_thres; cv::Mat roi_thres;
// cv::threshold(roiImage,roi_thres,0,255,cv::THRESH_OTSU|cv::THRESH_BINARY); // cv::threshold(roiImage,roi_thres,0,255,cv::THRESH_OTSU|cv::THRESH_BINARY);


niBlackThreshold(roiImage,roi_thres,255,cv::THRESH_BINARY,15,0.3,BINARIZATION_NIBLACK);
niBlackThreshold(roiImage,roi_thres,255,cv::THRESH_BINARY,15,0.27,BINARIZATION_NIBLACK);


std::vector<std::vector<cv::Point>> contours; std::vector<std::vector<cv::Point>> contours;
cv::findContours(roi_thres,contours,cv::RETR_LIST,cv::CHAIN_APPROX_SIMPLE); cv::findContours(roi_thres,contours,cv::RETR_LIST,cv::CHAIN_APPROX_SIMPLE);
@@ -220,7 +220,7 @@ namespace pr{




int cp_list[7]; int cp_list[7];
float loss_selected = -1;
float loss_selected = -10;


for(int start = 0 ; start < 20 ; start+=2) for(int start = 0 ; start < 20 ; start+=2)
for(int width = windowsWidth-5; width < windowsWidth+5 ; width++ ){ for(int width = windowsWidth-5; width < windowsWidth+5 ; width++ ){
@@ -246,13 +246,10 @@ namespace pr{


if(cp7_p5>=cols) if(cp7_p5>=cols)
continue; continue;
float loss = ch_prob[cp1_ch]+
engNum_prob[cp2_p0] +engNum_prob[cp3_p1]+engNum_prob[cp4_p2]+engNum_prob[cp5_p3]+engNum_prob[cp6_p4] +engNum_prob[cp7_p5]
+ (false_prob[md2]+false_prob[md3]+false_prob[md4]+false_prob[md5]+false_prob[md5] + false_prob[md6]);
// float loss = ch_prob[cp1_ch]*3 -(false_prob[cp3_p1]+false_prob[cp4_p2]+false_prob[cp5_p3]+false_prob[cp6_p4]+false_prob[cp7_p5]);



// float loss = ch_prob[cp1_ch]+
// engNum_prob[cp2_p0] +engNum_prob[cp3_p1]+engNum_prob[cp4_p2]+engNum_prob[cp5_p3]+engNum_prob[cp6_p4] +engNum_prob[cp7_p5]
// + (false_prob[md2]+false_prob[md3]+false_prob[md4]+false_prob[md5]+false_prob[md5] + false_prob[md6]);
float loss = ch_prob[cp1_ch]*3 -(false_prob[cp3_p1]+false_prob[cp4_p2]+false_prob[cp5_p3]+false_prob[cp6_p4]+false_prob[cp7_p5]);


if(loss>loss_selected) if(loss>loss_selected)
{ {
@@ -284,15 +281,15 @@ namespace pr{
void PlateSegmentation::segmentPlateBySlidingWindows(cv::Mat &plateImage,int windowsWidth,int stride,cv::Mat &respones){ void PlateSegmentation::segmentPlateBySlidingWindows(cv::Mat &plateImage,int windowsWidth,int stride,cv::Mat &respones){




cv::resize(plateImage,plateImage,cv::Size(136,36));
// cv::resize(plateImage,plateImage,cv::Size(136,36));


cv::Mat plateImageGray; cv::Mat plateImageGray;
cv::cvtColor(plateImage,plateImageGray,cv::COLOR_BGR2GRAY); cv::cvtColor(plateImage,plateImageGray,cv::COLOR_BGR2GRAY);

int padding = plateImage.cols-136 ;
// int padding = 0 ;
int height = plateImage.rows - 1; int height = plateImage.rows - 1;
int width = plateImage.cols - 1;

for(int i = 0 ; i < plateImage.cols - windowsWidth +1 ; i +=stride)
int width = plateImage.cols - 1 - padding;
for(int i = 0 ; i < width - windowsWidth +1 ; i +=stride)
{ {
cv::Rect roi(i,0,windowsWidth,height); cv::Rect roi(i,0,windowsWidth,height);
cv::Mat roiImage = plateImageGray(roi); cv::Mat roiImage = plateImageGray(roi);
@@ -348,6 +345,11 @@ namespace pr{
cv::Mat respones; //three response of every sub region from origin image . cv::Mat respones; //three response of every sub region from origin image .
segmentPlateBySlidingWindows(plateImage,DEFAULT_WIDTH,1,respones); segmentPlateBySlidingWindows(plateImage,DEFAULT_WIDTH,1,respones);
templateMatchFinding(respones,DEFAULT_WIDTH/stride,sections); templateMatchFinding(respones,DEFAULT_WIDTH/stride,sections);
for(int i = 0; i < sections.second.size() ; i++)
{
sections.second[i]*=stride;

}


// std::cout<<sections<<std::endl; // std::cout<<sections<<std::endl;




+ 10
- 7
Prj-Linux/lpr/src/Recognizer.cpp View File

@@ -6,17 +6,20 @@


namespace pr{ namespace pr{
void GeneralRecognizer::SegmentBasedSequenceRecognition(PlateInfo &plateinfo){ void GeneralRecognizer::SegmentBasedSequenceRecognition(PlateInfo &plateinfo){


for(auto char_instance:plateinfo.plateChars) for(auto char_instance:plateinfo.plateChars)
{ {
std::pair<CharType,cv::Mat> res;
if(char_instance.second.rows*char_instance.second.cols>40) {
label code_table = recognizeCharacter(char_instance.second);
res.first = char_instance.first;
code_table.copyTo(res.second);
plateinfo.appendPlateCoding(res);
} else{
res.first = INVALID;
plateinfo.appendPlateCoding(res);


}


std::pair<CharType,cv::Mat> res;
cv::Mat code_table= recognizeCharacter(char_instance.second);
res.first = char_instance.first;
code_table.copyTo(res.second);
plateinfo.appendPlateCoding(res);


} }




+ 118
- 0
Prj-Linux/lpr/src/SegmentationFreeRecognizer.cpp View File

@@ -0,0 +1,118 @@
//
// Created by 庾金科 on 28/11/2017.
//
#include "../include/SegmentationFreeRecognizer.h"

namespace pr {
// Load the segmentation-free recognition model (Caffe prototxt + weights)
// into the OpenCV dnn::Net member used by SegmentationFreeForSinglePlate.
SegmentationFreeRecognizer::SegmentationFreeRecognizer(std::string prototxt, std::string caffemodel) {
net = cv::dnn::readNetFromCaffe(prototxt, caffemodel);
}


// Non-zero when the label id lies outside the band [31, 63] of the code
// table — presumably ids 31..63 are the alphanumeric characters and ids
// below 31 are province (Chinese) characters; confirm against
// pr::CH_PLATE_CODE ordering.
inline int judgeCharRange(int id)
{
    const bool inAlnumBand = (id >= 31) && (id <= 63);
    return !inAlnumBand;
}


// Greedy (best-path) decoding of a sequence-model output blob into a plate
// string with a confidence score.
//
// code_table    : network output blob; cv::MatSize is indexed below as
//                 [batch, labellength, sequencelength].
// mapping_table : label-id -> character lookup (e.g. pr::CH_PLATE_CODE).
// thres         : currently unused; kept for interface compatibility.
// Returns the decoded string and the mean confidence of its characters
// (0.0f when nothing could be decoded).
std::pair<std::string,float> decodeResults(cv::Mat code_table,std::vector<std::string> mapping_table,float thres)
{
    cv::MatSize mtsize = code_table.size;
    int sequencelength = mtsize[2];
    int labellength = mtsize[1];
    // Reinterpret the blob as (sequencelength x labellength): one row of
    // per-label scores for every sequence step.
    cv::transpose(code_table.reshape(1,1).reshape(1,labellength),code_table);
    std::string name = "";

    // Arg-max label id for every sequence step.
    std::vector<int> seq(sequencelength);
    std::vector<std::pair<int,float>> seq_decode_res;
    for(int i = 0 ; i < sequencelength; i++) {
        const float *fstart = ((float *) (code_table.data) + i * labellength );
        int id = std::max_element(fstart,fstart+labellength) - fstart;
        seq[i] = id;
    }

    // CTC-style collapse: drop the blank label (labellength-1) and merge
    // consecutive repeats, recording each surviving label's confidence.
    for(int i = 0 ; i < sequencelength ; i++)
    {
        if(seq[i]!=labellength-1 && (i==0 || seq[i]!=seq[i-1]))
        {
            const float *fstart = ((float *) (code_table.data) + i * labellength );
            seq_decode_res.push_back(std::make_pair(seq[i], *(fstart+seq[i])));
        }
    }

    float sum_confidence = 0;
    int plate_length = 0;
    size_t i = 0;

    // When the first two decoded symbols both fall outside the alphanumeric
    // band (i.e. both look like a leading Chinese character), keep only the
    // more confident of the two. Size guard added: the previous code indexed
    // seq_decode_res[0]/[1] unconditionally and crashed (out-of-bounds) on
    // plates that decoded to fewer than two symbols.
    if(seq_decode_res.size() >= 2
       && judgeCharRange(seq_decode_res[0].first) && judgeCharRange(seq_decode_res[1].first))
    {
        i = 2;
        int c = seq_decode_res[0].second < seq_decode_res[1].second;
        name += mapping_table[seq_decode_res[c].first];
        sum_confidence += seq_decode_res[c].second;
        plate_length++;
    }

    // Append the remaining decoded symbols.
    for(; i < seq_decode_res.size(); i++)
    {
        name += mapping_table[seq_decode_res[i].first];
        sum_confidence += seq_decode_res[i].second;
        plate_length++;
    }

    std::pair<std::string,float> res;
    // Guard the mean: avoid division by zero when nothing was decoded.
    res.second = plate_length > 0 ? sum_confidence / plate_length : 0.0f;
    res.first = name;
    return res;
}

// Decode a sequence-model output blob into a plate string (no confidence).
// Drops the blank label and collapses consecutive duplicates, prints the
// decoded string to stdout (debug trace kept for parity), and returns it.
std::string decodeResults(cv::Mat code_table,std::vector<std::string> mapping_table)
{
    cv::MatSize dims = code_table.size;
    const int steps  = dims[2];   // sequence length
    const int labels = dims[1];   // number of label classes (last = blank)

    // View the blob as one row of label scores per sequence step.
    cv::transpose(code_table.reshape(1,1).reshape(1,labels),code_table);

    // Per-step arg-max label ids.
    std::vector<int> best(steps);
    for(int t = 0; t < steps; ++t) {
        const float *row = (float *)(code_table.data) + t * labels;
        best[t] = static_cast<int>(std::max_element(row, row + labels) - row);
    }

    // Collapse: skip the blank label and repeats of the previous step.
    std::string name;
    int prev = -1;
    for(int t = 0; t < steps; ++t) {
        if(best[t] != labels - 1 && best[t] != prev)
            name += mapping_table[best[t]];
        prev = best[t];
    }

    std::cout<<name;
    return name;
}



// Run the segmentation-free network on one cropped plate image and decode
// the output into (plate string, mean confidence).
// Image         : plate crop; transposed first so the plate's long side
//                 becomes the sequence axis the network expects.
// mapping_table : label-id -> character lookup passed through to
//                 decodeResults.
// The trailing 0.00 is the (currently unused) threshold argument of
// decodeResults.
std::pair<std::string,float> SegmentationFreeRecognizer::SegmentationFreeForSinglePlate(cv::Mat Image,std::vector<std::string> mapping_table) {
cv::transpose(Image,Image);
// Scale pixels by 1/255 and resize to the network's 40x160 input blob.
cv::Mat inputBlob = cv::dnn::blobFromImage(Image, 1 / 255.0, cv::Size(40,160));
net.setInput(inputBlob, "data");
cv::Mat char_prob_mat = net.forward();
return decodeResults(char_prob_mat,mapping_table,0.00);

}


}

BIN
Prj-Linux/lpr/test.png View File

Before After
Width: 140  |  Height: 36  |  Size: 12 kB

+ 169
- 11
Prj-Linux/lpr/tests/test_pipeline.cpp View File

@@ -6,34 +6,192 @@







using namespace std;

// Classic two-row dynamic-programming Levenshtein edit distance between two
// indexable sequences (insertions, deletions and substitutions all cost 1).
template<class T>
static unsigned int levenshtein_distance(const T &s1, const T &s2) {
    const size_t n = s1.size();
    const size_t m = s2.size();

    // previous[j] = distance from the current prefix of s1 to s2[0..j-1].
    std::vector<unsigned int> previous(m + 1);
    std::vector<unsigned int> current(m + 1);
    for (size_t j = 0; j <= m; ++j)
        previous[j] = static_cast<unsigned int>(j);

    for (size_t i = 0; i < n; ++i) {
        current[0] = static_cast<unsigned int>(i) + 1;
        for (size_t j = 0; j < m; ++j) {
            const unsigned int subst  = previous[j] + (s1[i] == s2[j] ? 0u : 1u);
            const unsigned int insdel = std::min(previous[j + 1], current[j]) + 1;
            current[j + 1] = std::min(insdel, subst);
        }
        previous.swap(current);
    }
    return previous[m];
}




void TEST_ACC(){

pr::PipelinePR prc("model/cascade.xml",
"model/HorizonalFinemapping.prototxt","model/HorizonalFinemapping.caffemodel",
"model/Segmentation.prototxt","model/Segmentation.caffemodel",
"model/CharacterRecognization.prototxt","model/CharacterRecognization.caffemodel",
"model/SegmenationFree-Inception.prototxt","model/SegmenationFree-Inception.caffemodel"
);

ifstream file;
string imagename;
int n = 0,correct = 0,j = 0,sum = 0;
char filename[] = "/Users/yujinke/Downloads/general_test/1.txt";
string pathh = "/Users/yujinke/Downloads/general_test/";
file.open(filename, ios::in);
while (!file.eof())
{
file >> imagename;
string imgpath = pathh + imagename;
std::cout << "------------------------------------------------" << endl;
cout << "图片名:" << imagename << endl;
cv::Mat image = cv::imread(imgpath);
// cv::imshow("image", image);
// cv::waitKey(0);

std::vector<pr::PlateInfo> res = prc.RunPiplineAsImage(image,pr::SEGMENTATION_FREE_METHOD);

float conf = 0;
vector<float> con ;
vector<string> name;
for (auto st : res) {
if (st.confidence > 0.1) {
//std::cout << st.getPlateName() << " " << st.confidence << std::endl;
con.push_back(st.confidence);
name.push_back(st.getPlateName());
//conf += st.confidence;
}
else
cout << "no string" << endl;
}
// std::cout << conf << std::endl;
int num = con.size();
float max = 0;
string platestr, chpr, ch;
int diff = 0,dif = 0;
for (int i = 0; i < num; i++) {

if (con.at(i) > max)
{
max = con.at(i);
platestr = name.at(i);
}

}
// cout << "max:"<<max << endl;
cout << "string:" << platestr << endl;
chpr = platestr.substr(0, 2);
ch = imagename.substr(0, 2);
diff = levenshtein_distance(imagename, platestr);
dif = diff - 4;
cout << "差距:" <<dif << endl;
sum += dif;
if (ch != chpr) n++;
if (diff == 0) correct++;
j++;
}
float cha = 1 - float(n) / float(j);
std::cout << "------------------------------------------------" << endl;
cout << "车牌总数:" << j << endl;
cout << "汉字识别准确率:"<<cha << endl;
float chaccuracy = 1 - float(sum - n * 2) /float(j * 8);
cout << "字符识别准确率:" << chaccuracy << endl;

}


void TEST_PIPELINE(){ void TEST_PIPELINE(){


pr::PipelinePR prc("model/cascade.xml", pr::PipelinePR prc("model/cascade.xml",
"model/HorizonalFinemapping.prototxt","model/HorizonalFinemapping.caffemodel", "model/HorizonalFinemapping.prototxt","model/HorizonalFinemapping.caffemodel",
"model/Segmentation.prototxt","model/Segmentation.caffemodel", "model/Segmentation.prototxt","model/Segmentation.caffemodel",
"model/CharacterRecognization.prototxt","model/CharacterRecognization.caffemodel"
"model/CharacterRecognization.prototxt","model/CharacterRecognization.caffemodel",
"model/SegmentationFree.prototxt","model/SegmentationFree.caffemodel"
); );


cv::Mat image = cv::imread("/Users/yujinke/车牌图片/云南车牌/云A1DZ32.jpg");
cv::imshow("image",image);
cv::waitKey(0);
cv::Mat image = cv::imread("/Users/yujinke/ClionProjects/cpp_ocr_demo/test.png");


std::vector<pr::PlateInfo> res = prc.RunPiplineAsImage(image,pr::SEGMENTATION_FREE_METHOD);


std::vector<pr::PlateInfo> res = prc.RunPiplineAsImage(image);
float conf = 0 ;
for(auto st:res) { for(auto st:res) {
if(st.confidence>0.1) {
if(st.confidence>0.75) {
std::cout << st.getPlateName() << " " << st.confidence << std::endl; std::cout << st.getPlateName() << " " << st.confidence << std::endl;
conf += st.confidence;
cv::Rect region = st.getPlateRect();

cv::rectangle(image,cv::Point(region.x,region.y),cv::Point(region.x+region.width,region.y+region.height),cv::Scalar(255,255,0),2);
} }
} }
std::cout<<conf<<std::endl;

cv::imshow("image",image);
cv::waitKey(0);


} }
int main()


void TEST_CAM()
{ {


TEST_PIPELINE();
cv::VideoCapture capture("test1.mp4");
cv::Mat frame;

pr::PipelinePR prc("model/cascade.xml",
"model/HorizonalFinemapping.prototxt","model/HorizonalFinemapping.caffemodel",
"model/Segmentation.prototxt","model/Segmentation.caffemodel",
"model/CharacterRecognization.prototxt","model/CharacterRecognization.caffemodel",
"model/SegmentationFree.prototxt","model/SegmentationFree.caffemodel"
);





while(1) {
//读取下一帧
if (!capture.read(frame)) {
std::cout << "读取视频失败" << std::endl;
exit(1);
}
//
// cv::transpose(frame,frame);
// cv::flip(frame,frame,2);

// cv::resize(frame,frame,cv::Size(frame.cols/2,frame.rows/2));



std::vector<pr::PlateInfo> res = prc.RunPiplineAsImage(frame,pr::SEGMENTATION_FREE_METHOD);

for(auto st:res) {
if(st.confidence>0.75) {
std::cout << st.getPlateName() << " " << st.confidence << std::endl;
cv::Rect region = st.getPlateRect();

cv::rectangle(frame,cv::Point(region.x,region.y),cv::Point(region.x+region.width,region.y+region.height),cv::Scalar(255,255,0),2);
}
}

cv::imshow("image",frame);
cv::waitKey(1);



}

}


int main()
{
TEST_ACC();


// TEST_CAM();
// TEST_PIPELINE();


return 0 ; return 0 ;




+ 1
- 0
Prj-Linux/lpr/tests/test_recognization.cpp View File

@@ -16,6 +16,7 @@ void getMaxClass(cv::Mat &probBlob, int *classId, double *classProb)
cv::Point classNumber; cv::Point classNumber;


cv::minMaxLoc(probBlob, NULL, classProb, NULL, &classNumber); cv::minMaxLoc(probBlob, NULL, classProb, NULL, &classNumber);

*classId = classNumber.x; *classId = classNumber.x;
} }




+ 54
- 0
Prj-Linux/lpr/tests/test_segmentationFree.cpp View File

@@ -0,0 +1,54 @@
//
// Created by 庾金科 on 29/11/2017.
//
#include "../include/SegmentationFreeRecognizer.h"
#include "../include/Pipeline.h"

#include "../include/PlateInfo.h"



// Standalone copy of the greedy sequence decoder used by the test program.
// code_table    : network output blob; cv::MatSize indexed as
//                 [batch, labellength, sequencelength].
// mapping_table : label-id -> character lookup.
// Drops the blank label (labellength-1), collapses consecutive repeats,
// prints the decoded string to stdout and returns it.
std::string decodeResults(cv::Mat code_table,std::vector<std::string> mapping_table)
{
cv::MatSize mtsize = code_table.size;
int sequencelength = mtsize[2];
int labellength = mtsize[1];
// Reinterpret the blob as (sequencelength x labellength) score rows.
cv::transpose(code_table.reshape(1,1).reshape(1,labellength),code_table);
std::string name = "";
// Arg-max label id per sequence step.
std::vector<int> seq(sequencelength);
for(int i = 0 ; i < sequencelength; i++) {
float *fstart = ((float *) (code_table.data) + i * labellength );
int id = std::max_element(fstart,fstart+labellength) - fstart;
seq[i] =id;
}
// CTC-style collapse: skip blanks and consecutive duplicates.
for(int i = 0 ; i< sequencelength ; i++)
{
if(seq[i]!=labellength-1 && (i==0 || seq[i]!=seq[i-1]))
name+=mapping_table[seq[i]];
}
// Debug trace of the decoded plate.
std::cout<<name;
return name;
}


// Manual smoke test for SegmentationFreeRecognizer: loads a cached plate
// crop, shows it, runs the recognizer and prints the decoded string and
// confidence. Requires a display (imshow/waitKey) and local model files.
int main()
{
cv::Mat image = cv::imread("res/cache/chars_segment.jpg");
// cv::transpose(image,image);

// cv::resize(image,image,cv::Size(160,40));
cv::imshow("xxx",image);
cv::waitKey(0);
pr::SegmentationFreeRecognizer recognizr("model/SegmentFreeModel.prototxt","model/InceptionV3.caffemodel");
std::pair<std::string,float> res = recognizr.SegmentationFreeForSinglePlate(image,pr::CH_PLATE_CODE);
// Print decoded plate string followed by its mean confidence.
std::cout<<res.first<<" "
<<res.second<<std::endl;


// decodeResults(plate,pr::CH_PLATE_CODE);
cv::imshow("image",image);
cv::waitKey(0);

return 0;

}

+ 6
- 0
Prj-Linux/main.cpp View File

@@ -0,0 +1,6 @@
#include <iostream>

// Placeholder entry point (project scaffold); prints a greeting and exits.
int main() {
std::cout << "Hello, World!" << std::endl;
return 0;
}

+ 0
- 12117
Prj-Linux/model/cascade.xml
File diff suppressed because it is too large
View File


+ 4
- 1
README.md View File

@@ -16,7 +16,10 @@ HyperLPR是一个使用深度学习针对对中文车牌识别的实现,与较


### 更新 ### 更新


+ 增加字符分割[训练代码和字符分割介绍](https://github.com/zeusees/HyperLPR-Training)
+ 增加了端到端模型的cpp实现(Linux)(2018.1.31)


+ 增加字符分割[训练代码和字符分割介绍](https://github.com/zeusees/HyperLPR-Training)(2018.1.)
+ 更新了Android实现,大幅提高准确率和速度 (骁龙835 (*720*x*1280*) ~50ms )(2017.12.27) + 更新了Android实现,大幅提高准确率和速度 (骁龙835 (*720*x*1280*) ~50ms )(2017.12.27)
+ 添加了IOS版本的实现(感谢[xiaojun123456](https://github.com/xiaojun123456)的工作) + 添加了IOS版本的实现(感谢[xiaojun123456](https://github.com/xiaojun123456)的工作)
+ 添加端到端的序列识别模型识别率大幅度提升,使得无需分割字符即可识别,识别速度提高20% (2017.11.17) + 添加端到端的序列识别模型识别率大幅度提升,使得无需分割字符即可识别,识别速度提高20% (2017.11.17)


Loading…
Cancel
Save