Browse Source

Add iOS implementation and update README

pull/1/MERGE
jackyu 6 years ago
parent
commit
90d42be1c9
35 changed files with 14923 additions and 18 deletions
  1. +24
    -0
      Prj-IOS/include/CNNRecognizer.h
  2. +18
    -0
      Prj-IOS/include/FastDeskew.h
  3. +32
    -0
      Prj-IOS/include/FineMapping.h
  4. +48
    -0
      Prj-IOS/include/Pipeline.h
  5. +33
    -0
      Prj-IOS/include/PlateDetection.h
  6. +127
    -0
      Prj-IOS/include/PlateInfo.h
  7. +39
    -0
      Prj-IOS/include/PlateSegmentation.h
  8. +21
    -0
      Prj-IOS/include/Recognizer.h
  9. +107
    -0
      Prj-IOS/include/niBlackThreshold.h
  10. +435
    -0
      Prj-IOS/lp.xcodeproj/project.pbxproj
  11. +7
    -0
      Prj-IOS/lp.xcodeproj/project.xcworkspace/contents.xcworkspacedata
  12. BIN
      Prj-IOS/lp.xcodeproj/project.xcworkspace/xcuserdata/apple.xcuserdatad/UserInterfaceState.xcuserstate
  13. +5
    -0
      Prj-IOS/lp.xcodeproj/xcuserdata/apple.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist
  14. +14
    -0
      Prj-IOS/lp.xcodeproj/xcuserdata/apple.xcuserdatad/xcschemes/xcschememanagement.plist
  15. +17
    -0
      Prj-IOS/lp/AppDelegate.h
  16. +51
    -0
      Prj-IOS/lp/AppDelegate.m
  17. +98
    -0
      Prj-IOS/lp/Assets.xcassets/AppIcon.appiconset/Contents.json
  18. +25
    -0
      Prj-IOS/lp/Base.lproj/LaunchScreen.storyboard
  19. +28
    -0
      Prj-IOS/lp/Base.lproj/Main.storyboard
  20. +45
    -0
      Prj-IOS/lp/Info.plist
  21. +32
    -0
      Prj-IOS/lp/UIImageCVMatConverter.h
  22. +317
    -0
      Prj-IOS/lp/UIImageCVMatConverter.mm
  23. +40
    -0
      Prj-IOS/lp/ViewController.h
  24. +216
    -0
      Prj-IOS/lp/ViewController.mm
  25. +16
    -0
      Prj-IOS/lp/main.m
  26. +12117
    -0
      Prj-IOS/model.bundle/cascade.xml
  27. +19
    -0
      Prj-IOS/src/CNNRecognizer.cpp
  28. +133
    -0
      Prj-IOS/src/FastDeskew.cpp
  29. +205
    -0
      Prj-IOS/src/FineMapping.cpp
  30. +69
    -0
      Prj-IOS/src/Pipeline.cpp
  31. +61
    -0
      Prj-IOS/src/PlateDetection.cpp
  32. +402
    -0
      Prj-IOS/src/PlateSegmentation.cpp
  33. +26
    -0
      Prj-IOS/src/Recognizer.cpp
  34. +79
    -0
      Prj-IOS/src/util.h
  35. +17
    -18
      README.md

+ 24
- 0
Prj-IOS/include/CNNRecognizer.h View File

@@ -0,0 +1,24 @@
//
// Created by 庾金科 on 21/10/2017.
//

#ifndef SWIFTPR_CNNRECOGNIZER_H
#define SWIFTPR_CNNRECOGNIZER_H

#include "Recognizer.h"
namespace pr{
// CNN-backed single-character recognizer: loads a Caffe model via cv::dnn
// and classifies one character patch at a time (see GeneralRecognizer).
class CNNRecognizer: public GeneralRecognizer{
public:
// Fixed input size the network expects for one character patch (width x height).
const int CHAR_INPUT_W = 14;
const int CHAR_INPUT_H = 30;

// Loads the network from a Caffe prototxt + weights file pair.
CNNRecognizer(std::string prototxt,std::string caffemodel);
// Runs the net on one character image; returns the raw score map (`label`
// is a cv::Mat typedef from Recognizer.h). NOTE(review): overrides the pure
// virtual in GeneralRecognizer but is not marked `override`.
label recognizeCharacter(cv::Mat character);
private:
cv::dnn::Net net;

};

}

#endif //SWIFTPR_CNNRECOGNIZER_H

+ 18
- 0
Prj-IOS/include/FastDeskew.h View File

@@ -0,0 +1,18 @@
//
// Created by 庾金科 on 22/09/2017.
//

#ifndef SWIFTPR_FASTDESKEW_H
#define SWIFTPR_FASTDESKEW_H

#include <math.h>
#include <opencv2/opencv.hpp>
namespace pr{

// Deskews a cropped plate image and returns the corrected image.
// `blockSize` controls the local window used by the algorithm — exact
// semantics are defined in src/FastDeskew.cpp (not visible here); confirm there.
cv::Mat fastdeskew(cv::Mat skewImage,int blockSize);
// cv::Mat spatialTransformer(cv::Mat skewImage);

}//namespace pr


#endif //SWIFTPR_FASTDESKEW_H

+ 32
- 0
Prj-IOS/include/FineMapping.h View File

@@ -0,0 +1,32 @@
//
// Created by 庾金科 on 22/09/2017.
//

#ifndef SWIFTPR_FINEMAPPING_H
#define SWIFTPR_FINEMAPPING_H

#include <opencv2/opencv.hpp>
#include <opencv2/dnn.hpp>

#include <string>
namespace pr{
// Refines a rough plate crop to a tight bounding region, in two passes:
// a static vertical pass and a dnn-assisted horizontal pass.
class FineMapping{
public:
FineMapping();


// Loads the Caffe network used by FineMappingHorizon.
FineMapping(std::string prototxt,std::string caffemodel);
// Vertical refinement; purely image-based (static — does not use `net`).
// Defaults (sliceNum/upper/lower/windows_size) are tuned constants; see
// src/FineMapping.cpp for their exact use.
static cv::Mat FineMappingVertical(cv::Mat InputProposal,int sliceNum=15,int upper=0,int lower=-50,int windows_size=17);
// Horizontal refinement of the vertically-refined crop, with the given
// left/right padding in pixels.
cv::Mat FineMappingHorizon(cv::Mat FinedVertical,int leftPadding,int rightPadding);


private:
cv::dnn::Net net;

};




}
#endif //SWIFTPR_FINEMAPPING_H

+ 48
- 0
Prj-IOS/include/Pipeline.h View File

@@ -0,0 +1,48 @@
//
// Created by 庾金科 on 22/10/2017.
//

// NOTE(review): guard macro is misspelled ("PIPLINE"); harmless but worth
// renaming to SWIFTPR_PIPELINE_H in a coordinated change.
#ifndef SWIFTPR_PIPLINE_H
#define SWIFTPR_PIPLINE_H

#include "PlateDetection.h"
#include "PlateSegmentation.h"
#include "CNNRecognizer.h"
#include "PlateInfo.h"
#include "FastDeskew.h"
#include "FineMapping.h"
#include "Recognizer.h"

namespace pr{
// End-to-end plate recognition pipeline: detection -> segmentation ->
// fine mapping -> per-character recognition.
class PipelinePR{
public:
// Owning raw pointers, allocated in the constructor and released in the
// destructor (see src/Pipeline.cpp). NOTE(review): candidates for
// std::unique_ptr; also note the base class is deleted through
// GeneralRecognizer*, which requires a virtual destructor there.
GeneralRecognizer *generalRecognizer;
PlateDetection *plateDetection;
PlateSegmentation *plateSegmentation;
FineMapping *fineMapping;
// Builds the whole pipeline from the model files: detector cascade,
// fine-mapping net, segmentation net, and character-recognition net.
PipelinePR(std::string detector_filename,
std::string finemapping_prototxt,std::string finemapping_caffemodel,
std::string segmentation_prototxt,std::string segmentation_caffemodel,
std::string charRecognization_proto,std::string charRecognization_caffemodel
);
~PipelinePR();



// Decoded plate strings from the last run — presumably filled by
// RunPiplineAsImage; confirm in src/Pipeline.cpp.
std::vector<std::string> plateRes;
// Runs the full pipeline on one image and returns all recognized plates.
std::vector<PlateInfo> RunPiplineAsImage(cv::Mat plateImage);







};


}
#endif //SWIFTPR_PIPLINE_H

+ 33
- 0
Prj-IOS/include/PlateDetection.h View File

@@ -0,0 +1,33 @@
//
// Created by 庾金科 on 20/09/2017.
//

#ifndef SWIFTPR_PLATEDETECTION_H
#define SWIFTPR_PLATEDETECTION_H

#include <opencv2/opencv.hpp>
// Fix: project-local header included with quotes, consistent with every other
// header in this project (angle brackets reserved for system/third-party headers).
#include "PlateInfo.h"
#include <vector>
namespace pr{
    // Coarse plate localization using an OpenCV cascade classifier.
    class PlateDetection{
    public:
        // Loads the cascade from `filename_cascade` (equivalent to default
        // construction followed by LoadModel).
        PlateDetection(std::string filename_cascade);
        PlateDetection();
        void LoadModel(std::string filename_cascade);
        // Detects candidate plates in InputImage and appends one PlateInfo per
        // hit to `plateInfos`. min_w/max_w bound the accepted plate width in px.
        void plateDetectionRough(cv::Mat InputImage,std::vector<pr::PlateInfo> &plateInfos,int min_w=36,int max_w=800);
        // std::vector<pr::PlateInfo> plateDetectionRough(cv::Mat InputImage,int min_w= 60,int max_h = 400);


        // std::vector<pr::PlateInfo> plateDetectionRoughByMultiScaleEdge(cv::Mat InputImage);



    private:
        cv::CascadeClassifier cascade;


    };

}// namespace pr

#endif //SWIFTPR_PLATEDETECTION_H

+ 127
- 0
Prj-IOS/include/PlateInfo.h View File

@@ -0,0 +1,127 @@
//
// Created by 庾金科 on 20/09/2017.
//

#ifndef SWIFTPR_PLATEINFO_H
#define SWIFTPR_PLATEINFO_H
#include <opencv2/opencv.hpp>
namespace pr {

    typedef std::vector<cv::Mat> Character;

    // Plate background color; UNKNOWN is the safe default for plates whose
    // color has not been classified.
    enum PlateColor { BLUE, YELLOW, WHITE, GREEN, BLACK, UNKNOWN };
    // Category of one character slot; selects which score range is searched
    // when decoding (see decodePlateNormal).
    enum CharType { CHINESE, LETTER, LETTER_NUMS };


    // Everything known about one detected plate: the cropped image, its rect
    // in the source frame, segmented character patches, per-character
    // classifier scores, and the decoded plate string.
    class PlateInfo {
    public:
        // Segmented character images, each paired with its expected category.
        std::vector<std::pair<CharType, cv::Mat>> plateChars;
        // Per-character classifier score vectors (float data), same pairing.
        std::vector<std::pair<CharType, cv::Mat>> plateCoding;

        // Mean of the per-character max scores; filled by decodePlateNormal().
        float confidence = 0;


        PlateInfo(const cv::Mat &plateData, std::string plateName, cv::Rect plateRect, PlateColor plateType) {
            licensePlate = plateData;
            name = plateName;
            ROI = plateRect;
            Type = plateType;
        }
        PlateInfo(const cv::Mat &plateData, cv::Rect plateRect, PlateColor plateType) {
            licensePlate = plateData;
            ROI = plateRect;
            Type = plateType;
        }
        PlateInfo(const cv::Mat &plateData, cv::Rect plateRect) {
            licensePlate = plateData;
            ROI = plateRect;
        }
        PlateInfo() {

        }

        cv::Mat getPlateImage() {
            return licensePlate;
        }

        void setPlateImage(cv::Mat plateImage){
            licensePlate = plateImage;
        }

        cv::Rect getPlateRect() {
            return ROI;
        }

        void setPlateRect(cv::Rect plateRect) {
            ROI = plateRect;
        }
        cv::String getPlateName() {
            return name;

        }
        void setPlateName(cv::String plateName) {
            name = plateName;
        }
        // Returns the PlateColor as int; UNKNOWN when no type was ever set
        // (guaranteed by the member initializer below).
        int getPlateType() {
            return Type;
        }

        void appendPlateChar(const std::pair<CharType,cv::Mat> &plateChar)
        {
            plateChars.push_back(plateChar);
        }

        void appendPlateCoding(const std::pair<CharType,cv::Mat> &charProb){
            plateCoding.push_back(charProb);
        }

        // cv::Mat getPlateChars(int id) {
        //     if(id<PlateChars.size())
        //         return PlateChars[id];
        // }

        // Decodes plateCoding into the plate string via mappingTable, stores it
        // in `name`, accumulates `confidence`, and returns the string.
        //
        // Score-vector layout assumed by the index ranges below (TODO confirm
        // against the recognizer model): [0,31) Chinese province glyphs,
        // [31,41) digits, [41,65) letters; LETTER_NUMS searches [31,65).
        std::string decodePlateNormal(std::vector<std::string> mappingTable) {
            std::string decode;
            for(auto plate:plateCoding) {
                float *prob = (float *)plate.second.data;
                if(plate.first == CHINESE) {

                    decode += mappingTable[std::max_element(prob,prob+31) - prob];
                    confidence+=*std::max_element(prob,prob+31);


                    // std::cout<<*std::max_element(prob,prob+31)<<std::endl;

                }

                if(plate.first == LETTER) {
                    decode += mappingTable[std::max_element(prob+41,prob+65)- prob];
                    confidence+=*std::max_element(prob+41,prob+65);
                }

                if(plate.first == LETTER_NUMS) {
                    decode += mappingTable[std::max_element(prob+31,prob+65)- prob];
                    confidence+=*std::max_element(prob+31,prob+65);
                    // std::cout<<*std::max_element(prob+31,prob+65)<<std::endl;

                }

            }
            name = decode;

            // Average over the 7 characters of a standard plate.
            // NOTE(review): hard-coded 7 undercounts confidence for plates with
            // a different character count (e.g. 8-char green plates) — confirm.
            confidence/=7;

            return decode;
        }



    private:
        cv::Mat licensePlate;
        cv::Rect ROI;
        std::string name;
        // Fix: default-initialize so getPlateType() never reads an
        // indeterminate value when the default or (data, rect) constructors
        // were used (previously uninitialized — undefined behavior on read).
        PlateColor Type = UNKNOWN;
    };
}


#endif //SWIFTPR_PLATEINFO_H

+ 39
- 0
Prj-IOS/include/PlateSegmentation.h View File

@@ -0,0 +1,39 @@
//
// Created by 庾金科 on 16/10/2017.
//

#ifndef SWIFTPR_PLATESEGMENTATION_H
#define SWIFTPR_PLATESEGMENTATION_H

#include "opencv2/opencv.hpp"
#include <opencv2/dnn.hpp>
#include "PlateInfo.h"

namespace pr{


// Splits a rectified plate image into per-character rectangles using a
// sliding-window dnn classifier plus template matching over its responses.
class PlateSegmentation{
public:
// Expected character counts: 6 for a standard plate, 7 for green plates.
const int PLATE_NORMAL = 6;
const int PLATE_NORMAL_GREEN = 7;
const int DEFAULT_WIDTH = 20;
// Loads the segmentation net. NOTE(review): parameter "phototxt" is a typo
// for "prototxt" (declaration-only name; harmless but worth fixing).
PlateSegmentation(std::string phototxt,std::string caffemodel);
PlateSegmentation(){}
// Full segmentation pass: fills Char_rects with one rect per character.
void segmentPlatePipline(PlateInfo &plateInfo,int stride,std::vector<cv::Rect> &Char_rects);

// Slides a window of `windowsWidth` across the plate, collecting classifier
// responses into `respones` (sic — "responses").
void segmentPlateBySlidingWindows(cv::Mat &plateImage,int windowsWidth,int stride,cv::Mat &respones);
// Finds the best character-layout candidate (score + split points) in the
// response map via template matching.
void templateMatchFinding(const cv::Mat &respones,int windowsWidth,std::pair<float,std::vector<int>> &candidatePts);
// Converts candidate split points into padded character rects.
void refineRegion(cv::Mat &plateImage,const std::vector<int> &candidatePts,const int padding,std::vector<cv::Rect> &rects);
// Crops the rects out of the plate and stores them on plateInfo.
void ExtractRegions(PlateInfo &plateInfo,std::vector<cv::Rect> &rects);
// Runs the net on one cropped window and returns its response.
cv::Mat classifyResponse(const cv::Mat &cropped);
private:
cv::dnn::Net net;


// RefineRegion()

};

}//namespace pr

#endif //SWIFTPR_PLATESEGMENTATION_H

+ 21
- 0
Prj-IOS/include/Recognizer.h View File

@@ -0,0 +1,21 @@
//
// Created by 庾金科 on 20/10/2017.
//


#ifndef SWIFTPR_RECOGNIZER_H
#define SWIFTPR_RECOGNIZER_H

#include "PlateInfo.h"
#include "opencv2/dnn.hpp"
namespace pr{
    // Raw per-character classifier output (a score map).
    typedef cv::Mat label;

    // Abstract base for character recognizers (implemented by CNNRecognizer).
    class GeneralRecognizer{
    public:
        // Classifies a single character image; implemented by subclasses.
        virtual label recognizeCharacter(cv::Mat character) = 0;
        // Runs recognizeCharacter over the segmented characters stored in
        // `plateinfo`, appending the results to its plateCoding.
        void SegmentBasedSequenceRecognition(PlateInfo &plateinfo);
        // Fix: virtual destructor. Instances are held and destroyed through a
        // GeneralRecognizer* (see PipelinePR), which is undefined behavior
        // when the base destructor is non-virtual.
        virtual ~GeneralRecognizer() = default;

    };

}
#endif //SWIFTPR_RECOGNIZER_H

+ 107
- 0
Prj-IOS/include/niBlackThreshold.h View File

@@ -0,0 +1,107 @@
//
// Created by 庾金科 on 26/10/2017.
//

#ifndef SWIFTPR_NIBLACKTHRESHOLD_H
#define SWIFTPR_NIBLACKTHRESHOLD_H


#include <opencv2/opencv.hpp>
// NOTE(review): `using namespace` in a header leaks cv:: into every includer;
// kept as-is for source compatibility, but qualifying names would be cleaner.
using namespace cv;

// Local (adaptive) binarization variants accepted by niBlackThreshold().
enum LocalBinarizationMethods{
    BINARIZATION_NIBLACK = 0, //!< Classic Niblack binarization. See @cite Niblack1985 .
    BINARIZATION_SAUVOLA = 1, //!< Sauvola's technique. See @cite Sauvola1997 .
    BINARIZATION_WOLF = 2,    //!< Wolf's technique. See @cite Wolf2004 .
    BINARIZATION_NICK = 3     //!< NICK technique. See @cite Khurshid2009 .
};


// Applies a locally adaptive threshold to a single-channel image.
// The local threshold is derived from the mean and standard deviation in a
// blockSize x blockSize neighborhood of each pixel (T = mean + k*stddev for
// classic Niblack; the other methods vary the formula).
//
// @param _src      single-channel input (8U required for Sauvola)
// @param _dst      output image, same size/type as src (not in-place)
// @param maxValue  value assigned to "foreground" pixels for BINARY types
// @param type      one of cv::THRESH_BINARY(_INV)/TRUNC/TOZERO(_INV)
// @param blockSize odd neighborhood size, > 1
// @param k         method-specific scale on the deviation term
// @param binarizationMethod one of LocalBinarizationMethods
//
// Fix: marked `inline`. This function is *defined* in a header; without
// `inline`, every translation unit including it emits its own external
// definition, violating the One Definition Rule (duplicate-symbol link
// errors as soon as two .cpp files include this header).
inline void niBlackThreshold( InputArray _src, OutputArray _dst, double maxValue,
        int type, int blockSize, double k, int binarizationMethod )
{
    // Input grayscale image
    Mat src = _src.getMat();
    CV_Assert(src.channels() == 1);
    CV_Assert(blockSize % 2 == 1 && blockSize > 1);
    if (binarizationMethod == BINARIZATION_SAUVOLA) {
        CV_Assert(src.depth() == CV_8U);
    }
    type &= THRESH_MASK;
    // Compute local threshold (T = mean + k * stddev)
    // using mean and standard deviation in the neighborhood of each pixel
    // (intermediate calculations are done with floating-point precision)
    Mat test;
    Mat thresh;
    {
        // note that: Var[X] = E[X^2] - E[X]^2
        Mat mean, sqmean, variance, stddev, sqrtVarianceMeanSum;
        double srcMin, stddevMax;
        boxFilter(src, mean, CV_32F, Size(blockSize, blockSize),
                Point(-1,-1), true, BORDER_REPLICATE);
        sqrBoxFilter(src, sqmean, CV_32F, Size(blockSize, blockSize),
                Point(-1,-1), true, BORDER_REPLICATE);
        variance = sqmean - mean.mul(mean);
        sqrt(variance, stddev);
        switch (binarizationMethod)
        {
            case BINARIZATION_NIBLACK:
                thresh = mean + stddev * static_cast<float>(k);

                break;
            case BINARIZATION_SAUVOLA:
                thresh = mean.mul(1. + static_cast<float>(k) * (stddev / 128.0 - 1.));
                break;
            case BINARIZATION_WOLF:
                minMaxIdx(src, &srcMin,NULL);
                minMaxIdx(stddev, NULL, &stddevMax);
                thresh = mean - static_cast<float>(k) * (mean - srcMin - stddev.mul(mean - srcMin) / stddevMax);
                break;
            case BINARIZATION_NICK:
                sqrt(variance + sqmean, sqrtVarianceMeanSum);
                thresh = mean + static_cast<float>(k) * sqrtVarianceMeanSum;
                break;
            default:
                CV_Error( CV_StsBadArg, "Unknown binarization method" );
                break;
        }
        thresh.convertTo(thresh, src.depth());

        thresh.convertTo(test, src.depth());
        //
        // cv::imshow("imagex",test);
        // cv::waitKey(0);

    }
    // Prepare output image
    _dst.create(src.size(), src.type());
    Mat dst = _dst.getMat();
    CV_Assert(src.data != dst.data);  // no inplace processing
    // Apply thresholding: ( pixel > threshold ) ? foreground : background
    Mat mask;
    switch (type)
    {
        case THRESH_BINARY:      // dst = (src > thresh) ? maxval : 0
        case THRESH_BINARY_INV:  // dst = (src > thresh) ? 0 : maxval
            compare(src, thresh, mask, (type == THRESH_BINARY ? CMP_GT : CMP_LE));
            dst.setTo(0);
            dst.setTo(maxValue, mask);
            break;
        case THRESH_TRUNC:       // dst = (src > thresh) ? thresh : src
            compare(src, thresh, mask, CMP_GT);
            src.copyTo(dst);
            thresh.copyTo(dst, mask);
            break;
        case THRESH_TOZERO:      // dst = (src > thresh) ? src : 0
        case THRESH_TOZERO_INV:  // dst = (src > thresh) ? 0 : src
            compare(src, thresh, mask, (type == THRESH_TOZERO ? CMP_GT : CMP_LE));
            dst.setTo(0);
            src.copyTo(dst, mask);
            break;
        default:
            CV_Error( CV_StsBadArg, "Unknown threshold type" );
            break;
    }
}

#endif //SWIFTPR_NIBLACKTHRESHOLD_H

+ 435
- 0
Prj-IOS/lp.xcodeproj/project.pbxproj View File

@@ -0,0 +1,435 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 48;
objects = {

/* Begin PBXBuildFile section */
18034F491FD28DF500787983 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 18034F481FD28DF500787983 /* AppDelegate.m */; };
18034F4C1FD28DF500787983 /* ViewController.mm in Sources */ = {isa = PBXBuildFile; fileRef = 18034F4B1FD28DF500787983 /* ViewController.mm */; };
18034F4F1FD28DF500787983 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 18034F4D1FD28DF500787983 /* Main.storyboard */; };
18034F511FD28DF500787983 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 18034F501FD28DF500787983 /* Assets.xcassets */; };
18034F541FD28DF500787983 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 18034F521FD28DF500787983 /* LaunchScreen.storyboard */; };
18034F571FD28DF500787983 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 18034F561FD28DF500787983 /* main.m */; };
18034F5E1FD299EE00787983 /* opencv2.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 18034F5D1FD299EE00787983 /* opencv2.framework */; };
1820B1D81FD300D3003CFE6C /* UIImageCVMatConverter.mm in Sources */ = {isa = PBXBuildFile; fileRef = 1820B1D71FD300D3003CFE6C /* UIImageCVMatConverter.mm */; };
18CEE3D51FD5AA8300CC138A /* CNNRecognizer.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 18CEE3D41FD5AA8300CC138A /* CNNRecognizer.cpp */; };
18CEE3DC1FD5AA9600CC138A /* FastDeskew.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 18CEE3D61FD5AA9500CC138A /* FastDeskew.cpp */; };
18CEE3DD1FD5AA9600CC138A /* PlateSegmentation.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 18CEE3D71FD5AA9600CC138A /* PlateSegmentation.cpp */; };
18CEE3DE1FD5AA9600CC138A /* Recognizer.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 18CEE3D81FD5AA9600CC138A /* Recognizer.cpp */; };
18CEE3DF1FD5AA9600CC138A /* FineMapping.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 18CEE3D91FD5AA9600CC138A /* FineMapping.cpp */; };
18CEE3E01FD5AA9600CC138A /* Pipeline.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 18CEE3DA1FD5AA9600CC138A /* Pipeline.cpp */; };
18CEE3E11FD5AA9600CC138A /* PlateDetection.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 18CEE3DB1FD5AA9600CC138A /* PlateDetection.cpp */; };
18CEE3EE1FD5ACFA00CC138A /* model.bundle in Resources */ = {isa = PBXBuildFile; fileRef = 18CEE3ED1FD5ACFA00CC138A /* model.bundle */; };
/* End PBXBuildFile section */

/* Begin PBXFileReference section */
18034F441FD28DF500787983 /* lp.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = lp.app; sourceTree = BUILT_PRODUCTS_DIR; };
18034F471FD28DF500787983 /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; };
18034F481FD28DF500787983 /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = "<group>"; };
18034F4A1FD28DF500787983 /* ViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = "<group>"; };
18034F4B1FD28DF500787983 /* ViewController.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = ViewController.mm; sourceTree = "<group>"; };
18034F4E1FD28DF500787983 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
18034F501FD28DF500787983 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
18034F531FD28DF500787983 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
18034F551FD28DF500787983 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
18034F561FD28DF500787983 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = "<group>"; };
18034F5D1FD299EE00787983 /* opencv2.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; path = opencv2.framework; sourceTree = "<group>"; };
1820B1D61FD300D3003CFE6C /* UIImageCVMatConverter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = UIImageCVMatConverter.h; sourceTree = "<group>"; };
1820B1D71FD300D3003CFE6C /* UIImageCVMatConverter.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = UIImageCVMatConverter.mm; sourceTree = "<group>"; };
18CEE3D41FD5AA8300CC138A /* CNNRecognizer.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = CNNRecognizer.cpp; path = src/CNNRecognizer.cpp; sourceTree = SOURCE_ROOT; };
18CEE3D61FD5AA9500CC138A /* FastDeskew.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = FastDeskew.cpp; path = src/FastDeskew.cpp; sourceTree = SOURCE_ROOT; };
18CEE3D71FD5AA9600CC138A /* PlateSegmentation.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = PlateSegmentation.cpp; path = src/PlateSegmentation.cpp; sourceTree = SOURCE_ROOT; };
18CEE3D81FD5AA9600CC138A /* Recognizer.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = Recognizer.cpp; path = src/Recognizer.cpp; sourceTree = SOURCE_ROOT; };
18CEE3D91FD5AA9600CC138A /* FineMapping.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = FineMapping.cpp; path = src/FineMapping.cpp; sourceTree = SOURCE_ROOT; };
18CEE3DA1FD5AA9600CC138A /* Pipeline.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = Pipeline.cpp; path = src/Pipeline.cpp; sourceTree = SOURCE_ROOT; };
18CEE3DB1FD5AA9600CC138A /* PlateDetection.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = PlateDetection.cpp; path = src/PlateDetection.cpp; sourceTree = SOURCE_ROOT; };
18CEE3E21FD5AAE900CC138A /* util.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = util.h; path = src/util.h; sourceTree = SOURCE_ROOT; };
18CEE3E41FD5AB3800CC138A /* FastDeskew.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = FastDeskew.h; path = include/FastDeskew.h; sourceTree = SOURCE_ROOT; };
18CEE3E51FD5AB3800CC138A /* PlateDetection.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = PlateDetection.h; path = include/PlateDetection.h; sourceTree = SOURCE_ROOT; };
18CEE3E61FD5AB3800CC138A /* niBlackThreshold.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = niBlackThreshold.h; path = include/niBlackThreshold.h; sourceTree = SOURCE_ROOT; };
18CEE3E71FD5AB3800CC138A /* Pipeline.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = Pipeline.h; path = include/Pipeline.h; sourceTree = SOURCE_ROOT; };
18CEE3E81FD5AB3800CC138A /* PlateInfo.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = PlateInfo.h; path = include/PlateInfo.h; sourceTree = SOURCE_ROOT; };
18CEE3E91FD5AB3800CC138A /* PlateSegmentation.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = PlateSegmentation.h; path = include/PlateSegmentation.h; sourceTree = SOURCE_ROOT; };
18CEE3EA1FD5AB3800CC138A /* CNNRecognizer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = CNNRecognizer.h; path = include/CNNRecognizer.h; sourceTree = SOURCE_ROOT; };
18CEE3EB1FD5AB3800CC138A /* FineMapping.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = FineMapping.h; path = include/FineMapping.h; sourceTree = SOURCE_ROOT; };
18CEE3EC1FD5AB3900CC138A /* Recognizer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = Recognizer.h; path = include/Recognizer.h; sourceTree = SOURCE_ROOT; };
18CEE3ED1FD5ACFA00CC138A /* model.bundle */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.plug-in"; path = model.bundle; sourceTree = "<group>"; };
/* End PBXFileReference section */

/* Begin PBXFrameworksBuildPhase section */
18034F411FD28DF500787983 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
18034F5E1FD299EE00787983 /* opencv2.framework in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */

/* Begin PBXGroup section */
18034F3B1FD28DF500787983 = {
isa = PBXGroup;
children = (
18CEE3ED1FD5ACFA00CC138A /* model.bundle */,
18CEE3E31FD5AB2600CC138A /* include */,
18CEE3D31FD5AA5C00CC138A /* src */,
18034F5D1FD299EE00787983 /* opencv2.framework */,
18034F461FD28DF500787983 /* lp */,
18034F451FD28DF500787983 /* Products */,
1820B1D01FD2F9EB003CFE6C /* Frameworks */,
);
sourceTree = "<group>";
};
18034F451FD28DF500787983 /* Products */ = {
isa = PBXGroup;
children = (
18034F441FD28DF500787983 /* lp.app */,
);
name = Products;
sourceTree = "<group>";
};
18034F461FD28DF500787983 /* lp */ = {
isa = PBXGroup;
children = (
1820B1D61FD300D3003CFE6C /* UIImageCVMatConverter.h */,
1820B1D71FD300D3003CFE6C /* UIImageCVMatConverter.mm */,
18034F471FD28DF500787983 /* AppDelegate.h */,
18034F481FD28DF500787983 /* AppDelegate.m */,
18034F4A1FD28DF500787983 /* ViewController.h */,
18034F4B1FD28DF500787983 /* ViewController.mm */,
18034F4D1FD28DF500787983 /* Main.storyboard */,
18034F501FD28DF500787983 /* Assets.xcassets */,
18034F521FD28DF500787983 /* LaunchScreen.storyboard */,
18034F551FD28DF500787983 /* Info.plist */,
18034F561FD28DF500787983 /* main.m */,
);
path = lp;
sourceTree = "<group>";
};
1820B1D01FD2F9EB003CFE6C /* Frameworks */ = {
isa = PBXGroup;
children = (
);
name = Frameworks;
sourceTree = "<group>";
};
18CEE3D31FD5AA5C00CC138A /* src */ = {
isa = PBXGroup;
children = (
18CEE3E21FD5AAE900CC138A /* util.h */,
18CEE3D61FD5AA9500CC138A /* FastDeskew.cpp */,
18CEE3D91FD5AA9600CC138A /* FineMapping.cpp */,
18CEE3DA1FD5AA9600CC138A /* Pipeline.cpp */,
18CEE3DB1FD5AA9600CC138A /* PlateDetection.cpp */,
18CEE3D71FD5AA9600CC138A /* PlateSegmentation.cpp */,
18CEE3D81FD5AA9600CC138A /* Recognizer.cpp */,
18CEE3D41FD5AA8300CC138A /* CNNRecognizer.cpp */,
);
name = src;
path = "New Group";
sourceTree = "<group>";
};
18CEE3E31FD5AB2600CC138A /* include */ = {
isa = PBXGroup;
children = (
18CEE3EA1FD5AB3800CC138A /* CNNRecognizer.h */,
18CEE3E41FD5AB3800CC138A /* FastDeskew.h */,
18CEE3EB1FD5AB3800CC138A /* FineMapping.h */,
18CEE3E61FD5AB3800CC138A /* niBlackThreshold.h */,
18CEE3E71FD5AB3800CC138A /* Pipeline.h */,
18CEE3E51FD5AB3800CC138A /* PlateDetection.h */,
18CEE3E81FD5AB3800CC138A /* PlateInfo.h */,
18CEE3E91FD5AB3800CC138A /* PlateSegmentation.h */,
18CEE3EC1FD5AB3900CC138A /* Recognizer.h */,
);
name = include;
path = "New Group1";
sourceTree = "<group>";
};
/* End PBXGroup section */

/* Begin PBXNativeTarget section */
18034F431FD28DF500787983 /* lp */ = {
isa = PBXNativeTarget;
buildConfigurationList = 18034F5A1FD28DF500787983 /* Build configuration list for PBXNativeTarget "lp" */;
buildPhases = (
18034F401FD28DF500787983 /* Sources */,
18034F411FD28DF500787983 /* Frameworks */,
18034F421FD28DF500787983 /* Resources */,
);
buildRules = (
);
dependencies = (
);
name = lp;
productName = lp;
productReference = 18034F441FD28DF500787983 /* lp.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */

/* Begin PBXProject section */
18034F3C1FD28DF500787983 /* Project object */ = {
isa = PBXProject;
attributes = {
LastUpgradeCheck = 0910;
ORGANIZATIONNAME = xiaojun;
TargetAttributes = {
18034F431FD28DF500787983 = {
CreatedOnToolsVersion = 9.1;
ProvisioningStyle = Automatic;
};
};
};
buildConfigurationList = 18034F3F1FD28DF500787983 /* Build configuration list for PBXProject "lp" */;
compatibilityVersion = "Xcode 8.0";
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = 18034F3B1FD28DF500787983;
productRefGroup = 18034F451FD28DF500787983 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
18034F431FD28DF500787983 /* lp */,
);
};
/* End PBXProject section */

/* Begin PBXResourcesBuildPhase section */
18034F421FD28DF500787983 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
18034F541FD28DF500787983 /* LaunchScreen.storyboard in Resources */,
18CEE3EE1FD5ACFA00CC138A /* model.bundle in Resources */,
18034F511FD28DF500787983 /* Assets.xcassets in Resources */,
18034F4F1FD28DF500787983 /* Main.storyboard in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */

/* Begin PBXSourcesBuildPhase section */
18034F401FD28DF500787983 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
18CEE3DF1FD5AA9600CC138A /* FineMapping.cpp in Sources */,
1820B1D81FD300D3003CFE6C /* UIImageCVMatConverter.mm in Sources */,
18034F4C1FD28DF500787983 /* ViewController.mm in Sources */,
18CEE3DD1FD5AA9600CC138A /* PlateSegmentation.cpp in Sources */,
18CEE3DC1FD5AA9600CC138A /* FastDeskew.cpp in Sources */,
18CEE3D51FD5AA8300CC138A /* CNNRecognizer.cpp in Sources */,
18034F571FD28DF500787983 /* main.m in Sources */,
18CEE3E11FD5AA9600CC138A /* PlateDetection.cpp in Sources */,
18CEE3E01FD5AA9600CC138A /* Pipeline.cpp in Sources */,
18034F491FD28DF500787983 /* AppDelegate.m in Sources */,
18CEE3DE1FD5AA9600CC138A /* Recognizer.cpp in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */

/* Begin PBXVariantGroup section */
18034F4D1FD28DF500787983 /* Main.storyboard */ = {
isa = PBXVariantGroup;
children = (
18034F4E1FD28DF500787983 /* Base */,
);
name = Main.storyboard;
sourceTree = "<group>";
};
18034F521FD28DF500787983 /* LaunchScreen.storyboard */ = {
isa = PBXVariantGroup;
children = (
18034F531FD28DF500787983 /* Base */,
);
name = LaunchScreen.storyboard;
sourceTree = "<group>";
};
/* End PBXVariantGroup section */

/* Begin XCBuildConfiguration section */
18034F581FD28DF500787983 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
CODE_SIGN_IDENTITY = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_DYNAMIC_NO_PIC = NO;
GCC_INPUT_FILETYPE = automatic;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
"HEADER_SEARCH_PATHS[arch=*]" = (
"\"$(SRCROOT)/include/\"",
"\"$(SRCROOT)\"",
);
IPHONEOS_DEPLOYMENT_TARGET = 11.1;
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
};
name = Debug;
};
18034F591FD28DF500787983 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
CODE_SIGN_IDENTITY = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_INPUT_FILETYPE = automatic;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
"HEADER_SEARCH_PATHS[arch=*]" = (
"\"$(SRCROOT)/include/\"",
"\"$(SRCROOT)\"",
);
IPHONEOS_DEPLOYMENT_TARGET = 11.1;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
VALIDATE_PRODUCT = YES;
};
name = Release;
};
18034F5B1FD28DF500787983 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_STYLE = Automatic;
DEVELOPMENT_TEAM = V74HML5NEB;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)",
);
INFOPLIST_FILE = lp/Info.plist;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = tjs.lp;
PRODUCT_NAME = "$(TARGET_NAME)";
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
18034F5C1FD28DF500787983 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_STYLE = Automatic;
DEVELOPMENT_TEAM = V74HML5NEB;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)",
);
INFOPLIST_FILE = lp/Info.plist;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = tjs.lp;
PRODUCT_NAME = "$(TARGET_NAME)";
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Release;
};
/* End XCBuildConfiguration section */

/* Begin XCConfigurationList section */
18034F3F1FD28DF500787983 /* Build configuration list for PBXProject "lp" */ = {
isa = XCConfigurationList;
buildConfigurations = (
18034F581FD28DF500787983 /* Debug */,
18034F591FD28DF500787983 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
18034F5A1FD28DF500787983 /* Build configuration list for PBXNativeTarget "lp" */ = {
isa = XCConfigurationList;
buildConfigurations = (
18034F5B1FD28DF500787983 /* Debug */,
18034F5C1FD28DF500787983 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 18034F3C1FD28DF500787983 /* Project object */;
}

+ 7
- 0
Prj-IOS/lp.xcodeproj/project.xcworkspace/contents.xcworkspacedata View File

@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:lp.xcodeproj">
</FileRef>
</Workspace>

BIN
Prj-IOS/lp.xcodeproj/project.xcworkspace/xcuserdata/apple.xcuserdatad/UserInterfaceState.xcuserstate View File


+ 5
- 0
Prj-IOS/lp.xcodeproj/xcuserdata/apple.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist View File

@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<Bucket
type = "1"
version = "2.0">
</Bucket>

+ 14
- 0
Prj-IOS/lp.xcodeproj/xcuserdata/apple.xcuserdatad/xcschemes/xcschememanagement.plist View File

@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>lp.xcscheme</key>
<dict>
<key>orderHint</key>
<integer>0</integer>
</dict>
</dict>
</dict>
</plist>

+ 17
- 0
Prj-IOS/lp/AppDelegate.h View File

@@ -0,0 +1,17 @@
//
// AppDelegate.h
// lp
//
// Created by xiaojun on 2017/12/2.
// Copyright © 2017年 xiaojun. All rights reserved.
//

#import <UIKit/UIKit.h>

// Application delegate: owns the main window and receives UIKit
// lifecycle callbacks (all implemented as template stubs in AppDelegate.m).
@interface AppDelegate : UIResponder <UIApplicationDelegate>

// The app's single window, created from Main.storyboard by UIKit.
@property (strong, nonatomic) UIWindow *window;


@end


+ 51
- 0
Prj-IOS/lp/AppDelegate.m View File

@@ -0,0 +1,51 @@
//
// AppDelegate.m
// lp
//
// Created by xiaojun on 2017/12/2.
// Copyright © 2017年 xiaojun. All rights reserved.
//

#import "AppDelegate.h"

@interface AppDelegate ()

@end

@implementation AppDelegate

// All lifecycle hooks below are the unmodified Xcode-template stubs;
// only didFinishLaunchingWithOptions does anything (it accepts the launch).

- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
    // Override point for customization after application launch.
    return YES;
}


- (void)applicationWillResignActive:(UIApplication *)application {
    // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
    // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game.
}


- (void)applicationDidEnterBackground:(UIApplication *)application {
    // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
    // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
}


- (void)applicationWillEnterForeground:(UIApplication *)application {
    // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background.
}


- (void)applicationDidBecomeActive:(UIApplication *)application {
    // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
}


- (void)applicationWillTerminate:(UIApplication *)application {
    // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
}


@end

+ 98
- 0
Prj-IOS/lp/Assets.xcassets/AppIcon.appiconset/Contents.json View File

@@ -0,0 +1,98 @@
{
"images" : [
{
"idiom" : "iphone",
"size" : "20x20",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "20x20",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "29x29",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "29x29",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "40x40",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "40x40",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "60x60",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "60x60",
"scale" : "3x"
},
{
"idiom" : "ipad",
"size" : "20x20",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "20x20",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "29x29",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "29x29",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "40x40",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "40x40",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "76x76",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "76x76",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "83.5x83.5",
"scale" : "2x"
},
{
"idiom" : "ios-marketing",
"size" : "1024x1024",
"scale" : "1x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

+ 25
- 0
Prj-IOS/lp/Base.lproj/LaunchScreen.storyboard View File

@@ -0,0 +1,25 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="13122.16" systemVersion="17A277" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13104.12"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="EHf-IW-A2E">
<objects>
<viewController id="01J-lp-oVM" sceneMemberID="viewController">
<view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="53" y="375"/>
</scene>
</scenes>
</document>

+ 28
- 0
Prj-IOS/lp/Base.lproj/Main.storyboard View File

@@ -0,0 +1,28 @@
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="13529" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
<device id="retina4_0" orientation="portrait">
<adaptation id="fullscreen"/>
</device>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13527"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="tne-QT-ifu">
<objects>
<viewController id="BYZ-38-t0r" customClass="ViewController" sceneMemberID="viewController">
<view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
<rect key="frame" x="0.0" y="0.0" width="320" height="568"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
</objects>
</scene>
</scenes>
</document>

+ 45
- 0
Prj-IOS/lp/Info.plist View File

@@ -0,0 +1,45 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>$(DEVELOPMENT_LANGUAGE)</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>UILaunchStoryboardName</key>
<string>LaunchScreen</string>
<key>UIMainStoryboardFile</key>
<string>Main</string>
<key>UIRequiredDeviceCapabilities</key>
<array>
<string>armv7</string>
</array>
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UISupportedInterfaceOrientations~ipad</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationPortraitUpsideDown</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
</dict>
</plist>

+ 32
- 0
Prj-IOS/lp/UIImageCVMatConverter.h View File

@@ -0,0 +1,32 @@
//
// UIImageCVMatConverter.h
// lp
//
// Created by xiaojun on 2017/12/2.
// Copyright © 2017年 xiaojun. All rights reserved.
//

#ifndef UIImageCVMatConverter_h
#define UIImageCVMatConverter_h

#ifdef __cplusplus
#import <opencv2/opencv.hpp>
#endif
#ifdef __OBJC__
#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
#endif
using namespace cv;

// Stateless helper: conversions between UIImage and cv::Mat, plus
// orientation/scale normalization for camera-captured images.
@interface UIImageCVMatConverter : NSObject

// UIImage -> 3-channel cv::Mat (alpha dropped).
+ (cv::Mat)cvMatFromUIImage:(UIImage *)image;
// cv::Mat -> UIImage, orientation assumed "up".
+ (UIImage *)UIImageFromCVMat:(cv::Mat)image;
// Downscale to <=640 px and bake EXIF orientation into pixels (front camera).
+ (UIImage *)scaleAndRotateImageFrontCamera:(UIImage *)image;
// Downscale to <=480 px and bake EXIF orientation into pixels (back camera).
+ (UIImage *)scaleAndRotateImageBackCamera:(UIImage *)image;
// cv::Mat -> UIImage tagged with an explicit image orientation.
+(UIImage*) imageWithMat:(const cv::Mat&) image andImageOrientation: (UIImageOrientation) orientation;
// cv::Mat -> UIImage, mapping a device orientation to an image orientation.
+(UIImage*) imageWithMat:(const cv::Mat&) image andDeviceOrientation: (UIDeviceOrientation) orientation;

@end

#endif /* UIImageCVMatConverter_h */

+ 317
- 0
Prj-IOS/lp/UIImageCVMatConverter.mm View File

@@ -0,0 +1,317 @@
//
// UIImageCVMatConverter.m
// lp
//
// Created by xiaojun on 2017/12/2.
// Copyright © 2017年 xiaojun. All rights reserved.
//


#include "UIImageCVMatConverter.h"
@implementation UIImageCVMatConverter

// Converts a cv::Mat to a UIImage by wrapping its pixel buffer in a CGImage.
// A 1-byte-per-pixel Mat is treated as grayscale; anything else as RGB.
// The NSData copy keeps the pixels alive independently of `cvMat`.
+(UIImage *)UIImageFromCVMat:(cv::Mat)cvMat{
    NSData *data = [NSData dataWithBytes:cvMat.data length:cvMat.elemSize()*cvMat.total()];
    CGColorSpaceRef colorSpace;
    if (cvMat.elemSize() == 1) {
        colorSpace = CGColorSpaceCreateDeviceGray();
    } else {
        colorSpace = CGColorSpaceCreateDeviceRGB();
    }
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
    // Creating CGImage from cv::Mat
    CGImageRef imageRef = CGImageCreate(cvMat.cols,          //width
                                        cvMat.rows,          //height
                                        8,                   //bits per component
                                        8 * cvMat.elemSize(),//bits per pixel
                                        cvMat.step[0],       //bytesPerRow
                                        colorSpace,          //colorspace
                                        kCGImageAlphaNone|kCGBitmapByteOrderDefault,// bitmap info
                                        provider,            //CGDataProviderRef
                                        NULL,                //decode
                                        false,               //should interpolate
                                        kCGRenderingIntentDefault //intent
                                        );
    // Getting UIImage from CGImage
    UIImage *finalImage = [UIImage imageWithCGImage:imageRef scale:1.0 orientation:UIImageOrientationUp];
    // Release the CF objects we created; the UIImage retains what it needs.
    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);
    return finalImage;
}
// Scale and orientation-normalize an image (back-camera variant).
// Downscales so the longer side is at most 480 px and bakes the EXIF
// orientation into the pixel data, returning an "up"-oriented image.
+ (UIImage *)scaleAndRotateImageBackCamera:(UIImage *)image
{
    // Maximum output dimension for the back camera (front camera uses 640).
    static int kMaxResolution = 480;
    CGImageRef imgRef = image.CGImage;
    CGFloat width = CGImageGetWidth(imgRef);
    CGFloat height = CGImageGetHeight(imgRef);
    CGAffineTransform transform = CGAffineTransformIdentity;
    CGRect bounds = CGRectMake(0, 0, width, height);
    // Shrink target bounds, preserving aspect ratio, if either side exceeds the cap.
    if (width > kMaxResolution || height > kMaxResolution) {
        CGFloat ratio = width/height;
        if (ratio > 1) {
            bounds.size.width = kMaxResolution;
            bounds.size.height = bounds.size.width / ratio;
        } else {
            bounds.size.height = kMaxResolution;
            bounds.size.width = bounds.size.height * ratio;
        }
    }
    CGFloat scaleRatio = bounds.size.width / width;
    CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef));
    CGFloat boundHeight;
    UIImageOrientation orient = image.imageOrientation;
    // Build the affine transform that maps the stored pixels to an upright
    // image for each EXIF orientation; mirrored/rotated cases also swap
    // the output bounds' width and height.
    switch(orient) {
        case UIImageOrientationUp:
            transform = CGAffineTransformIdentity;
            break;
        case UIImageOrientationUpMirrored:
            transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
            transform = CGAffineTransformScale(transform, -1.0, 1.0);
            break;
        case UIImageOrientationDown:
            transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
            transform = CGAffineTransformRotate(transform, M_PI);
            break;
        case UIImageOrientationDownMirrored:
            transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
            transform = CGAffineTransformScale(transform, 1.0, -1.0);
            break;
        case UIImageOrientationLeftMirrored:
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
            transform = CGAffineTransformScale(transform, -1.0, 1.0);
            transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
            break;
        case UIImageOrientationLeft:
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
            transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
            break;
        case UIImageOrientationRightMirrored:
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeScale(-1.0, 1.0);
            transform = CGAffineTransformRotate(transform, M_PI / 2.0);
            break;
        case UIImageOrientationRight:
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0);
            transform = CGAffineTransformRotate(transform, M_PI / 2.0);
            break;
        default:
            [NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
    }
    UIGraphicsBeginImageContext(bounds.size);
    CGContextRef context = UIGraphicsGetCurrentContext();
    // Left/right orientations have swapped axes, so the flip differs.
    if (orient == UIImageOrientationRight || orient == UIImageOrientationLeft) {
        CGContextScaleCTM(context, -scaleRatio, scaleRatio);
        CGContextTranslateCTM(context, -height, 0);
    } else {
        CGContextScaleCTM(context, scaleRatio, -scaleRatio);
        CGContextTranslateCTM(context, 0, -height);
    }
    CGContextConcatCTM(context, transform);
    CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef);
    UIImage *returnImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    NSLog(@"resize w%f,H%f",returnImage.size.width,returnImage.size.height);
    return returnImage;
}

// Maps a physical device orientation to the UIImageOrientation that makes
// a camera-captured cv::Mat display upright, then delegates to
// imageWithMat:andImageOrientation:. Unknown orientations fall through to
// the portrait mapping.
+(UIImage*) imageWithMat:(const cv::Mat&) image andDeviceOrientation: (UIDeviceOrientation) orientation
{
    UIImageOrientation imgOrientation = UIImageOrientationUp;
    switch (orientation)
    {
        case UIDeviceOrientationLandscapeLeft:
            imgOrientation =UIImageOrientationLeftMirrored; break;
        case UIDeviceOrientationLandscapeRight:
            imgOrientation = UIImageOrientationDown; break;
        case UIDeviceOrientationPortraitUpsideDown:
            imgOrientation = UIImageOrientationRightMirrored; break;
        case UIDeviceOrientationFaceUp:
            imgOrientation = UIImageOrientationRightMirrored; break;
        default:
        case UIDeviceOrientationPortrait:
            imgOrientation = UIImageOrientationRight; break;
    };
    return [UIImageCVMatConverter imageWithMat:image andImageOrientation:imgOrientation];
}

// Converts a cv::Mat (1, 3 or 4 channels) to a UIImage carrying the given
// image orientation. Pixels are first normalized to a 4-channel RGBA
// buffer, then wrapped in a CGImage; the NSData copy keeps the pixels
// alive independently of the input Mat.
+(UIImage*) imageWithMat:(const cv::Mat&) image andImageOrientation: (UIImageOrientation) orientation
{
    cv::Mat rgbaView;
    if (image.channels() == 3)
    {
        // 3-channel input: expand to 4 channels.
        cv::cvtColor(image, rgbaView, COLOR_BGR2BGRA);
    }
    else if (image.channels() == 4)
    {
        // Already 4 channels. The old code called cvtColor with
        // COLOR_BGR2BGRA here, which requires a 3-channel source and
        // throws on a 4-channel Mat; a plain copy is what was intended.
        image.copyTo(rgbaView);
    }
    else if (image.channels() == 1)
    {
        cv::cvtColor(image, rgbaView, COLOR_GRAY2RGBA);
    }
    else
    {
        // Unsupported channel count: copy through so the CGImage wrapping
        // below never operates on an empty Mat.
        image.copyTo(rgbaView);
    }
    NSData *data = [NSData dataWithBytes:rgbaView.data length:rgbaView.elemSize() * rgbaView.total()];
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
    CGBitmapInfo bmInfo = kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big;
    // Creating CGImage from cv::Mat
    CGImageRef imageRef = CGImageCreate(rgbaView.cols,          //width
                                        rgbaView.rows,          //height
                                        8,                      //bits per component
                                        8 * rgbaView.elemSize(),//bits per pixel
                                        rgbaView.step.p[0],     //bytesPerRow
                                        colorSpace,             //colorspace
                                        bmInfo,                 // bitmap info
                                        provider,               //CGDataProviderRef
                                        NULL,                   //decode
                                        false,                  //should interpolate
                                        kCGRenderingIntentDefault //intent
                                        );
    // Getting UIImage from CGImage
    UIImage *finalImage = [UIImage imageWithCGImage:imageRef scale:1 orientation:orientation];
    // Release the CF objects we created; the UIImage retains what it needs.
    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);
    return finalImage;
}


// Converts a UIImage to a 3-channel cv::Mat by rendering it into an RGBA
// bitmap context and then dropping the alpha channel.
+ (cv::Mat)cvMatFromUIImage:(UIImage *)image
{
    CGColorSpaceRef colorSpace = CGImageGetColorSpace(image.CGImage);
    CGFloat cols = image.size.width;
    CGFloat rows = image.size.height;
    cv::Mat cvMat(rows, cols, CV_8UC4); // 8 bits per component, 4 channels
    CGContextRef contextRef = CGBitmapContextCreate(cvMat.data,    // Pointer to data
                                                    cols,          // Width of bitmap
                                                    rows,          // Height of bitmap
                                                    8,             // Bits per component
                                                    cvMat.step[0], // Bytes per row
                                                    colorSpace,    // Colorspace
                                                    kCGImageAlphaNoneSkipLast |
                                                    kCGBitmapByteOrderDefault); // Bitmap info flags
    // Render the UIImage's pixels into cvMat's buffer.
    CGContextDrawImage(contextRef, CGRectMake(0, 0, cols, rows), image.CGImage);
    CGContextRelease(contextRef);
    CGColorSpaceRelease(colorSpace);
    cv::Mat cvMat3(rows, cols, CV_8UC3); // 8 bits per component, 3 channels
    cv::cvtColor(cvMat, cvMat3,COLOR_RGBA2RGB);
    return cvMat3;
}
// Scale and orientation-normalize an image (front-camera variant).
// Same algorithm as scaleAndRotateImageBackCamera: but with a 640 px cap
// and a mirrored transform for the Right orientation (front cameras
// produce mirrored frames).
+ (UIImage *)scaleAndRotateImageFrontCamera:(UIImage *)image
{
    // Maximum output dimension for the front camera.
    static int kMaxResolution = 640;
    CGImageRef imgRef = image.CGImage;
    CGFloat width = CGImageGetWidth(imgRef);
    CGFloat height = CGImageGetHeight(imgRef);
    CGAffineTransform transform = CGAffineTransformIdentity;
    CGRect bounds = CGRectMake( 0, 0, width, height);
    // Shrink target bounds, preserving aspect ratio, if either side exceeds the cap.
    if (width > kMaxResolution || height > kMaxResolution) {
        CGFloat ratio = width/height;
        if (ratio > 1) {
            bounds.size.width = kMaxResolution;
            bounds.size.height = bounds.size.width / ratio;
        } else {
            bounds.size.height = kMaxResolution;
            bounds.size.width = bounds.size.height * ratio;
        }
    }
    CGFloat scaleRatio = bounds.size.width / width;
    CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef));
    CGFloat boundHeight;
    UIImageOrientation orient = image.imageOrientation;
    // Build the affine transform that maps the stored pixels to an upright
    // image for each EXIF orientation.
    switch(orient) {
        case UIImageOrientationUp:
            transform = CGAffineTransformIdentity;
            break;
        case UIImageOrientationUpMirrored:
            transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
            transform = CGAffineTransformScale(transform, -1.0, 1.0);
            break;
        case UIImageOrientationDown:
            transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
            transform = CGAffineTransformRotate(transform, M_PI);
            break;
        case UIImageOrientationDownMirrored:
            transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
            transform = CGAffineTransformScale(transform, 1.0, -1.0);
            break;
        case UIImageOrientationLeftMirrored:
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
            transform = CGAffineTransformScale(transform, -1.0, 1.0);
            transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
            break;
        case UIImageOrientationLeft:
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
            transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
            break;
        case UIImageOrientationRight:
        case UIImageOrientationRightMirrored:
            // Front camera: treat Right like RightMirrored (mirrored sensor).
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeScale(-1.0, 1.0);
            transform = CGAffineTransformRotate(transform, M_PI / 2.0);
            break;
        default:
            [NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
    }
    UIGraphicsBeginImageContext( bounds.size );
    CGContextRef context = UIGraphicsGetCurrentContext();
    // Left/right orientations have swapped axes, so the flip differs.
    if ( orient == UIImageOrientationRight || orient == UIImageOrientationLeft ) {
        CGContextScaleCTM(context, -scaleRatio, scaleRatio);
        CGContextTranslateCTM(context, -height, 0);
    }
    else {
        CGContextScaleCTM(context, scaleRatio, -scaleRatio);
        CGContextTranslateCTM(context, 0, -height);
    }
    CGContextConcatCTM( context, transform );
    CGContextDrawImage( UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef );
    UIImage *returnImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return returnImage;
}
@end

+ 40
- 0
Prj-IOS/lp/ViewController.h View File

@@ -0,0 +1,40 @@
//
// ViewController.h
// lp
//
// Created by xiaojun on 2017/12/2.
// Copyright © 2017年 xiaojun. All rights reserved.
//

//#import <UIKit/UIKit.h>

#ifdef __cplusplus
#import <opencv2/opencv.hpp>

//#import <opencv2/imgproc/types_c.h>
#import <opencv2/imgcodecs/ios.h>
#endif

#ifdef __OBJC__
#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#endif

// The app's only screen: a toolbar with an album button, an image preview,
// and a label. Picking a photo triggers license-plate recognition on it.
@interface ViewController : UIViewController<UIImagePickerControllerDelegate,
UINavigationControllerDelegate,UIToolbarDelegate> {
    // Last picked photo, converted to a 3-channel cv::Mat.
    cv::Mat source_image;
}

@property (nonatomic, retain) UILabel *textLabel;     // recognition result text
@property (nonatomic, retain) UIImageView *imageView; // picked-photo preview
@property (nonatomic, retain) UIImageView *textView;  // NOTE(review): declared but never used in this file — confirm before removing
@property (nonatomic, retain) UIToolbar *toolbar;     // bottom toolbar holding the album button

// Presents the photo-library picker.
-(void)loadButtonPressed:(id)sender;
// Runs the plate-recognition pipeline on `src` and updates textLabel.
-(void)simpleRecognition:(cv::Mat&)src;
// Resolves a model file name inside the bundled model.bundle directory.
-(NSString *)getPath:(NSString *)fileName;

@end


+ 216
- 0
Prj-IOS/lp/ViewController.mm View File

@@ -0,0 +1,216 @@
//
// ViewController.m
// lp
//
// Created by xiaojun on 2017/12/2.
// Copyright © 2017年 xiaojun. All rights reserved.
//



#import "ViewController.h"
#import "UIImageCVMatConverter.h"

#import "Pipeline.h"

using namespace pr;


@interface ViewController ()

@end

@implementation ViewController

@synthesize imageView;
@synthesize textView;
@synthesize toolbar;
@synthesize textLabel;

// Returns the absolute path of `fileName` inside the app's bundled
// "model.bundle" resource directory.
-(NSString *)getPath:(NSString*)fileName{
    return [[[NSBundle mainBundle].resourcePath
                stringByAppendingPathComponent:@"model.bundle"]
                stringByAppendingPathComponent:fileName];
}

// Runs the full plate pipeline (detect -> fine-map -> segment -> recognize)
// on `src` and shows the comma-separated plate strings in textLabel, or
// "result:null" when nothing confident was found.
-(void)simpleRecognition:(cv::Mat&)src{
    // Model files live in model.bundle; PipelinePR takes them in this order.
    // Stack-allocated strings replace the previous seven `new std::string`
    // allocations, which were leaked on every call.
    std::string cascadePath = [[self getPath:@"cascade.xml"] UTF8String];
    std::string fmProto     = [[self getPath:@"HorizonalFinemapping.prototxt"] UTF8String];
    std::string fmModel     = [[self getPath:@"HorizonalFinemapping.caffemodel"] UTF8String];
    std::string segProto    = [[self getPath:@"Segmentation.prototxt"] UTF8String];
    std::string segModel    = [[self getPath:@"Segmentation.caffemodel"] UTF8String];
    std::string chProto     = [[self getPath:@"CharacterRecognization.prototxt"] UTF8String];
    std::string chModel     = [[self getPath:@"CharacterRecognization.caffemodel"] UTF8String];
    // Direct construction: the old `PipelinePR::PipelinePR(...)` explicit
    // constructor-call syntax is ill-formed C++.
    PipelinePR pr2(cascadePath, fmProto, fmModel, segProto, segModel, chProto, chModel);
    std::vector<pr::PlateInfo> list_res = pr2.RunPiplineAsImage(src);
    std::string concat_results = "";
    for (const auto &one : list_res) {
        // Keep only confident detections.
        if (one.confidence > 0.7) {
            concat_results += one.getPlateName() + ",";
        }
    }
    NSString *str = [NSString stringWithCString:concat_results.c_str() encoding:NSUTF8StringEncoding];
    if (str.length > 0) {
        // Strip the trailing comma before display.
        str = [str substringToIndex:str.length - 1];
        str = [NSString stringWithFormat:@"result:%@", str];
    } else {
        str = [NSString stringWithFormat:@"result:null"];
    }
    [self.textLabel setText:str];
}


// UIImagePickerController delegate: called when the user has picked a
// photo. Normalizes its size/orientation, runs plate recognition, and
// shows the original picture in the preview.
- (void)imagePickerController:(UIImagePickerController*)picker didFinishPickingMediaWithInfo:(NSDictionary *)info
{
    [picker dismissViewControllerAnimated:YES completion:nil];
    UIImage* temp = [info objectForKey:@"UIImagePickerControllerOriginalImage"];
    // Downscale and bake in the EXIF orientation before converting to cv::Mat.
    UIImage *temp_image=[UIImageCVMatConverter scaleAndRotateImageBackCamera:temp];
    source_image=[UIImageCVMatConverter cvMatFromUIImage:temp_image];
    [self simpleRecognition:source_image];
    imageView.image = temp;
    //cv::Mat cvImage,cv2;
    //UIImageToMat(temp, cvImage);
    // if(!cvImage.empty()){
    // cv::Mat gray;
    // // Convert the image to grayscale
    // cv::cvtColor(cvImage,gray,CV_RGB2GRAY);
    // // Apply a Gaussian blur to suppress small edges
    // cv::GaussianBlur(gray, gray, cv::Size(5,5), 1.2,1.2);
    // // Compute edges
    // cv::Mat edges;
    // cv::Canny(gray, edges, 0, 50);
    // // Fill with white
    // cvImage.setTo(cv::Scalar::all(225));
    // // Recolor the edge pixels
    // cvImage.setTo(cv::Scalar(0,128,255,255),edges);
    // // Convert the Mat back to a UIImage for display
    // self.imageView.image = MatToUIImage(cvImage);
    // }
}

// Picker delegate: the user cancelled — just dismiss the picker.
-(void)imagePickerControllerDidCancel:(UIImagePickerController *)picker
{
    [picker dismissViewControllerAnimated:YES completion:nil];
}


// Toolbar action: present the photo-library picker. Bails out early when
// the photo library is unavailable on this device.
- (void)loadButtonPressed:(id)sender
{
    if (![UIImagePickerController isSourceTypeAvailable:
            UIImagePickerControllerSourceTypePhotoLibrary]) {
        return;
    }
    UIImagePickerController *picker = [[UIImagePickerController alloc] init];
    picker.delegate = self;
    picker.sourceType = UIImagePickerControllerSourceTypePhotoLibrary;
    [self presentViewController:picker animated:YES completion:nil];
}

// Builds the UI in code: a full-width image preview, a red result label,
// and a translucent bottom toolbar with an album button.
- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
    CGRect bounds = [UIScreen mainScreen].bounds;
    // Image preview fills the screen between the label and the toolbar.
    imageView = [[UIImageView alloc] init];
    imageView.frame = CGRectMake(0, 160, bounds.size.width, bounds.size.height-210);
    imageView.contentMode=UIViewContentModeScaleAspectFit;
    imageView.backgroundColor = [UIColor clearColor];
    [self.view addSubview:imageView];
    /* Add the fps Label */
    // Result label ("fps" is a leftover name; it shows the recognized plate).
    UILabel *fps = [[UILabel alloc] initWithFrame:CGRectMake(20, 120, 180, 20)];
    fps.font=[UIFont fontWithName:@"华文细黑" size:14.0f];
    fps.backgroundColor=[UIColor clearColor];
    fps.textColor=[UIColor redColor];
    fps.textAlignment=NSTextAlignmentLeft;
    // fps.transform = CGAffineTransformMakeRotation(90);
    fps.text=@"result";
    self.textLabel = fps;
    [self.view addSubview:self.textLabel];
    [self.view bringSubviewToFront:self.textLabel];
    // Bottom toolbar: transparent background, single "album" button.
    toolbar=[[UIToolbar alloc] initWithFrame:CGRectMake(0, bounds.size.height- 44, bounds.size.width, 44)];
    [toolbar setBackgroundColor:[UIColor clearColor]];
    // toolbar.barStyle=UIBarStyleDefault;
    toolbar.tintColor=[UIColor blackColor];
    toolbar.translucent=YES;
    // [toolbar setTranslucent:YES];
    [self.toolbar setBackgroundImage:[UIImage new]
                  forToolbarPosition:UIBarPositionAny
                          barMetrics:UIBarMetricsDefault];
    toolbar.delegate=self;
    UIBarButtonItem*flexitem=[[UIBarButtonItem alloc] initWithBarButtonSystemItem:UIBarButtonSystemItemFlexibleSpace target:nil action:nil];
    UIBarButtonItem*albumitem=[[UIBarButtonItem alloc]
                               initWithTitle:@"相册"
                               style:UIBarButtonItemStylePlain
                               target:self
                               action:@selector(loadButtonPressed:)];
    [toolbar setItems:[NSArray arrayWithObjects:albumitem,flexitem,nil]];
    [self.view addSubview:toolbar];
    // Do any additional setup after loading the view, typically from a nib
    toolbar.autoresizingMask = UIViewAutoresizingNone;
}


// Template stub: nothing cached here is worth releasing yet.
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}


@end

+ 16
- 0
Prj-IOS/lp/main.m View File

@@ -0,0 +1,16 @@
//
// main.m
// lp
//
// Created by xiaojun on 2017/12/2.
// Copyright © 2017年 xiaojun. All rights reserved.
//

#import <UIKit/UIKit.h>
#import "AppDelegate.h"

// App entry point: hands control to UIKit with AppDelegate as the delegate.
int main(int argc, char * argv[]) {
    @autoreleasepool {
        return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
    }
}

+ 12117
- 0
Prj-IOS/model.bundle/cascade.xml
File diff suppressed because it is too large
View File


+ 19
- 0
Prj-IOS/src/CNNRecognizer.cpp View File

@@ -0,0 +1,19 @@
//
// Created by 庾金科 on 21/10/2017.
//

#include "../include/CNNRecognizer.h"

namespace pr{
    // Loads the character-recognition CNN from a Caffe prototxt + weights pair.
    CNNRecognizer::CNNRecognizer(std::string prototxt,std::string caffemodel){
        net = cv::dnn::readNetFromCaffe(prototxt, caffemodel);
    }

    // Runs one character crop through the CNN and returns the raw forward
    // output (the per-class score blob). The crop is converted to grayscale
    // if needed; blobFromImage resizes it to CHAR_INPUT_W x CHAR_INPUT_H and
    // scales pixel values by 1/255.
    label CNNRecognizer::recognizeCharacter(cv::Mat charImage){
        if(charImage.channels()== 3)
            cv::cvtColor(charImage,charImage,cv::COLOR_BGR2GRAY);
        cv::Mat inputBlob = cv::dnn::blobFromImage(charImage, 1/255.0, cv::Size(CHAR_INPUT_W,CHAR_INPUT_H), cv::Scalar(0,0,0),false);
        net.setInput(inputBlob,"data");
        return net.forward();
    }
}

+ 133
- 0
Prj-IOS/src/FastDeskew.cpp View File

@@ -0,0 +1,133 @@
//
// Created by 庾金科 on 02/10/2017.
//



#include <../include/FastDeskew.h>

#include <cmath>    // std::atan2
#include <cstring>  // memset (previously pulled in transitively)

namespace pr{


const int ANGLE_MIN = 30 ;
const int ANGLE_MAX = 150 ;
const int PLATE_H = 36;
const int PLATE_W = 136;

// Returns the orientation of the vector (x, y) in degrees, truncated to an
// int. Note the atan2 argument order (x, y): the angle is measured against
// the y-axis, which is what the orientation histogram in fastdeskew expects.
int angle(float x,float y)
{
    // Full-precision pi instead of the old 3.1415 approximation, and an
    // explicit cast instead of a silent double->int conversion.
    constexpr double kPi = 3.14159265358979323846;
    return static_cast<int>(std::atan2(x, y) * 180.0 / kPi);
}

// Moving-average filter: element i of the result is the mean of
// angle_list[i .. i + windowsSize - 1], so the output has
// size() - windowsSize + 1 entries.
//
// Returns an empty vector when the input is shorter than the window or the
// window is non-positive — the original computed
// `angle_list.size() - windowsSize + 1` unguarded, which underflows the
// unsigned size_t for short inputs.
std::vector<float> avgfilter(std::vector<float> angle_list,int windowsSize) {
    if (windowsSize <= 0 || angle_list.size() < static_cast<size_t>(windowsSize))
        return {};
    std::vector<float> angle_list_filtered(angle_list.size() - windowsSize + 1);
    for (size_t i = 0; i + windowsSize <= angle_list.size(); i++) {
        float avg = 0.00f;
        for (int j = 0; j < windowsSize; j++) {
            avg += angle_list[i + j];
        }
        angle_list_filtered[i] = avg / windowsSize;
    }
    return angle_list_filtered;
}


// Debug helper: renders `seq` as a 300-px-tall bar chart, each value scaled
// relative to the sequence maximum. The imshow call is commented out, so
// currently this only draws into a local image and discards it.
void drawHist(std::vector<float> seq){
    // Guard degenerate inputs: an empty sequence made max_element UB, and a
    // non-positive maximum would divide by zero below.
    if (seq.empty())
        return;
    float peak = *std::max_element(seq.begin(), seq.end());
    if (peak <= 0.0f)
        return;
    cv::Mat image(300,seq.size(),CV_8U);
    image.setTo(0);

    // The peak is now computed once; the original called max_element on
    // every loop iteration (accidental O(n^2)).
    for(int i = 0;i<seq.size();i++)
    {
        int p = int(float(seq[i])/peak*300);
        cv::line(image,cv::Point(i,300),cv::Point(i,300-p),cv::Scalar(255,255,255));
    }
    //cv::imshow("vis",image);
}

// Warps a sheared plate crop back to a rectangle given the estimated skew
// angle (degrees). Positive and negative angles shear in opposite
// directions, hence the two perspective-transform branches.
// NOTE(review): the `maxAngle` parameter is accepted but never used —
// confirm whether clamping to it was intended.
cv::Mat correctPlateImage(cv::Mat skewPlate,float angle,float maxAngle)
{

    cv::Mat dst;

    cv::Size size_o(skewPlate.cols,skewPlate.rows);


    // Extra horizontal room so the sheared content is not clipped after warping.
    int extend_padding = 0;
    // if(angle<0)
    extend_padding = static_cast<int>(skewPlate.rows*tan(cv::abs(angle)/180* 3.14) );
    // else
    // extend_padding = static_cast<int>(skewPlate.rows/tan(cv::abs(angle)/180* 3.14) );

    // std::cout<<"extend:"<<extend_padding<<std::endl;

    cv::Size size(skewPlate.cols + extend_padding ,skewPlate.rows);

    // Horizontal offset of the top/bottom edge produced by the skew.
    float interval = abs(sin((angle /180) * 3.14)* skewPlate.rows);
    // std::cout<<interval<<std::endl;

    // Source quad: the four corners of the input image.
    cv::Point2f pts1[4] = {cv::Point2f(0,0),cv::Point2f(0,size_o.height),cv::Point2f(size_o.width,0),cv::Point2f(size_o.width,size_o.height)};
    if(angle>0) {
        // Positive skew: shift the top edge right, bottom-right corner left.
        cv::Point2f pts2[4] = {cv::Point2f(interval, 0), cv::Point2f(0, size_o.height),
                               cv::Point2f(size_o.width, 0), cv::Point2f(size_o.width - interval, size_o.height)};
        cv::Mat M = cv::getPerspectiveTransform(pts1,pts2);
        cv::warpPerspective(skewPlate,dst,M,size);


    }
    else {
        // Negative skew: mirror of the positive case.
        cv::Point2f pts2[4] = {cv::Point2f(0, 0), cv::Point2f(interval, size_o.height), cv::Point2f(size_o.width-interval, 0),
                               cv::Point2f(size_o.width, size_o.height)};
        cv::Mat M = cv::getPerspectiveTransform(pts1,pts2);
        cv::warpPerspective(skewPlate,dst,M,size,cv::INTER_CUBIC);

    }
    return dst;
}
// Estimates the dominant local-gradient orientation of a plate image from a
// per-degree histogram (via cornerEigenValsAndVecs sampled every blockSize
// pixels), then warps the original image to cancel the shear.
cv::Mat fastdeskew(cv::Mat skewImage,int blockSize){

    const int FILTER_WINDOWS_SIZE = 5;
    // One histogram bin per degree. std::vector value-initializes to zero;
    // the old memset(..., size * sizeof(int)) on a float vector was both
    // redundant and type-punned.
    std::vector<float> angle_list(180, 0.0f);

    // Keep an untouched copy: the warp at the end uses the original pixels.
    cv::Mat bak;
    skewImage.copyTo(bak);
    if(skewImage.channels() == 3)
        cv::cvtColor(skewImage,skewImage,cv::COLOR_RGB2GRAY);

    if(skewImage.channels() == 1)
    {
        cv::Mat eigen;

        // eigen packs (l1, l2, x1, y1, x2, y2) per pixel; indices [4],[5]
        // are the eigenvector of the smaller eigenvalue.
        cv::cornerEigenValsAndVecs(skewImage,eigen,blockSize,5);
        for( int j = 0; j < skewImage.rows; j+=blockSize )
        {
            for( int i = 0; i < skewImage.cols; i+=blockSize )
            {
                float x2 = eigen.at<cv::Vec6f>(j, i)[4];
                float y2 = eigen.at<cv::Vec6f>(j, i)[5];
                int angle_cell = angle(x2,y2);
                angle_list[(angle_cell + 180)%180]+=1.0;
            }
        }
    }
    // Smooth the histogram before taking the arg-max; use the named window
    // constant instead of a literal 5 so the center-offset below stays in sync.
    std::vector<float> filtered = avgfilter(angle_list,FILTER_WINDOWS_SIZE);

    // Arg-max bin, recentered by half the smoothing window, then folded into
    // the [ANGLE_MIN, ANGLE_MAX] range and converted to a correction angle.
    int maxPos = std::max_element(filtered.begin(),filtered.end()) - filtered.begin() + FILTER_WINDOWS_SIZE/2;
    if(maxPos>ANGLE_MAX)
        maxPos = (-maxPos+90+180)%180;
    if(maxPos<ANGLE_MIN)
        maxPos-=90;
    maxPos=90-maxPos;
    cv::Mat deskewed = correctPlateImage(bak, static_cast<float>(maxPos),60.0f);
    return deskewed;
}



}//namespace pr

+ 205
- 0
Prj-IOS/src/FineMapping.cpp View File

@@ -0,0 +1,205 @@
//
// Created by 庾金科 on 22/09/2017.
//

#include "FineMapping.h"
namespace pr{

const int FINEMAPPING_H = 50;
const int FINEMAPPING_W = 120;
const int PADDING_UP_DOWN = 30;
// Debug helper: outline `rect` on `image` with a 1px green rectangle.
void drawRect(cv::Mat image,cv::Rect rect)
{
    const cv::Point topLeft(rect.x, rect.y);
    const cv::Point bottomRight(rect.x + rect.width, rect.y + rect.height);
    cv::rectangle(image, topLeft, bottomRight, cv::Scalar(0, 255, 0), 1);
}


// Load the plate-boundary regression network from a Caffe deploy
// prototxt and its trained weights.
FineMapping::FineMapping(std::string prototxt,std::string caffemodel) {
net = cv::dnn::readNetFromCaffe(prototxt, caffemodel);

}

// Crop the plate to its horizontal extent as regressed by the CNN.
//
// FinedVertical : vertically fine-mapped plate image.
// leftPadding   : pixels to extend beyond the predicted left border.
// rightPadding  : pixels to extend beyond the predicted right border.
//
// The net outputs two relative x positions in [0,1] for the left and
// right plate borders; they are scaled by the image width, padded, and
// clamped to the image before cropping.
cv::Mat FineMapping::FineMappingHorizon(cv::Mat FinedVertical,int leftPadding,int rightPadding)
{
    cv::Mat inputBlob = cv::dnn::blobFromImage(FinedVertical, 1/255.0, cv::Size(66,16),
                                               cv::Scalar(0,0,0),false);

    net.setInput(inputBlob,"data");
    cv::Mat prob = net.forward();
    int front = static_cast<int>(prob.at<float>(0,0)*FinedVertical.cols);
    int back = static_cast<int>(prob.at<float>(0,1)*FinedVertical.cols);
    front -= leftPadding ;
    if(front<0) front = 0;
    back +=rightPadding;
    if(back>FinedVertical.cols-1) back=FinedVertical.cols - 1;
    // Guard against a degenerate regression: colRange(front, back) throws
    // when back <= front, so fall back to the uncropped input instead.
    if(back<=front)
        return FinedVertical.clone();
    cv::Mat cropped = FinedVertical.colRange(front,back).clone();
    return cropped;
}
// Fit one line through `pts` with a Huber-robust least-squares fit
// (cv::fitLine; despite the name no RANSAC iteration is performed) and
// return its y values at x=0 and x=136 (the normalized plate width),
// shifted by the vertical padding added around the plate.
//
// zeroadd : extra offset applied to both endpoints, used to nudge the
//           fitted upper/lower boundary outward.
std::pair<int,int> FitLineRansac(std::vector<cv::Point> pts,int zeroadd = 0 )
{
    std::pair<int,int> res;
    if(pts.size()>2)
    {
        cv::Vec4f line;
        cv::fitLine(pts,line,cv::DIST_HUBER,0,0.01,0.01);
        float vx = line[0];
        float vy = line[1];
        float x = line[2];
        float y = line[3];
        // A (near-)vertical fitted direction would divide by zero below and
        // the float->int cast of the infinite result is UB; treat it like
        // the too-few-points case.
        if(std::fabs(vx) < 1e-6f)
        {
            res.first = zeroadd;
            res.second = zeroadd;
            return res;
        }
        int lefty = static_cast<int>((-x * vy / vx) + y);
        int righty = static_cast<int>(((136- x) * vy / vx) + y);
        res.first = lefty+PADDING_UP_DOWN+zeroadd;
        res.second = righty+PADDING_UP_DOWN+zeroadd;
        return res;
    }
    // Not enough points for a fit — return the bare offsets.
    res.first = zeroadd;
    res.second = zeroadd;
    return res;
}

// Locate the upper and lower plate borders and rectify them.
// MSER-like strategy: adaptively threshold the plate at `sliceNum`
// offsets between `lower` and `upper`, keep character-sized contour
// boxes, fit one line through the box tops and one through the bottoms
// (FitLineRansac), then warp the plate so those lines become horizontal.
// Returns a canonical 136x36 BGR image.
// NOTE(review): in the low-contour fallback the input is inverted in
// place (cv::bitwise_not on InputProposal), mutating the caller's Mat —
// confirm this side effect is intended.
cv::Mat FineMapping::FineMappingVertical(cv::Mat InputProposal,int sliceNum,int upper,int lower,int windows_size){


cv::Mat PreInputProposal;
cv::Mat proposal;

cv::resize(InputProposal,PreInputProposal,cv::Size(FINEMAPPING_W,FINEMAPPING_H));
if(InputProposal.channels() == 3)
cv::cvtColor(PreInputProposal,proposal,cv::COLOR_BGR2GRAY);
else
PreInputProposal.copyTo(proposal);

// proposal = PreInputProposal;

// Experimental: a 1x3 ellipse kernel meant to improve sensitivity; the
// erode below is currently disabled, so `kernal` is unused here.
cv::Mat kernal = cv::getStructuringElement(cv::MORPH_ELLIPSE,cv::Size(1,3));
// cv::erode(proposal,proposal,kernal);

// Step between successive adaptive-threshold offsets.
float diff = static_cast<float>(upper-lower);
diff/=static_cast<float>(sliceNum-1);
cv::Mat binary_adaptive;
std::vector<cv::Point> line_upper;   // top-left corners of kept boxes
std::vector<cv::Point> line_lower;   // bottom-right corners of kept boxes
int contours_nums=0;

for(int i = 0 ; i < sliceNum ; i++)
{
std::vector<std::vector<cv::Point> > contours;
float k =lower + i*diff;
cv::adaptiveThreshold(proposal,binary_adaptive,255,cv::ADAPTIVE_THRESH_MEAN_C,cv::THRESH_BINARY,windows_size,k);
cv::Mat draw;
binary_adaptive.copyTo(draw);
cv::findContours(binary_adaptive,contours,cv::RETR_EXTERNAL,cv::CHAIN_APPROX_SIMPLE);
for(auto contour: contours)
{
cv::Rect bdbox =cv::boundingRect(contour);
float lwRatio = bdbox.height/static_cast<float>(bdbox.width);
int bdboxAera = bdbox.width*bdbox.height;
// Keep boxes shaped like plate characters: either a tallish
// medium-area box, or a very thin small one (e.g. the digit "1").
if (( lwRatio>0.7&&bdbox.width*bdbox.height>100 && bdboxAera<300)
|| (lwRatio>3.0 && bdboxAera<100 && bdboxAera>10))
{

cv::Point p1(bdbox.x, bdbox.y);
cv::Point p2(bdbox.x + bdbox.width, bdbox.y + bdbox.height);
line_upper.push_back(p1);
line_lower.push_back(p2);
contours_nums+=1;
}
}
}

std:: cout<<"contours_nums "<<contours_nums<<std::endl;

// Too few character-like boxes: presumably a plate with inverted
// polarity (dark characters on light background) — invert, erode, and
// repeat the sweep with a slightly stricter area threshold.
if(contours_nums<41)
{
cv::bitwise_not(InputProposal,InputProposal);
cv::Mat kernal = cv::getStructuringElement(cv::MORPH_ELLIPSE,cv::Size(1,5));
cv::Mat bak;
cv::resize(InputProposal,bak,cv::Size(FINEMAPPING_W,FINEMAPPING_H));
cv::erode(bak,bak,kernal);
if(InputProposal.channels() == 3)
cv::cvtColor(bak,proposal,cv::COLOR_BGR2GRAY);
else
proposal = bak;
// NOTE(review): this shadows the outer contours_nums, so the retry's
// count is discarded once this block ends.
int contours_nums=0;

for(int i = 0 ; i < sliceNum ; i++)
{
std::vector<std::vector<cv::Point> > contours;
float k =lower + i*diff;
cv::adaptiveThreshold(proposal,binary_adaptive,255,cv::ADAPTIVE_THRESH_MEAN_C,cv::THRESH_BINARY,windows_size,k);
// cv::imshow("image",binary_adaptive);
// cv::waitKey(0);
cv::Mat draw;
binary_adaptive.copyTo(draw);
cv::findContours(binary_adaptive,contours,cv::RETR_EXTERNAL,cv::CHAIN_APPROX_SIMPLE);
for(auto contour: contours)
{
cv::Rect bdbox =cv::boundingRect(contour);
float lwRatio = bdbox.height/static_cast<float>(bdbox.width);
int bdboxAera = bdbox.width*bdbox.height;
if (( lwRatio>0.7&&bdbox.width*bdbox.height>120 && bdboxAera<300)
|| (lwRatio>3.0 && bdboxAera<100 && bdboxAera>10))
{

cv::Point p1(bdbox.x, bdbox.y);
cv::Point p2(bdbox.x + bdbox.width, bdbox.y + bdbox.height);
line_upper.push_back(p1);
line_lower.push_back(p2);
contours_nums+=1;
}
}
}
// std:: cout<<"contours_nums "<<contours_nums<<std::endl;
}

// Pad top/bottom (PADDING_UP_DOWN = 30) so the fitted border lines can
// fall outside the original plate crop.
cv::Mat rgb;
cv::copyMakeBorder(PreInputProposal, rgb, 30, 30, 0, 0, cv::BORDER_REPLICATE);
// cv::imshow("rgb",rgb);
// cv::waitKey(0);
//


// A = line through box tops (upper border), B = line through box
// bottoms (lower border); zeroadd nudges each outward by 2px.
std::pair<int, int> A;
std::pair<int, int> B;
A = FitLineRansac(line_upper, -2);
B = FitLineRansac(line_lower, 2);
int leftyB = A.first;
int rightyB = A.second;
int leftyA = B.first;
int rightyA = B.second;
int cols = rgb.cols;
int rows = rgb.rows;
// Perspective-warp the border-line quadrilateral onto the canonical
// 136x36 plate (same mapping as the original Python prototype below).
// pts_map1 = np.float32([[cols - 1, rightyA], [0, leftyA],[cols - 1, rightyB], [0, leftyB]])
// pts_map2 = np.float32([[136,36],[0,36],[136,0],[0,0]])
// mat = cv2.getPerspectiveTransform(pts_map1,pts_map2)
// image = cv2.warpPerspective(rgb,mat,(136,36),flags=cv2.INTER_CUBIC)
std::vector<cv::Point2f> corners(4);
corners[0] = cv::Point2f(cols - 1, rightyA);
corners[1] = cv::Point2f(0, leftyA);
corners[2] = cv::Point2f(cols - 1, rightyB);
corners[3] = cv::Point2f(0, leftyB);
std::vector<cv::Point2f> corners_trans(4);
corners_trans[0] = cv::Point2f(136, 36);
corners_trans[1] = cv::Point2f(0, 36);
corners_trans[2] = cv::Point2f(136, 0);
corners_trans[3] = cv::Point2f(0, 0);
cv::Mat transform = cv::getPerspectiveTransform(corners, corners_trans);
cv::Mat quad = cv::Mat::zeros(36, 136, CV_8UC3);
cv::warpPerspective(rgb, quad, transform, quad.size());
return quad;

}


}



+ 69
- 0
Prj-IOS/src/Pipeline.cpp View File

@@ -0,0 +1,69 @@
//
// Created by 庾金科 on 23/10/2017.
//

#include "../include/Pipeline.h"


namespace pr {

// Label table indexed by the recognizer's class id and consumed by
// decodePlateNormal: 31 province abbreviations, then digits 0-9, then
// the plate letters (I and O are absent, as on real mainland plates).
std::vector<std::string> chars_code{"京","沪","津","渝","冀","晋","蒙","辽","吉","黑","苏","浙","皖","闽","赣","鲁","豫","鄂","湘","粤","桂","琼","川","贵","云","藏","陕","甘","青","宁","新","0","1","2","3","4","5","6","7","8","9","A","B","C","D","E","F","G","H","J","K","L","M","N","P","Q","R","S","T","U","V","W","X","Y","Z"};



// Assemble the full pipeline by loading its four models: a HAAR cascade
// for rough detection, a Caffe net for boundary fine-mapping, one for
// character segmentation and one for character recognition.
// The raw pointers are owned by this object and freed in ~PipelinePR.
PipelinePR::PipelinePR(std::string detector_filename,
std::string finemapping_prototxt, std::string finemapping_caffemodel,
std::string segmentation_prototxt, std::string segmentation_caffemodel,
std::string charRecognization_proto, std::string charRecognization_caffemodel) {
plateDetection = new PlateDetection(detector_filename);
fineMapping = new FineMapping(finemapping_prototxt, finemapping_caffemodel);
plateSegmentation = new PlateSegmentation(segmentation_prototxt, segmentation_caffemodel);
generalRecognizer = new CNNRecognizer(charRecognization_proto, charRecognization_caffemodel);
}

// Release the four pipeline-stage objects created in the constructor.
PipelinePR::~PipelinePR() {

delete plateDetection;
delete fineMapping;
delete plateSegmentation;
delete generalRecognizer;

}

// Run the complete recognition pipeline on one frame: rough cascade
// detection, border fine-mapping, deskew, character segmentation and
// per-character CNN recognition.
// Returns one PlateInfo (with its decoded name) per detected plate.
std::vector<PlateInfo> PipelinePR:: RunPiplineAsImage(cv::Mat plateImage) {
std::vector<PlateInfo> results;
std::vector<pr::PlateInfo> plates;
plateDetection->plateDetectionRough(plateImage,plates);

for (pr::PlateInfo plateinfo:plates) {

cv::Mat image_finemapping = plateinfo.getPlateImage();
// Tighten top/bottom borders, deskew, then tighten left/right
// borders, and normalize to the canonical 136x36 plate size.
image_finemapping = fineMapping->FineMappingVertical(image_finemapping);
image_finemapping = pr::fastdeskew(image_finemapping, 5);
image_finemapping = fineMapping->FineMappingHorizon(image_finemapping, 2, 5);
cv::resize(image_finemapping, image_finemapping, cv::Size(136, 36));
plateinfo.setPlateImage(image_finemapping);
std::vector<cv::Rect> rects;
plateSegmentation->segmentPlatePipline(plateinfo, 1, rects);
plateSegmentation->ExtractRegions(plateinfo, rects);
// Extra right margin so the last character's box can extend past
// the plate edge during extraction.
cv::copyMakeBorder(image_finemapping, image_finemapping, 0, 0, 0, 20, cv::BORDER_REPLICATE);

plateinfo.setPlateImage(image_finemapping);
generalRecognizer->SegmentBasedSequenceRecognition(plateinfo);
plateinfo.decodePlateNormal(chars_code);
results.push_back(plateinfo);
std::cout << plateinfo.getPlateName() << std::endl;


}

// for (auto str:results) {
// std::cout << str << std::endl;
// }
return results;

} // end RunPiplineAsImage (this brace closes the function, not the namespace)



}

+ 61
- 0
Prj-IOS/src/PlateDetection.cpp View File

@@ -0,0 +1,61 @@
//
// Created by 庾金科 on 20/09/2017.
//
#include "../include/PlateDetection.h"

#include <iostream>

#include "util.h"

namespace pr{


// Load the HAAR cascade used for rough plate localisation.
// cv::CascadeClassifier::load returns false on a missing/corrupt file;
// the original ignored it, so a bad path only failed much later inside
// detectMultiScale. Report the failure here where the cause is obvious.
PlateDetection::PlateDetection(std::string filename_cascade){
    if(!cascade.load(filename_cascade))
        std::cerr << "PlateDetection: failed to load cascade file: "
                  << filename_cascade << std::endl;
};


// Rough plate localisation: run the HAAR cascade over a grayscale copy
// of the frame, then grow every hit (14% left/right, 60% upward, plus
// 28% extra width and 120% extra height) so the fine-mapping stages
// have context around the plate. Crops are appended to plateInfos.
void PlateDetection::plateDetectionRough(cv::Mat InputImage,std::vector<pr::PlateInfo> &plateInfos,int min_w,int max_w){
    cv::Mat gray;
    cv::cvtColor(InputImage, gray, cv::COLOR_BGR2GRAY);

    // Plates are roughly 4:1, so derive height bounds from the widths.
    const cv::Size smallest(min_w, min_w / 4);
    const cv::Size largest(max_w, max_w / 4);

    std::vector<cv::Rect> candidates;
    cascade.detectMultiScale(gray, candidates,
                             1.1, 3, cv::CASCADE_SCALE_IMAGE, smallest, largest);

    for (auto region : candidates)
    {
        const int shift_x = static_cast<int>(region.width * 0.14);
        const int shift_y = static_cast<int>(region.height * 0.6);
        const int extra_w = static_cast<int>(region.width * 0.28);
        const int extra_h = static_cast<int>(region.height * 1.2);
        region.x -= shift_x;
        region.y -= shift_y;
        region.width += extra_w;
        region.height += extra_h;
        // cropFromImage clamps the grown rect back into the image bounds.
        cv::Mat crop = util::cropFromImage(InputImage, region);
        PlateInfo info(crop, region);
        plateInfos.push_back(info);
    }
}
// std::vector<pr::PlateInfo> PlateDetection::plateDetectionRough(cv::Mat InputImage,cv::Rect roi,int min_w,int max_w){
// cv::Mat roi_region = util::cropFromImage(InputImage,roi);
// return plateDetectionRough(roi_region,min_w,max_w);
// }




}//namespace pr

+ 402
- 0
Prj-IOS/src/PlateSegmentation.cpp View File

@@ -0,0 +1,402 @@
//
// Created by 庾金科 on 16/10/2017.
//

#include "../include/PlateSegmentation.h"
#include "../include/niBlackThreshold.h"


//#define DEBUG
namespace pr{

// Load the sliding-window character classifier from a Caffe deploy
// prototxt and its trained weights.
PlateSegmentation::PlateSegmentation(std::string prototxt,std::string caffemodel) {
net = cv::dnn::readNetFromCaffe(prototxt, caffemodel);
}
// Forward a single 22x22 window through the segmentation net and return
// its raw response row.
cv::Mat PlateSegmentation::classifyResponse(const cv::Mat &cropped){
    cv::Mat blob = cv::dnn::blobFromImage(cropped, 1 / 255.0, cv::Size(22, 22),
                                          cv::Scalar(0, 0, 0), false);
    net.setInput(blob, "data");
    return net.forward();
}

void drawHist(float* seq,int size,const char* name){
cv::Mat image(300,size,CV_8U);
image.setTo(0);
float* start =seq;
float* end = seq+size;
float l = *std::max_element(start,end);
for(int i = 0;i<size;i++)
{
int p = int(float(seq[i])/l*300);
cv::line(image,cv::Point(i,300),cv::Point(i,300-p),cv::Scalar(255,255,255));
}
cv::resize(image,image,cv::Size(600,100));
//cv::imshow(name,image);
}

// Clamp `val` into the valid coordinate range [0, rows].
inline void computeSafeMargin(int &val,const int &rows){
    val = std::max(0, std::min(val, rows));
}

// Build the rectangle reaching left/right/top/bottom pixels away from
// `center`, clipped to bdSize (last row/column excluded).
cv::Rect boxFromCenter(const cv::Point center,int left,int right,int top,int bottom,cv::Size bdSize)
{
    cv::Point tl(center.x - left, center.y - top);
    cv::Point br(center.x + right, center.y + bottom);
    if (tl.x < 0) tl.x = 0;
    if (tl.y < 0) tl.y = 0;
    if (br.x > bdSize.width - 1) br.x = bdSize.width - 1;
    if (br.y > bdSize.height - 1) br.y = bdSize.height - 1;
    return cv::Rect(tl, br);
}

// Grow `rect` by the given margin on each side, rebuilding it from its
// center so boxFromCenter keeps it clipped to bdSize.
cv::Rect boxPadding(cv::Rect rect,int left,int right,int top,int bottom,cv::Size bdSize)
{
    const int halfW = rect.width >> 1;
    const int halfH = rect.height >> 1;
    const cv::Point mid(rect.x + halfW, rect.y + halfH);
    return boxFromCenter(mid, halfW + left, halfW + right,
                         halfH + top, halfH + bottom, bdSize);
}



// Refine the 7 coarse segmentation x-positions into tight per-character
// boxes.
//
// plateImage   : grayscale plate (136x36).
// candidatePts : output of templateMatchFinding; index 0 is the Chinese
//                character, the gap [5]-[4] is the nominal char width.
// padding      : slack added around each candidate strip / final box.
// rects        : out — one box per character, in plate coordinates.
//
// For each strip, the character is re-localized by Niblack binarization
// + contour search, choosing the contour box nearest the strip center.
void PlateSegmentation:: refineRegion(cv::Mat &plateImage,const std::vector<int> &candidatePts,const int padding,std::vector<cv::Rect> &rects){
// Nominal character width taken from the spacing of two digit slots.
int w = candidatePts[5] - candidatePts[4];
int cols = plateImage.cols;
int rows = plateImage.rows;
for(int i = 0 ; i < candidatePts.size() ; i++)
{
int left = 0;
int right = 0 ;

// The Chinese character (i==0) gets no left padding.
if(i == 0 ){
left= candidatePts[i];
right = left+w+padding;
}
else {
left = candidatePts[i] - padding;
right = left + w + padding * 2;
}

computeSafeMargin(right,cols);
computeSafeMargin(left,cols);
cv::Rect roi(left,0,right - left,rows-1);
cv::Mat roiImage;
plateImage(roi).copyTo(roiImage);

// Letters/digits are re-localized; the Chinese character keeps its
// coarse strip (see the else branch at the bottom).
if (i>=1)
{

cv::Mat roi_thres;
// cv::threshold(roiImage,roi_thres,0,255,cv::THRESH_OTSU|cv::THRESH_BINARY);

niBlackThreshold(roiImage,roi_thres,255,cv::THRESH_BINARY,15,0.3,BINARIZATION_NIBLACK);

std::vector<std::vector<cv::Point>> contours;
cv::findContours(roi_thres,contours,cv::RETR_LIST,cv::CHAIN_APPROX_SIMPLE);
cv::Point boxCenter(roiImage.cols>>1,roiImage.rows>>1);

cv::Rect final_bdbox;
cv::Point final_center;
int final_dist = INT_MAX;


// Pick the tall-enough contour whose center is horizontally
// closest to the strip center. (`and` is the ISO alternative
// token for &&.)
for(auto contour:contours)
{
cv::Rect bdbox = cv::boundingRect(contour);
cv::Point center(bdbox.x+(bdbox.width>>1),bdbox.y + (bdbox.height>>1));
int dist = (center.x - boxCenter.x)*(center.x - boxCenter.x);
if(dist<final_dist and bdbox.height > rows>>1)
{ final_dist =dist;
final_center = center;
final_bdbox = bdbox;
}
}

// Rebuild a degenerate box (very thin and tiny, e.g. digit "1")
// around its center; otherwise just pad the found box.
if(final_bdbox.height/ static_cast<float>(final_bdbox.width) > 3.5 && final_bdbox.width*final_bdbox.height<10)
final_bdbox = boxFromCenter(final_center,8,8,(rows>>1)-3 , (rows>>1) - 2,roiImage.size());
else {
// The last character gets extra right padding (plate edge).
if(i == candidatePts.size()-1)
final_bdbox = boxPadding(final_bdbox, padding/2, padding, padding/2, padding/2, roiImage.size());
else
final_bdbox = boxPadding(final_bdbox, padding, padding, padding, padding, roiImage.size());


// std::cout<<final_bdbox<<std::endl;
// std::cout<<roiImage.size()<<std::endl;
#ifdef DEBUG
//cv::imshow("char_thres",roi_thres);

//cv::imshow("char",roiImage(final_bdbox));
//cv::waitKey(0);
#endif


}


// Convert from strip-local back to plate coordinates.
final_bdbox.x += left;

rects.push_back(final_bdbox);
//

}
else
{
rects.push_back(roi);
}

// else
// {
//
// }

// cv::GaussianBlur(roiImage,roiImage,cv::Size(7,7),3);
//
// cv::imshow("image",roiImage);
// cv::waitKey(0);


}



}
// In-place Gaussian smoothing of `angle_list` (length `size`) using a
// kernel of width `windowsSize` with sigma 3.
//
// NOTE(review): the boundary guard in the inner loop is kept exactly as
// written (it skips some edge contributions); the downstream template
// matching was tuned against this exact response, so it is not "fixed".
void avgfilter(float *angle_list,int size,int windowsSize) {
    // Snapshot the input so already-smoothed values are not reused.
    float *filterd = new float[size];
    for (int i = 0; i < size; i++) filterd[i] = angle_list[i];

    cv::Mat kernal_gaussian = cv::getGaussianKernel(windowsSize, 3, CV_32F);
    float *kernal = (float*)kernal_gaussian.data;
    int r = windowsSize / 2;

    for (int i = 0; i < size; i++) {
        float avg = 0.00f;
        for (int j = 0; j < windowsSize; j++) {
            if (i+j-r > 0 && i+j+r < size-1)
                avg += filterd[i + j - r] * kernal[j];
        }
        angle_list[i] = avg;
    }

    // Was `delete filterd;` — undefined behavior for memory obtained with
    // new[]; the array form must be used.
    delete[] filterd;
}

// Brute-force search for the 7-point segmentation layout (1 Chinese
// character + 6 letters/digits) that maximizes the summed classifier
// responses.
//
// respones     : transposed sliding-window outputs; assuming contiguous
//                CV_32F data, row 0 = letter/digit score, row 1 = "no
//                character" score, row 2 = Chinese-character score, one
//                column per window position (TODO confirm row order
//                against the trained model).
// windowsWidth : nominal character width in response columns.
// candidatePts : out — best score and the 7 segmentation x-positions.
//
// NOTE(review): despite the const&, the avgfilter calls below smooth
// the response rows in place through the raw data pointer.
void PlateSegmentation::templateMatchFinding(const cv::Mat &respones,int windowsWidth,std::pair<float,std::vector<int>> &candidatePts){
int rows = respones.rows;
int cols = respones.cols;



float *data = (float*)respones.data;
float *engNum_prob = data;
float *false_prob = data+cols;
float *ch_prob = data+cols*2;

avgfilter(engNum_prob,cols,5);
avgfilter(false_prob,cols,5);
// avgfilter(ch_prob,cols,5);
std::vector<int> candidate_pts(7);
#ifdef DEBUG
drawHist(engNum_prob,cols,"engNum_prob");
drawHist(false_prob,cols,"false_prob");
drawHist(ch_prob,cols,"ch_prob");
//cv::waitKey(0);
#endif




int cp_list[7];
float loss_selected = -1;

// Sweep start offset, per-character width and the gap after the second
// character (the plate's separator dot sits in that interval).
for(int start = 0 ; start < 20 ; start+=2)
for(int width = windowsWidth-5; width < windowsWidth+5 ; width++ ){
for(int interval = windowsWidth/2; interval < windowsWidth; interval++)
{
int cp1_ch = start;
int cp2_p0 = cp1_ch+ width;
int cp3_p1 = cp2_p0+ width + interval;
int cp4_p2 = cp3_p1 + width;
int cp5_p3 = cp4_p2 + width+1;
int cp6_p4 = cp5_p3 + width+2;
int cp7_p5= cp6_p4+ width+2;

// Midpoints between adjacent cut positions (character gaps).
int md1 = (cp1_ch+cp2_p0)>>1;
int md2 = (cp2_p0+cp3_p1)>>1;
int md3 = (cp3_p1+cp4_p2)>>1;
int md4 = (cp4_p2+cp5_p3)>>1;
int md5 = (cp5_p3+cp6_p4)>>1;
int md6 = (cp6_p4+cp7_p5)>>1;




if(cp7_p5>=cols)
continue;
// Score: character-like responses at the cuts plus "no character"
// responses in the gaps between them.
// NOTE(review): md1 is computed but never used and false_prob[md5]
// is summed twice below — likely a typo for md1, but left untouched
// because the layout search was tuned against this exact expression.
float loss = ch_prob[cp1_ch]+
engNum_prob[cp2_p0] +engNum_prob[cp3_p1]+engNum_prob[cp4_p2]+engNum_prob[cp5_p3]+engNum_prob[cp6_p4] +engNum_prob[cp7_p5]
+ (false_prob[md2]+false_prob[md3]+false_prob[md4]+false_prob[md5]+false_prob[md5] + false_prob[md6]);
// float loss = ch_prob[cp1_ch]*3 -(false_prob[cp3_p1]+false_prob[cp4_p2]+false_prob[cp5_p3]+false_prob[cp6_p4]+false_prob[cp7_p5]);




if(loss>loss_selected)
{
loss_selected = loss;
cp_list[0]= cp1_ch;
cp_list[1]= cp2_p0;
cp_list[2]= cp3_p1;
cp_list[3]= cp4_p2;
cp_list[4]= cp5_p3;
cp_list[5]= cp6_p4;
cp_list[6]= cp7_p5;
}
}
}
candidate_pts[0] = cp_list[0];
candidate_pts[1] = cp_list[1];
candidate_pts[2] = cp_list[2];
candidate_pts[3] = cp_list[3];
candidate_pts[4] = cp_list[4];
candidate_pts[5] = cp_list[5];
candidate_pts[6] = cp_list[6];

candidatePts.first = loss_selected;
candidatePts.second = candidate_pts;

};


// Slide a windowsWidth-wide window across the plate with the given
// stride, classify each window and stack the responses into `respones`
// (transposed at the end: one column per window position).
// NOTE(review): resizes the caller's plateImage to 136x36 in place.
void PlateSegmentation::segmentPlateBySlidingWindows(cv::Mat &plateImage,int windowsWidth,int stride,cv::Mat &respones){


cv::resize(plateImage,plateImage,cv::Size(136,36));

cv::Mat plateImageGray;
cv::cvtColor(plateImage,plateImageGray,cv::COLOR_BGR2GRAY);

// Windows are one pixel short of the full height; `width` is unused.
int height = plateImage.rows - 1;
int width = plateImage.cols - 1;

for(int i = 0 ; i < plateImage.cols - windowsWidth +1 ; i +=stride)
{
cv::Rect roi(i,0,windowsWidth,height);
cv::Mat roiImage = plateImageGray(roi);
cv::Mat response = classifyResponse(roiImage);
respones.push_back(response);
}




respones = respones.t();
// std::pair<float,std::vector<int>> images ;
//
//
// std::cout<<images.first<<" ";
// for(int i = 0 ; i < images.second.size() ; i++)
// {
// std::cout<<images.second[i]<<" ";
//// cv::line(plateImageGray,cv::Point(images.second[i],0),cv::Point(images.second[i],36),cv::Scalar(255,255,255),1); //DEBUG
// }

// int w = images.second[5] - images.second[4];

// cv::line(plateImageGray,cv::Point(images.second[5]+w,0),cv::Point(images.second[5]+w,36),cv::Scalar(255,255,255),1); //DEBUG
// cv::line(plateImageGray,cv::Point(images.second[5]+2*w,0),cv::Point(images.second[5]+2*w,36),cv::Scalar(255,255,255),1); //DEBUG


// RefineRegion(plateImageGray,images.second,5);

// std::cout<<w<<std::endl;

// std::cout<<<<std::endl;

// cv::resize(plateImageGray,plateImageGray,cv::Size(600,100));



}

// void filterGaussian(cv::Mat &respones,float sigma){
//
// }


// Full segmentation pass on one plate: collect sliding-window
// responses, find the best 7-point layout by template matching, then
// refine each point into a character rect (appended to Char_rects).
//
// stride : window step in pixels; the template width is scaled by it
//          so layout positions stay in response-column units.
void PlateSegmentation::segmentPlatePipline(PlateInfo &plateInfo,int stride,std::vector<cv::Rect> &Char_rects){
cv::Mat plateImage = plateInfo.getPlateImage(); // get src image .
cv::Mat plateImageGray;
cv::cvtColor(plateImage,plateImageGray,cv::COLOR_BGR2GRAY);
//do binarzation
//
std::pair<float,std::vector<int>> sections ; // segment points variables .

cv::Mat respones; //three response of every sub region from origin image .
segmentPlateBySlidingWindows(plateImage,DEFAULT_WIDTH,1,respones);
templateMatchFinding(respones,DEFAULT_WIDTH/stride,sections);

// std::cout<<sections<<std::endl;

refineRegion(plateImageGray,sections.second,5,Char_rects);
#ifdef DEBUG
for(int i = 0 ; i < sections.second.size() ; i++)
{
std::cout<<sections.second[i]<<" ";
cv::line(plateImageGray,cv::Point(sections.second[i],0),cv::Point(sections.second[i],36),cv::Scalar(255,255,255),1); //DEBUG
}
//cv::imshow("plate",plateImageGray);
//cv::waitKey(0);
#endif
// cv::waitKey(0);

}

// Crop each character rect out of the plate image, convert it to
// grayscale, equalize its histogram, and append it to the plate tagged
// by type: index 0 is the Chinese province character, index 1 the
// letter, the rest letters or digits.
void PlateSegmentation::ExtractRegions(PlateInfo &plateInfo,std::vector<cv::Rect> &rects){
    cv::Mat plateImage = plateInfo.getPlateImage();
    for(size_t i = 0 ; i < rects.size() ; i++){
        cv::Mat charImage;
        plateImage(rects[i]).copyTo(charImage);
        // Was `if(charImage.channels())`, which is always true for a
        // non-empty Mat and would make cvtColor throw if the plate were
        // already single-channel; only convert genuine color crops.
        if(charImage.channels() == 3)
            cv::cvtColor(charImage,charImage,cv::COLOR_BGR2GRAY);
        cv::equalizeHist(charImage,charImage);

        std::pair<CharType,cv::Mat> char_instance;
        if(i == 0){
            char_instance.first = CHINESE;
        } else if(i == 1){
            char_instance.first = LETTER;
        } else {
            char_instance.first = LETTER_NUMS;
        }
        char_instance.second = charImage;
        plateInfo.appendPlateChar(char_instance);
    }
}

}//namespace pr

+ 26
- 0
Prj-IOS/src/Recognizer.cpp View File

@@ -0,0 +1,26 @@
//
// Created by 庾金科 on 22/10/2017.
//

#include "../include/Recognizer.h"

namespace pr{
// Classify every segmented character patch and attach the resulting
// probability table to the plate, preserving each character's type tag.
void GeneralRecognizer::SegmentBasedSequenceRecognition(PlateInfo &plateinfo){
    // Iterate by const reference — the original `for(auto ...)` copied a
    // std::pair<CharType, cv::Mat> on every iteration for no benefit.
    for(const auto &char_instance : plateinfo.plateChars)
    {
        std::pair<CharType, cv::Mat> res;
        cv::Mat code_table = recognizeCharacter(char_instance.second);
        res.first = char_instance.first;
        code_table.copyTo(res.second);
        plateinfo.appendPlateCoding(res);
    }
}
}

+ 79
- 0
Prj-IOS/src/util.h View File

@@ -0,0 +1,79 @@
//
// Created by 庾金科 on 04/04/2017.
//

#include <algorithm>
#include <utility>

#include <opencv2/opencv.hpp>

namespace util{

// Exchange a and b. Uses moves instead of the original's three copies,
// so heavy types (std::string, std::vector, cv::Size members, ...) are
// swapped without duplicating their contents. Same interface.
template <class T> void swap ( T& a, T& b )
{
    T c(std::move(a)); a = std::move(b); b = std::move(c);
}

// Return the smaller of a and b by value.
// Parameters are now const references: the original took non-const
// refs, which rejected const objects and temporaries (e.g. min(3, x)).
// All existing lvalue callers keep working unchanged.
template <class T> T min(const T& a,const T& b )
{
    return a > b ? b : a;
}

// Crop `rect` out of `image`, first clamping it so it lies inside the
// frame (right/bottom edges are kept one pixel short, matching the
// conventions elsewhere in this project). Returns a deep copy.
cv::Mat cropFromImage(const cv::Mat &image,cv::Rect rect){
    const int wLimit = image.cols - 1;
    const int hLimit = image.rows - 1;
    rect.x = std::max(rect.x, 0);
    rect.y = std::max(rect.y, 0);
    rect.height = std::min(rect.height, hLimit - rect.y);
    rect.width = std::min(rect.width, wLimit - rect.x);
    cv::Mat cropped;
    image(rect).copyTo(cropped);
    return cropped;
}

// Cut a rotated rectangle out of `image`: rotate the whole frame so the
// box becomes axis-aligned, then take a subpixel crop around its
// center. Angles below -45 degrees are normalized by swapping the box's
// width and height.
cv::Mat cropBox2dFromImage(const cv::Mat &image,cv::RotatedRect rect)
{
    float theta = rect.angle;
    cv::Size box(rect.size.width, rect.size.height);
    if (rect.angle < -45.) {
        theta += 90.0;
        swap(box.width, box.height);
    }
    const cv::Mat rotation = cv::getRotationMatrix2D(rect.center, theta, 1.0);
    cv::Mat rotated;
    cv::warpAffine(image, rotated, rotation, image.size(), cv::INTER_CUBIC);
    cv::Mat cropped;
    cv::getRectSubPix(rotated, box, rect.center, cropped);
    return cropped;
}

// Histogram of the hue channel of a BGR `image`: 256 uniform bins over
// [0,255].
// NOTE(review): 8-bit hue in OpenCV spans [0,180), so the upper bins
// stay empty; also the trailing `true` enables accumulation, which is
// harmless only because `hist` starts empty — confirm both are intended.
cv::Mat calcHist(const cv::Mat &image)
{
cv::Mat hsv;
std::vector<cv::Mat> hsv_planes;
cv::cvtColor(image,hsv,cv::COLOR_BGR2HSV);
cv::split(hsv,hsv_planes);
cv::Mat hist;
int histSize = 256;
float range[] = {0,255};
const float* histRange = {range};

// Only plane 0 (hue) contributes to the histogram.
cv::calcHist( &hsv_planes[0], 1, 0, cv::Mat(), hist, 1, &histSize, &histRange,true, true);
return hist;

}
// Similarity of two BGR images as the correlation of their hue
// histograms: 1.0 means identical distributions; values can be negative.
float computeSimilir(const cv::Mat &A,const cv::Mat &B)
{
    cv::Mat histA,histB;
    histA = calcHist(A);
    histB = calcHist(B);
    // cv::HISTCMP_CORREL is the OpenCV 3 C++ constant, consistent with the
    // cv::COLOR_* names used throughout this header; the old C macro
    // CV_COMP_CORREL requires the legacy *_c compatibility headers.
    return cv::compareHist(histA,histB,cv::HISTCMP_CORREL);
}





}//namespace util

+ 17
- 18
README.md View File

@@ -8,15 +8,21 @@ HyperLPR是一个使用深度学习针对中文车牌识别的实现,与较

[相关技术博客](http://blog.csdn.net/relocy/article/details/78705662)(技术文章会在接下来的几个月的时间内连续更新)。


### 更新

+ 添加了IOS版本的实现(感谢[xiaojun123456](https://github.com/xiaojun123456)的工作)
+ 添加端到端的序列识别模型识别率大幅度提升,使得无需分割字符即可识别,识别速度提高20% (2017.11.17)
+ 新增的端到端模型可以识别新能源车牌、教练车牌、白色警用车牌、武警车牌 (2017.11.17)
+ 更新Windows版本的Visual Studio 2015 工程(2017.11.15)
+ 增加cpp版本,目前仅支持标准蓝牌(需要依赖OpenCV 3.3) (2017.10.28)
+ 添加了简单的Android实现 (骁龙835 (*720*x*1280*) 200ms )(2017.10.28)

### TODO

+ 提供字符分割、字符识别的训练代码
+ 改进精定位方法
+ C++版的端到端识别模型

### 特性

+ 速度快 720p ,单核 Intel 2.2G CPU (macbook Pro 2015)平均识别时间低于100ms
@@ -44,22 +50,6 @@ HyperLPR是一个使用深度学习针对对中文车牌识别的实现,与较

+ Opencv 3.3

### 设计流程

> step1. 使用opencv 的 HAAR Cascade 检测车牌大致位置
>
> step2. Extend 检测到的大致位置的矩形区域
>
> step3. 使用类似于MSER的方式的 多级二值化 + RANSAC 拟合车牌的上下边界
>
> step4. 使用CNN Regression回归车牌左右边界
>
> step5. 使用基于纹理场的算法进行车牌校正倾斜
>
> step6. 使用CNN滑动窗切割字符
>
> step7. 使用CNN识别字符

### 简单使用方式

```python
@@ -98,6 +88,8 @@ sudo make -j
- [ ] 双层个性化车牌


###### Note:由于训练的时候样本存在一些不均衡的问题,一些特殊车牌存在一定识别率低下的问题,如(使馆/港澳车牌),会在后续的版本进行改进。


### 测试样例

@@ -118,6 +110,13 @@ sudo make -j

车牌识别框架开发时使用的数据并不是很多,有意者可以为我们提供相关车牌数据。联系邮箱 455501914@qq.com。

### 获取帮助
### 捐赠我们

***如果您愿意支持我们持续对这个框架的开发,可以通过下面的链接来对我们捐赠。***

**[支付宝/微信](http://chuantu.biz/t6/178/1513525003x-1404758184.png)**

#### 获取帮助

+ HyperLPR讨论QQ群:673071218, 加前请备注HyperLPR交流。


Loading…
Cancel
Save