//
// CameraViewController.m
// lpr
//
// Created by baotim on 2018/10/26.
// Copyright © 2018 lprSample. All rights reserved.
//
#import "CameraViewController.h"
#import "Utility.h"
#import "Pipeline.h"
// Screen width and height
#define kScreenWidth [UIScreen mainScreen].bounds.size.width
#define kScreenHeight [UIScreen mainScreen].bounds.size.height
@interface CameraViewController () <UIAlertViewDelegate, AVCaptureVideoDataOutputSampleBufferDelegate>
{
AVCaptureSession *_session;
AVCaptureDeviceInput *_captureInput;
AVCaptureStillImageOutput *_captureOutput;
AVCaptureVideoPreviewLayer *_preview;
AVCaptureDevice *_device;
NSTimer *_timer; // Timer that re-triggers continuous autofocus
BOOL _on; // Torch (flash) state
BOOL _capture; // Whether the navigation transition has finished
BOOL _isFoucePixel; // Whether the device supports phase-detection autofocus
CGRect _imgRect; // Crop rect used for still captures
int _count; // Frame counter: recognize every _MaxFR frames
CGFloat _isLensChanged; // Last observed lens position
/* Lens position under phase-detection autofocus; the value keeps changing while the lens is moving. */
CGFloat _isIOS8AndFoucePixelLensPosition;
/*
Controls recognition speed. The minimum value is 1; the larger the value, the slower recognition runs.
Camera initialization sets the default to 1 (do not change that). If the device uses phase-detection
autofocus, the value is set to 2; it can be changed, but must stay >= 1, and larger means slower.
Its purpose is to throttle recognition under phase-detection autofocus, where frames would otherwise
be processed too quickly. It is set during camera initialization and used in the capture delegate;
there is no need to modify it unless you have special requirements.
*/
int _MaxFR;
cv::Mat source_image;
}
@property (assign, nonatomic) BOOL adjustingFocus;
@property (nonatomic, retain) CALayer *customLayer;
@property (nonatomic,assign) BOOL isProcessingImage;
@property (nonatomic, strong) UIImage* image;
@end
@implementation CameraViewController
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view.
self.view.backgroundColor = [UIColor clearColor];
// Initialize the camera
[self initialize];
// Build the camera UI
[self createCameraView];
}
- (void)viewWillAppear:(BOOL)animated {
[super viewWillAppear:animated];
_capture = NO;
[self performSelector:@selector(changeCapture) withObject:nil afterDelay:0.4];
// If phase-detection autofocus is unavailable (iPhone 6 and later support it), run a timer that keeps triggering continuous autofocus
if (!_isFoucePixel) {
_timer = [NSTimer scheduledTimerWithTimeInterval:1.3 target:self selector:@selector(fouceMode) userInfo:nil repeats:YES];
}
AVCaptureDevice*camDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
int flags = NSKeyValueObservingOptionNew;
// Observe focus state via KVO
[camDevice addObserver:self forKeyPath:@"adjustingFocus" options:flags context:nil];
if (_isFoucePixel) {
[camDevice addObserver:self forKeyPath:@"lensPosition" options:flags context:nil];
}
[_session startRunning];
}
- (void)viewWillDisappear:(BOOL)animated {
[super viewWillDisappear:animated];
self.navigationController.navigationBarHidden = NO;
}
- (void)viewDidDisappear:(BOOL)animated {
[super viewDidDisappear:animated];
if (!_isFoucePixel) {
[_timer invalidate];
_timer = nil;
}
AVCaptureDevice*camDevice =[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
[camDevice removeObserver:self forKeyPath:@"adjustingFocus"];
if (_isFoucePixel) {
[camDevice removeObserver:self forKeyPath:@"lensPosition"];
}
[_session stopRunning];
_capture = NO;
}
- (void)changeCapture {
_capture = YES;
}
#pragma mark - Private Methods
// Initialize the capture session
- (void)initialize {
// Check camera authorization
AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
if(authStatus == AVAuthorizationStatusRestricted || authStatus == AVAuthorizationStatusDenied){
self.view.backgroundColor = [UIColor blackColor];
UIAlertView * alt = [[UIAlertView alloc] initWithTitle:@"Camera access has not been granted" message:@"Please enable it in 'Settings > Privacy > Camera'" delegate:self cancelButtonTitle:nil otherButtonTitles:@"OK", nil];
[alt show];
return;
}
_MaxFR = 1;
// 1. Create the capture session
_session = [[AVCaptureSession alloc] init];
[_session setSessionPreset:AVCaptureSessionPreset1920x1080];
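// With the 1920x1080 preset, the video frames delivered to the sample buffer delegate below are 1080p.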
// 2. Create and configure the input device (back camera)
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices){
if (device.position == AVCaptureDevicePositionBack){
_device = device;
_captureInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:nil];
}
}
[_session addInput:_captureInput];
// 3. Create the video data output
AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
captureOutput.alwaysDiscardsLateVideoFrames = YES;
dispatch_queue_t queue;
queue = dispatch_queue_create("cameraQueue", NULL);
[captureOutput setSampleBufferDelegate:self queue:queue];
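// Frames are delivered on this serial background queue, so the delegate below must not touch
// UIKit directly; results are handed to the main thread via performSelectorOnMainThread.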
NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
[captureOutput setVideoSettings:videoSettings];
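// BGRA output matches the byte order assumed by the CGBitmapContextCreate call
// (kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst) in the capture delegate.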
[_session addOutput:captureOutput];
// 4. Create and configure the still image output
_captureOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG,AVVideoCodecKey,nil];
[_captureOutput setOutputSettings:outputSettings];
[_session addOutput:_captureOutput];
// 5. Preview layer
_preview = [AVCaptureVideoPreviewLayer layerWithSession: _session];
_preview.frame = CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height);
_preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.view.layer addSublayer:_preview];
// Detect whether the device supports phase-detection autofocus
if ([[[UIDevice currentDevice] systemVersion] floatValue] >= 8.0) {
AVCaptureDeviceFormat *deviceFormat = _device.activeFormat;
if (deviceFormat.autoFocusSystem == AVCaptureAutoFocusSystemPhaseDetection){
_isFoucePixel = YES;
_MaxFR = 2;
}
}
}
- (void)createCameraView
{
// Dimmed overlay with a transparent scan window
CAShapeLayer *maskWithHole = [CAShapeLayer layer];
// Both frames are defined in the same coordinate system
CGRect biggerRect = self.view.bounds;
CGFloat offset = 1.0f;
if ([[UIScreen mainScreen] scale] >= 2) {
offset = 0.5;
}
CGRect smallFrame = CGRectMake(45, 100, 300, 500);
CGRect smallerRect = CGRectInset(smallFrame, -offset, -offset) ;
UIBezierPath *maskPath = [UIBezierPath bezierPath];
[maskPath moveToPoint:CGPointMake(CGRectGetMinX(biggerRect), CGRectGetMinY(biggerRect))];
[maskPath addLineToPoint:CGPointMake(CGRectGetMinX(biggerRect), CGRectGetMaxY(biggerRect))];
[maskPath addLineToPoint:CGPointMake(CGRectGetMaxX(biggerRect), CGRectGetMaxY(biggerRect))];
[maskPath addLineToPoint:CGPointMake(CGRectGetMaxX(biggerRect), CGRectGetMinY(biggerRect))];
[maskPath addLineToPoint:CGPointMake(CGRectGetMinX(biggerRect), CGRectGetMinY(biggerRect))];
[maskPath moveToPoint:CGPointMake(CGRectGetMinX(smallerRect), CGRectGetMinY(smallerRect))];
[maskPath addLineToPoint:CGPointMake(CGRectGetMinX(smallerRect), CGRectGetMaxY(smallerRect))];
[maskPath addLineToPoint:CGPointMake(CGRectGetMaxX(smallerRect), CGRectGetMaxY(smallerRect))];
[maskPath addLineToPoint:CGPointMake(CGRectGetMaxX(smallerRect), CGRectGetMinY(smallerRect))];
[maskPath addLineToPoint:CGPointMake(CGRectGetMinX(smallerRect), CGRectGetMinY(smallerRect))];
[maskWithHole setPath:[maskPath CGPath]];
[maskWithHole setFillRule:kCAFillRuleEvenOdd];
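// The even-odd fill rule leaves the inner rectangle unfilled, so the camera preview
// shows through a clear window while the rest of the screen is dimmed.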
[maskWithHole setFillColor:[[UIColor colorWithWhite:0 alpha:0.35] CGColor]];
[self.view.layer addSublayer:maskWithHole];
[self.view.layer setMasksToBounds:YES];
/* Camera buttons, laid out for both iPhone and iPad; adjust the UI to your own needs. */
// Back and flash buttons
CGFloat backWidth = 35;
if (kScreenHeight>=1024) {
backWidth = 50;
}
CGFloat s = 80;
CGFloat s1 = 0;
if (kScreenHeight==480) {
s = 60;
s1 = 10;
}
UIButton *backBtn = [[UIButton alloc]initWithFrame:CGRectMake(kScreenWidth/16,kScreenWidth/16-s1, backWidth, backWidth)];
[backBtn addTarget:self action:@selector(backAction) forControlEvents:UIControlEventTouchUpInside];
[backBtn setImage:[UIImage imageNamed:@"back_camera_btn"] forState:UIControlStateNormal];
backBtn.titleLabel.textAlignment = NSTextAlignmentLeft;
[self.view addSubview:backBtn];
UIButton *flashBtn = [[UIButton alloc]initWithFrame:CGRectMake(kScreenWidth-kScreenWidth/16-backWidth,kScreenWidth/16-s1, backWidth, backWidth)];
[flashBtn setImage:[UIImage imageNamed:@"flash_camera_btn"] forState:UIControlStateNormal];
[flashBtn addTarget:self action:@selector(modeBtn) forControlEvents:UIControlEventTouchUpInside];
[self.view addSubview:flashBtn];
// Capture controls: pull-up button and shutter button
UIButton *upBtn = [[UIButton alloc]initWithFrame:CGRectMake(kScreenWidth/2-60, kScreenHeight-20, 120, 20)];
upBtn.tag = 1001;
[upBtn addTarget:self action:@selector(upBtn:) forControlEvents:UIControlEventTouchUpInside];
[upBtn setImage:[UIImage imageNamed:@"locker_btn_def"] forState:UIControlStateNormal];
[self.view addSubview:upBtn];
UIButton *photoBtn = [[UIButton alloc]initWithFrame:CGRectMake(kScreenWidth/2-30,kScreenHeight-s,60, 60)];
photoBtn.tag = 1000;
photoBtn.hidden = YES;
[photoBtn setImage:[UIImage imageNamed:@"take_pic_btn"] forState:UIControlStateNormal];
[photoBtn addTarget:self action:@selector(photoBtn) forControlEvents:UIControlEventTouchUpInside];
[photoBtn setTitleColor:[UIColor grayColor] forState:UIControlStateHighlighted];
[self.view addSubview:photoBtn];
[self.view bringSubviewToFront:photoBtn];
}
#pragma mark - UIAlertViewDelegate
- (void)alertView:(UIAlertView *)alertView clickedButtonAtIndex:(NSInteger)buttonIndex{
[self.navigationController popViewControllerAnimated:YES];
}
// Hide the status bar
- (UIStatusBarStyle)preferredStatusBarStyle{
return UIStatusBarStyleDefault;
}
- (BOOL)prefersStatusBarHidden{
return YES;
}
// Trigger autofocus at the center of the preview
- (void)fouceMode
{
NSError *error;
AVCaptureDevice *device = [self cameraWithPosition:AVCaptureDevicePositionBack];
if ([device isFocusModeSupported:AVCaptureFocusModeAutoFocus])
{
if ([device lockForConfiguration:&error]) {
CGPoint cameraPoint = [_preview captureDevicePointOfInterestForPoint:self.view.center];
[device setFocusPointOfInterest:cameraPoint];
[device setFocusMode:AVCaptureFocusModeAutoFocus];
[device unlockForConfiguration];
} else {
//NSLog(@"Error: %@", error);
}
}
}
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices){
if (device.position == position){
return device;
}
}
return nil;
}
- (NSString *)getPath:(NSString*)fileName
{
NSString *bundlePath = [NSBundle mainBundle].bundlePath;
NSString *path = [bundlePath stringByAppendingPathComponent:fileName];
return path;
}
- (NSString *)simpleRecognition:(cv::Mat&)src
{
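// Note: constructing PipelinePR below reloads the cascade and all Caffe models from the
// bundle on every call, which is expensive; caching the pipeline would be a possible
// optimization, but is not done here.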
NSString *path_1 = [self getPath:@"cascade.xml"];
NSString *path_2 = [self getPath:@"HorizonalFinemapping.prototxt"];
NSString *path_3 = [self getPath:@"HorizonalFinemapping.caffemodel"];
NSString *path_4 = [self getPath:@"Segmentation.prototxt"];
NSString *path_5 = [self getPath:@"Segmentation.caffemodel"];
NSString *path_6 = [self getPath:@"CharacterRecognization.prototxt"];
NSString *path_7 = [self getPath:@"CharacterRecognization.caffemodel"];
std::string cpath_1([path_1 UTF8String]);
std::string cpath_2([path_2 UTF8String]);
std::string cpath_3([path_3 UTF8String]);
std::string cpath_4([path_4 UTF8String]);
std::string cpath_5([path_5 UTF8String]);
std::string cpath_6([path_6 UTF8String]);
std::string cpath_7([path_7 UTF8String]);
// Build the recognition pipeline from the bundled model files
pr::PipelinePR pr2 = pr::PipelinePR(cpath_1, cpath_2, cpath_3, cpath_4, cpath_5, cpath_6, cpath_7);
std::vector<pr::PlateInfo> list_res = pr2.RunPiplineAsImage(src);
std::string concat_results = "";
for(auto one:list_res) {
if(one.confidence>0.7) {
concat_results += one.getPlateName()+",";
}
}
NSString *str = [NSString stringWithCString:concat_results.c_str() encoding:NSUTF8StringEncoding];
if (str.length > 0) {
// Drop the trailing comma appended in the loop above
str = [str substringToIndex:str.length - 1];
} else {
str = @"No plate recognized";
}
NSLog(@"===> Recognition result = %@", str);
return str;
}
#pragma mark - Actions
// Back button tap handler
- (void)backAction
{
[self dismissViewControllerAnimated:YES completion:nil];
}
// Flash (torch) button tap handler
- (void)modeBtn
{
if (![_device hasTorch]) {
//NSLog(@"no torch");
} else {
[_device lockForConfiguration:nil];
if (!_on) {
[_device setTorchMode: AVCaptureTorchModeOn];
_on = YES;
}else{
[_device setTorchMode: AVCaptureTorchModeOff];
_on = NO;
}
[_device unlockForConfiguration];
}
}
// Pull-up button tap handler
- (void)upBtn:(UIButton *)upBtn
{
UIButton *photoBtn = (UIButton *)[self.view viewWithTag:1000];
photoBtn.hidden = NO;
upBtn.hidden = YES;
}
// Shutter button tap handler
- (void)photoBtn
{
self.isProcessingImage = YES;
//get connection
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in _captureOutput.connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo] ) {
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
//get UIImage
[_captureOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:
^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
if (imageSampleBuffer != NULL) {
// Stop the preview
[_session stopRunning];
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *tempImage = [[UIImage alloc] initWithData:imageData];
UIImage *temp_image = [Utility scaleAndRotateImageBackCamera:tempImage];
source_image = [Utility cvMatFromUIImage:temp_image];
NSString* text = [self simpleRecognition:source_image];
NSMutableDictionary* resultDict = [NSMutableDictionary new];
resultDict[@"image"] = temp_image;
resultDict[@"text"] = text;
[self performSelectorOnMainThread:@selector(readyToGetImage:) withObject:resultDict waitUntilDone:NO];
self.isProcessingImage = NO;
}
}];
}
// Grab frames from the camera buffer
#pragma mark - AVCaptureSession delegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer,0);
uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
/* Create a CGImageRef from the CVImageBufferRef; the buffer stays locked while baseAddress is in use */
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
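// Map the BGRA pixel data into a device-RGB bitmap context
// (32-bit little-endian with premultiplied alpha in the first component).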
CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGImageRef newImage = CGBitmapContextCreateImage(newContext);
/* Release the context and color space, then unlock the pixel buffer */
CGContextRelease(newContext);
CGColorSpaceRelease(colorSpace);
CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
/* Keep the latest frame; its orientation is corrected right before recognition */
self.image = [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationUp];
/* Release the CGImageRef */
CGImageRelease(newImage);
// Frame-by-frame recognition
if (_capture == YES) { // Navigation transition finished
if (self.isProcessingImage==NO) { // Skip recognition while a still capture is being processed
if (!self.adjustingFocus) { // Not currently focusing (contrast AF); under phase-detection AF this flag does not change
if (_isLensChanged == _isIOS8AndFoucePixelLensPosition) {
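// Under phase-detection AF, only process frames once the lens position has stopped
// changing between frames, i.e. the lens is steady and the image is likely in focus.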
_count++;
if (_count >= _MaxFR) {
// Run recognition on this frame
UIImage *temp_image = [Utility scaleAndRotateImageBackCamera:self.image];
source_image = [Utility cvMatFromUIImage:temp_image];
NSString* text = [self simpleRecognition:source_image];
if (text.length == 7) { // Recognition succeeded (a 7-character plate)
_count = 0;
// Stop the preview
[_session stopRunning];
// Vibrate to signal success
AudioServicesPlaySystemSound(kSystemSoundID_Vibrate);
NSMutableDictionary* resultDict = [NSMutableDictionary new];
resultDict[@"image"] = temp_image;
resultDict[@"text"] = text;
[self performSelectorOnMainThread:@selector(readyToGetImage:) withObject:resultDict waitUntilDone:NO];
}
}
} else {
_isLensChanged = _isIOS8AndFoucePixelLensPosition;
_count = 0;
}
}
}
}
}
// A result is ready: dismiss the camera and hand it back via the callback
- (void)readyToGetImage:(NSDictionary *)resultDict
{
[self dismissViewControllerAnimated:NO completion:^{
}];
if (self.resultCB) {
self.resultCB(resultDict[@"text"], resultDict[@"image"]);
}
}
- (void)observeValueForKeyPath:(NSString*)keyPath ofObject:(id)object change:(NSDictionary*)change context:(void*)context {
/* Contrast-detection autofocus: observe the adjustingFocus flag */
if([keyPath isEqualToString:@"adjustingFocus"]){
self.adjustingFocus =[[change objectForKey:NSKeyValueChangeNewKey] isEqualToNumber:[NSNumber numberWithInt:1]];
}
/* Phase-detection autofocus: observe the lens position */
if([keyPath isEqualToString:@"lensPosition"]){
_isIOS8AndFoucePixelLensPosition =[[change objectForKey:NSKeyValueChangeNewKey] floatValue];
//NSLog(@"observed lensPosition == %f", _isIOS8AndFoucePixelLensPosition);
}
}
@end
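/*
Usage sketch (assumes CameraViewController.h exposes the `resultCB` block property used in
readyToGetImage: above, taking the recognized text and the captured image in that order):

    CameraViewController *camera = [[CameraViewController alloc] init];
    camera.resultCB = ^(NSString *text, UIImage *image) {
        NSLog(@"Recognized plate: %@", text);
    };
    [self presentViewController:camera animated:YES completion:nil];
*/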