廢話不多說,直接上代碼看注釋,這是大家的智慧結晶。
1.聲明頭文件
<p><code>
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import <AudioToolbox/AudioToolbox.h>

/// Callback invoked with the decoded string when a code is recognized.
typedef void (^YYScanSuccessBlock)(NSString *scanResult);

/// Singleton helper that wraps AVFoundation QR-code / barcode scanning.
@interface YYScanHelper : NSObject

/// Step 1: shared singleton accessor.
+ (instancetype)manager;

/// Optional view that frames the active scan region on screen.
@property (nonatomic, strong) UIView *scanView;
/// Invoked on the main queue with the decoded string after a successful scan.
@property (nonatomic, copy) YYScanSuccessBlock scanBlock;

/// Start the capture session (begin scanning).
- (void)startRunning;
/// Stop the capture session (stop scanning).
- (void)stopRunning;
/// Install the camera preview layer into the given view.
- (void)showLayer:(UIView *)superView;
/// Restrict scanning to a sub-rectangle and position the scan-frame view.
- (void)setScanningRect:(CGRect)scanRect scanView:(UIView *)scanView;
@end
</code></p>
2.代碼實現(xiàn)
<p><code>
#import "YYScanHelper.h"
#import <AVFoundation/AVFoundation.h>
@interface YYScanHelper()<AVCaptureMetadataOutputObjectsDelegate>
{
// AVFoundation objects that make up the capture pipeline.
AVCaptureSession *_session;          // connects camera input to metadata output
AVCaptureVideoPreviewLayer *_layer;  // live camera preview layer
AVCaptureMetadataOutput *_output;    // emits decoded QR/barcode metadata
AVCaptureDeviceInput *_input;        // camera video input
UIView *_superView;                  // host view for the preview layer and scan frame
}
@end
@implementation YYScanHelper

#pragma mark - Lifecycle

+ (instancetype)manager
{
    // Shared singleton instance, created exactly once via dispatch_once.
    static YYScanHelper *singleton = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        singleton = [[YYScanHelper alloc] init];
    });
    return singleton;
}

- (instancetype)init
{
    self = [super init];
    if (self) {
        // Capture session that links the camera input to the metadata output.
        _session = [[AVCaptureSession alloc] init];
        // High-quality capture improves code recognition.
        [_session setSessionPreset:AVCaptureSessionPresetHigh];
        // The simulator has no camera; skip device setup to avoid a crash.
        if (!TARGET_IPHONE_SIMULATOR) {
            // Default video capture device (back camera).
            AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
            NSError *inputError = nil;
            _input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&inputError];
            // FIX: the original passed error:nil and added the input/output
            // unconditionally; -addInput: raises an exception when the input
            // is nil or cannot be added (e.g. camera permission denied).
            if (_input && [_session canAddInput:_input]) {
                [_session addInput:_input];
            } else {
                NSLog(@"YYScanHelper: unable to create camera input: %@", inputError);
            }
            _output = [[AVCaptureMetadataOutput alloc] init];
            if ([_session canAddOutput:_output]) {
                [_session addOutput:_output];
                // Deliver delegate callbacks on the main queue so UI work is safe.
                [_output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
                // Supported symbologies: QR plus the common 1-D barcode formats.
                // (metadataObjectTypes must be set after the output joins the session.)
                _output.metadataObjectTypes = @[AVMetadataObjectTypeQRCode,
                                                AVMetadataObjectTypeEAN8Code,
                                                AVMetadataObjectTypeEAN13Code,
                                                AVMetadataObjectTypeCode128Code];
            }
        }
        _layer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
        _layer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    }
    return self;
}

#pragma mark - Public

- (void)startRunning
{
    // Begin capturing camera frames.
    [_session startRunning];
}

- (void)stopRunning
{
    // Stop capturing camera frames.
    [_session stopRunning];
}

#pragma mark - AVCaptureMetadataOutputObjectsDelegate

// FIX: the original wrote `pragma mark` without the leading `#`, which is a
// compile error, not a pragma.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    if (metadataObjects.count > 0) {
        // A code was recognized; stop the session so the result fires only once.
        [_session stopRunning];
        // Audible feedback (system sound ID 1200).
        AudioServicesPlaySystemSound(1200);
        AVMetadataMachineReadableCodeObject *metadataObject = metadataObjects.firstObject;
        if (self.scanBlock) {
            self.scanBlock(metadataObject.stringValue);
        }
        // Log the decoded string.
        NSLog(@"輸出掃描字符串為:%@",metadataObject.stringValue);
    }
}

/**
 * Restricts scanning to a sub-rectangle of the preview and positions an
 * optional highlight view over it.
 *
 * Note: AVCaptureMetadataOutput.rectOfInterest uses a rotated, normalized
 * coordinate space, hence the swapped axes: (y/H, x/W, h/H, w/W).
 * Call -showLayer: before this method so the preview layer has a non-zero
 * frame; otherwise the divisions below produce NaN.
 *
 * @param scanRect Scan region, in the superview's coordinate system.
 * @param scanView Optional view framing the scan region; it is added to the
 *                 superview previously passed to -showLayer:.
 */
- (void)setScanningRect:(CGRect)scanRect scanView:(UIView *)scanView
{
    CGFloat x, y, width, height;
    x = scanRect.origin.y / _layer.frame.size.height;
    y = scanRect.origin.x / _layer.frame.size.width;
    width = scanRect.size.height / _layer.frame.size.height;
    height = scanRect.size.width / _layer.frame.size.width;
    _output.rectOfInterest = CGRectMake(x, y, width, height);
    self.scanView = scanView;
    if (self.scanView) {
        self.scanView.frame = scanRect;
        if (_superView) {
            [_superView addSubview:self.scanView];
        }
    }
}

/**
 * Installs the camera preview layer at the back of the given view.
 *
 * @param superView The view that hosts the preview layer.
 */
- (void)showLayer:(UIView *)superView
{
    _superView = superView;
    _layer.frame = superView.layer.frame;
    // Insert at index 0 so the caller's own subviews stay on top.
    [superView.layer insertSublayer:_layer atIndex:0];
}

@end
</code></p>
3.使用方法
<p><code>
// Typical usage from a view controller.
[[YYScanHelper manager] showLayer:self.view];
[[YYScanHelper manager] setScanningRect:self.scanRect scanView:scanRectView];
// FIX: the original snippet never closed the block literal (missing "}];"),
// so it did not compile, and it left -startRunning inside the callback.
[[YYScanHelper manager] setScanBlock:^(NSString *scanResult){
    NSLog(@"打印掃描傳值結果%@",scanResult);
}];
[[YYScanHelper manager] startRunning];
</code></p>