首先來(lái)看看借助蘋果原生CoreImage框架如何將文字信息生成二維碼圖片,毫無(wú)疑問(wèn)肯定要導(dǎo)入#import <CoreImage/CoreImage.h>這個(gè)頭文件。
實(shí)現(xiàn)以下代碼即可生成二維碼圖片,但是圖片比較模糊。
// 1. Obtain the built-in QR-code generator filter.
CIFilter *qrFilter = [CIFilter filterWithName:@"CIQRCodeGenerator"];
// 2. Reset the filter to its default configuration.
[qrFilter setDefaults];
// 3. Feed the payload string into the filter as UTF-8 data.
NSString *payload = @"http://www.baidu.com";
NSData *payloadData = [payload dataUsingEncoding:NSUTF8StringEncoding];
[qrFilter setValue:payloadData forKeyPath:@"inputMessage"];
// 4. Render the QR code and show it (small & blurry at this stage).
CIImage *qrImage = [qrFilter outputImage];
self.imageView.image = [UIImage imageWithCIImage:qrImage];
解決二維碼圖片模糊問(wèn)題,需要我們自己繪圖。新建一個(gè)CIImage的分類,然后實(shí)現(xiàn)如下代碼。直接調(diào)用這個(gè)分類的方法將CIImage轉(zhuǎn)為UIImage即可解決問(wèn)題。
/// Renders the receiver (a CIImage, typically a generated QR code) into a
/// crisp UIImage whose longest side is roughly `size` points, using
/// nearest-neighbor scaling so the QR modules stay sharp instead of blurred.
///
/// @param size Target edge length in points; the aspect ratio is preserved.
/// @return A non-interpolated UIImage rendering of the receiver.
- (UIImage *)createNonInterpolatedWithSize:(CGFloat)size
{
    CGRect extent = CGRectIntegral(self.extent);
    CGFloat scale = MIN(size / CGRectGetWidth(extent), size / CGRectGetHeight(extent));

    // 1. Create a grayscale bitmap context at the scaled pixel size.
    size_t width = CGRectGetWidth(extent) * scale;
    size_t height = CGRectGetHeight(extent) * scale;
    CGColorSpaceRef cs = CGColorSpaceCreateDeviceGray();
    CGContextRef bitmapRef = CGBitmapContextCreate(nil, width, height, 8, 0, cs, (CGBitmapInfo)kCGImageAlphaNone);
    // FIX: the color space was created but never released (Create Rule) — leak.
    CGColorSpaceRelease(cs);

    CIContext *context = [CIContext contextWithOptions:nil];
    CGImageRef bitmapImage = [context createCGImage:self fromRect:extent];
    // Disable interpolation so upscaling produces hard pixel edges.
    CGContextSetInterpolationQuality(bitmapRef, kCGInterpolationNone);
    CGContextScaleCTM(bitmapRef, scale, scale);
    CGContextDrawImage(bitmapRef, extent, bitmapImage);

    // 2. Snapshot the bitmap into a CGImage and wrap it in a UIImage.
    CGImageRef scaledImage = CGBitmapContextCreateImage(bitmapRef);
    CGContextRelease(bitmapRef);
    CGImageRelease(bitmapImage);
    UIImage *result = [UIImage imageWithCGImage:scaledImage];
    // FIX: `scaledImage` comes from a Create function and was leaked before;
    // UIImage retains its own reference, so it is safe to release here.
    CGImageRelease(scaledImage);
    return result;
}
接下來(lái)看看如何掃描二維碼。掃描二維碼是借助AVFoundation框架,相關(guān)注釋在代碼中。
#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>

@interface ViewController () <AVCaptureMetadataOutputObjectsDelegate>

// FIX: the session must be held strongly. Nothing in this class is guaranteed
// to retain it, so a `weak` session can be deallocated (silently stopping the
// scan) as soon as -touchesBegan:withEvent: returns. The original `weak` only
// worked by accident because the preview layer happens to retain its session.
@property (nonatomic, strong) AVCaptureSession *session;

// The preview layer IS retained by the view's layer hierarchy once added as a
// sublayer, so a weak back-reference is sufficient (and auto-nils after
// -removeFromSuperlayer).
@property (nonatomic, weak) AVCaptureVideoPreviewLayer *layer;

@end
@implementation ViewController
/// Starts a QR-code scanning session on the first touch:
/// camera input -> metadata output (QR) -> full-screen preview layer.
- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
    // 1. Create the capture session.
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    self.session = session;

    // 2. Configure the input.
    // AVMediaTypeVideo = camera, AVMediaTypeAudio = microphone,
    // AVMediaTypeMuxed = a combined (muxed) audio+video stream.
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *inputError = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&inputError];
    if (!input) {
        // FIX: the original ignored the error and passed a possibly-nil input
        // to -addInput:, which throws on devices without a camera (simulator)
        // or when camera access is denied.
        NSLog(@"Unable to create camera input: %@", inputError);
        return;
    }
    if ([session canAddInput:input]) {
        [session addInput:input];
    }

    // 3. Configure the metadata output.
    AVCaptureMetadataOutput *output = [[AVCaptureMetadataOutput alloc] init];
    // Deliver results on the main queue via <AVCaptureMetadataOutputObjectsDelegate>.
    [output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    if ([session canAddOutput:output]) {
        [session addOutput:output];
    }
    // NOTE: metadataObjectTypes may only be set AFTER the output has been
    // added to the session — the QR type is not available before that.
    [output setMetadataObjectTypes:@[AVMetadataObjectTypeQRCode]];

    // 4. Add the full-screen preview layer.
    AVCaptureVideoPreviewLayer *layer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    self.layer = layer;
    layer.frame = self.view.bounds;
    [self.view.layer addSublayer:layer];

    // 5. Start scanning.
    [session startRunning];
}
#pragma mark - <AVCaptureMetadataOutputObjectsDelegate>

/// Invoked (on the main queue, as configured) whenever the session recognizes
/// machine-readable codes in the camera feed. Logs the decoded payload, then
/// stops the session and tears down the preview layer.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
    if (metadataObjects.count == 0) {
        return;
    }
    // 1. Read the decoded string of the most recently reported code.
    AVMetadataMachineReadableCodeObject *code = [metadataObjects lastObject];
    NSLog(@"%@", code.stringValue);
    // 2. Stop the capture session.
    [self.session stopRunning];
    // 3. Remove the preview layer from the view hierarchy.
    [self.layer removeFromSuperlayer];
}