Magic.gif
本章介紹一下視頻采集的實現,主要功能有
1.音、視頻文件錄制播放
2.焦距設置
3.防抖功能
4.攝像頭切換
5.手電筒功能
6.聚焦處理
7.二維碼掃描
8.視頻裁剪壓縮
9.流數據采集處理(暫未處理,后期會補上)
10.旋轉檢測(<CoreMotion/CoreMotion.h> 方位檢測)
實現思路如下
由于小視頻、流媒體、二維碼掃描都使用了AVFoundation框架,只
是輸入AVCaptureInput、輸出AVCaptureOutput對象不同,對應的輸出內容處理也不
一樣,所以想寫一個工具類來集中處理
功能還是比較全的,代碼量也不小,目前大約六、七百行,通過.h文件大家可以自己
去找自己感興趣的地方去看
因為是個多功能集成類,為了不至于一上來所有的輸入輸出對象都加入進來,所以所有
輸入輸出對象以及設備管理對象均以懶加載的方式去按需加載
.h 文件
// Outcome reported when a movie-file recording finishes.
typedef NS_ENUM(NSInteger,ERecordResult) {
ERecordSucess,           // finished normally (NOTE: the "Sucess"/"Faile" spellings are part of the public API)
ERecordLessThanMinTime,  // stopped before the minimum allowed duration
ERecordFaile             // finished with an error
};
// Which AVCaptureOutput the session is configured with.
typedef NS_ENUM(NSUInteger,EAVCaptureOutputType) {
EAVCaptureMovieFileOutput, // movie-file output (short videos)
EAVCaptureVideoDataOutput, // raw sample-buffer output (streaming)
EAVCaptureMetadataOutput   // metadata output (QR/barcode scanning)
};
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
// Delegate callbacks for recording progress, file-recording start/finish,
// and detected metadata (QR/barcodes). All methods are optional.
@protocol ReCordPlayProtoCol <NSObject>
@optional
// Called on every timer tick with the elapsed and total recording time, in seconds.
- (void)joyRecordTimeCurrentTime:(CGFloat)currentTime
totalTime:(CGFloat)totalTime;
// Forwarded from AVCaptureFileOutputRecordingDelegate when file recording starts.
- (void)joyCaptureOutput:(AVCaptureFileOutput *)captureOutput
didStartRecordingToOutputFileAtURL:(NSURL *)fileURL
fromConnections:(NSArray *)connections;
// Forwarded when file recording finishes; recordResult classifies the outcome
// (error / shorter than minimum / success).
-(void)joyCaptureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
fromConnections:(NSArray *)connections error:(NSError *)error
recordResult:(ERecordResult)recordResult;
// Forwarded when metadata objects (e.g. QR codes) are scanned.
- (void)joyCaptureOutput:(AVCaptureOutput *)captureOutput
didOutputMetadataObjects:(NSArray *)metadataObjects
fromConnection:(AVCaptureConnection *)connection;
@end
// Multi-purpose AVFoundation capture helper: short-video recording, streaming
// sample buffers, and QR/barcode scanning, selected via EAVCaptureOutputType.
@interface JoyMediaRecordPlay : NSObject
@property (nonatomic,strong)AVCaptureSession *captureSession;
@property (nonatomic,strong)AVCaptureVideoPreviewLayer *preViewLayer; // preview layer to install in the host view
//@property (nonatomic,assign)TIMERBLOCK recordProgressBlock;
//@property (nonatomic,copy)IDBLOCK recordFinishBlock;
@property (nonatomic,weak)id<ReCordPlayProtoCol> delegate;
@property (nonatomic,assign)EAVCaptureOutputType captureOutputType; // output type the session is built with
#pragma mark 初始化類型,默認錄制文件
// Initialize with the desired output type (defaults to movie-file recording).
-(instancetype)initWithCaptureType:(EAVCaptureOutputType)captureType;
#pragma mark 準備錄制
// Configure the session (preset, inputs, chosen output) and start it running.
- (void)preareReCord;
#pragma mark 設置焦距
// Set the zoom factor; applied only when 1 < scale < videoMaxZoomFactor.
- (void)updateVideoScaleAndCropFactor:(CGFloat)scale;
#pragma mark 防抖功能
// Enable video stabilization (when supported) and maximize scale-and-crop.
- (void)openStabilization;
#pragma mark 開始錄制
// Start recording to the given file URL; calling while recording stops instead.
- (void)startRecordToFile:(NSURL *)outPutFile;
#pragma mark 停止錄制
// Stop the current file recording (no-op when idle).
- (void)stopCurrentVideoRecording;
#pragma mark 移除輸入
// Remove the session's inputs/outputs. Per the author, this must be called
// after each recording or the next recording's delegate callbacks never fire.
-(void)removeAVCaptureAudioDeviceInput;
#pragma mark 手電筒
// Configure the torch (forced off when the front camera is active).
- (void)switchTorch;
#pragma mark 切換攝像頭
// Swap between front and back cameras.
- (void)switchCamera;
#pragma mark 設置聚焦點
// Focus/expose at a point given in preview-layer coordinates.
- (void)setFoucusWithPoint:(CGPoint)point;
@end
// Authorization helpers, video crop/compress, and file utilities.
// NOTE(review): BOOLBLOCK/VOIDBLOCK block typedefs are not declared in this
// listing — presumably they come from the author's JoyTool pod; verify.
@interface JoyMediaRecordPlay (JoyRecorderPrivary)
// YES unless camera authorization is denied/restricted.
- (BOOL)isAvailableWithCamera;
// YES unless microphone authorization is denied/restricted.
- (BOOL)isAvailableWithMic;
// Request camera then microphone permission; reports the combined result.
- (void)getVideoAuth:(BOOLBLOCK)videoAuth;
// Show the "enable camera/microphone in Settings" alert.
- (void)showAlert;
#pragma mark 視頻裁剪壓縮
// Crop/compress the video at fileURL into mergeFilePath with the given
// width/height ratio and export preset (medium quality when presetName is nil).
+ (void)mergeAndExportVideosAtFileURLs:(NSURL *)fileURL
newUrl:(NSString *)mergeFilePath
widthHeightScale:(CGFloat)whScalle
presetName:(NSString *)presetName
mergeSucess:(VOIDBLOCK)mergeSucess;
#pragma mark 視頻保存相冊
// Save the video file into the user's photo library (fire-and-forget).
+ (void)saveToPhotoWithUrl:(NSURL *)url;
#pragma mark - 視頻地址
// Build a temp-directory path "videos/video_HHmmss.<fileType>".
+ (NSString *)generateFilePathWithType:(NSString *)fileType;
#pragma mark 獲取文件大小
// File size in megabytes; 0 when the file does not exist.
+ (CGFloat)getfileSize:(NSString *)filePath;
@end
.m文件
#import "JoyMediaRecordPlay.h"
#import <Photos/Photos.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import <JoyAlert.h>
/*
 Delegate conformances — implement only what the chosen output type needs:
 AVCaptureFileOutputRecordingDelegate         file recording (short videos)
 AVCaptureVideoDataOutputSampleBufferDelegate streaming sample buffers
 AVCaptureMetadataOutputObjectsDelegate       metadata (QR/barcode scanning)
*/
@interface JoyMediaRecordPlay ()<AVCaptureFileOutputRecordingDelegate,AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureMetadataOutputObjectsDelegate>
@property (nonatomic,strong)NSTimer *timer; // drives progress reporting while recording
@property (nonatomic,assign)CGFloat recordTime; // elapsed recording time, seconds
@property (nonatomic,assign)CGFloat totalTime; // total allowed recording time (lazy default 15 s)
@property (nonatomic,strong)AVCaptureDeviceInput *mediaDeviceInput; // camera input
@property (nonatomic,strong)AVCaptureDeviceInput *audioDeviceInput; // microphone input
@property (nonatomic,strong)AVCaptureMovieFileOutput *movieFileOutput; // movie-file output
@property (nonatomic,strong)AVCaptureStillImageOutput *stillImageOutput; // still-image (JPEG) output
@property (strong, nonatomic) AVCaptureVideoDataOutput *videoDataOutput; // raw video sample buffers
@property (strong, nonatomic) AVCaptureAudioDataOutput *audioDataOutput; // raw audio sample buffers (declared but unused in this listing)
@property (strong, nonatomic) AVCaptureMetadataOutput *metadataOutput; // metadata (QR/barcode) output
@property (strong, nonatomic) AVCaptureConnection *captureConnection; // movie-file video connection
@property (assign,nonatomic) UIBackgroundTaskIdentifier backgroundTaskIdentifier; // keeps recording alive when backgrounded
@end
// Timer tick interval and recording-duration limits, in seconds.
static const CGFloat KTimerInterval = 0.05;
static const CGFloat KMaxRecordTime = 20;
static const CGFloat KMinRecordTime = 3;
@implementation JoyMediaRecordPlay
//初始化錄制類型以確定要進行何種數據采集
-(instancetype)initWithCaptureType:(EAVCaptureOutputType)captureType{
    // Remember the desired output type, then request camera/microphone
    // authorization. On success the session is pre-configured; on refusal an
    // alert is shown. The alert uses the author's JoyAlert pod — remove
    // -showAlert or add the JoyTool pod if you don't want that dependency.
    // BUGFIX: in the original listing this explanatory comment wrapped onto
    // bare, uncommented lines, which would not compile.
    if (self = [super init])
    {
        self.captureOutputType = captureType;
        __weak __typeof (&*self)weakSelf = self;
        [self getVideoAuth:^(BOOL boolValue) {
            boolValue ? [weakSelf preareReCord] : [weakSelf showAlert];
        }];
    }
    return self;
}
-(instancetype)init{
    // Plain init: same authorization flow as the designated initializer, with
    // the output type left at its zero value (movie-file recording).
    self = [super init];
    if (self)
    {
        __weak __typeof (&*self)weakSelf = self;
        [self getVideoAuth:^(BOOL boolValue) {
            boolValue ? [weakSelf preareReCord] : [weakSelf showAlert];
        }];
    }
    return self;
}
// Total allowed recording time: defaults to 15 s when not explicitly set.
-(CGFloat)totalTime{
    if (_totalTime == 0) {
        _totalTime = 15;
    }
    return _totalTime;
}
// Lazily created session that owns all inputs/outputs and start/stop control.
-(AVCaptureSession *)captureSession{
    if (_captureSession == nil) {
        _captureSession = [[AVCaptureSession alloc] init];
    }
    return _captureSession;
}
#pragma mark private method ??????????????????????????????????????????????????
#pragma mark 視頻輸入
-(AVCaptureDeviceInput *)mediaDeviceInput{
    // Lazily build the video input from the back camera.
    if (_mediaDeviceInput == nil) {
        AVCaptureDevice *frontCamera = nil;
        AVCaptureDevice *backCamera = nil;
        for (AVCaptureDevice *camera in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
            if (camera.position == AVCaptureDevicePositionFront) { frontCamera = camera; }
            if (camera.position == AVCaptureDevicePositionBack)  { backCamera  = camera; }
        }
        // Configure continuous auto-exposure before wrapping the device.
        [self setExposureModeWithDevice:backCamera];
        _mediaDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:nil];
    }
    return _mediaDeviceInput;
}
#pragma mark 音頻輸入
-(AVCaptureDeviceInput *)audioDeviceInput{
    // Lazily build the audio input from the default microphone.
    if (_audioDeviceInput == nil) {
        AVCaptureDevice *microphone = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        NSError *error = nil;
        _audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:microphone error:&error];
    }
    return _audioDeviceInput;
}
#pragma mark 圖片輸出
-(AVCaptureStillImageOutput *)stillImageOutput{
    // Lazily create the still-image output, configured to emit JPEG data.
    if (_stillImageOutput == nil) {
        _stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
        _stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
    }
    return _stillImageOutput;
}
#pragma mark 文件輸出
-(AVCaptureMovieFileOutput *)movieFileOutput{
    // Lazily create the movie-file output used for short-video recording.
    if (_movieFileOutput == nil) {
        _movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
    }
    return _movieFileOutput;
}
#pragma mark data輸出
-(AVCaptureVideoDataOutput *)videoDataOutput{
    // Lazily create the raw video-data output (bi-planar 4:2:0, video range),
    // delivering sample buffers on a private serial queue.
    if (_videoDataOutput == nil) {
        _videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
        _videoDataOutput.videoSettings =
            @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)};
        dispatch_queue_t sampleQueue = dispatch_queue_create("Video Capture Queue", DISPATCH_QUEUE_SERIAL);
        [_videoDataOutput setSampleBufferDelegate:self queue:sampleQueue];
    }
    return _videoDataOutput;
}
#pragma mark 元數據輸出
-(AVCaptureMetadataOutput *)metadataOutput{
    // Lazily create the metadata output; scan results are delivered on the
    // main queue so the delegate can touch UI directly.
    if (_metadataOutput == nil) {
        _metadataOutput = [[AVCaptureMetadataOutput alloc] init];
        [_metadataOutput setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    }
    return _metadataOutput;
}
#pragma mark 輸入輸出對象連接
-(AVCaptureConnection *)captureConnection{
    // Lazily resolve the video connection of the movie-file output.
    if (_captureConnection == nil) {
        _captureConnection = [self.movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    }
    return _captureConnection;
}
#pragma mark layer層
-(AVCaptureVideoPreviewLayer *)preViewLayer{
    // Lazily create the preview layer bound to the capture session,
    // clipped and aspect-filled.
    if (_preViewLayer == nil) {
        AVCaptureVideoPreviewLayer *layer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
        layer.masksToBounds = YES;
        layer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        _preViewLayer = layer;
    }
    return _preViewLayer;
}
// Enable continuous auto-exposure on the device when supported.
// Device properties may only be changed between lockForConfiguration /
// unlockForConfiguration.
- (void)setExposureModeWithDevice:(AVCaptureDevice *)device{
    NSError *error = nil;
    // BUGFIX: the original ignored the lock's return value and unlocked
    // unconditionally; only configure (and unlock) when the lock succeeded.
    if ([device lockForConfiguration:&error]) {
        if ([device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
            [device setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
        }
        [device unlockForConfiguration];
    } else {
        NSLog(@"lockForConfiguration failed: %@", error.localizedDescription);
    }
}
-(NSTimer *)timer{
    // Lazily create the repeating progress timer.
    // BUGFIX: the original scheduled a repeating timer with target:self, which
    // makes the run loop retain self; since the timer is only invalidated in
    // -dealloc, -dealloc could never run (a permanent leak). Use the
    // block-based API (iOS 10+) with a weak reference instead.
    if (!_timer)
    {
        __weak __typeof(self) weakSelf = self;
        _timer = [NSTimer scheduledTimerWithTimeInterval:KTimerInterval
                                                 repeats:YES
                                                   block:^(NSTimer *timer) {
            [weakSelf startTime:timer];
        }];
    }
    return _timer;
}
#pragma mark計時器事件處理
- (void)startTime:(NSTimer *)timer{
    // Timer tick: report progress to the delegate, advance the clock, and
    // auto-stop once the maximum recording length is reached.
    id<ReCordPlayProtoCol> delegate = self.delegate;
    if ([delegate respondsToSelector:@selector(joyRecordTimeCurrentTime:totalTime:)]) {
        [delegate joyRecordTimeCurrentTime:self.recordTime totalTime:self.totalTime];
    }
    self.recordTime += KTimerInterval;
    if (self.recordTime >= KMaxRecordTime) {
        [self stopCurrentVideoRecording];
    }
}
// (Re)start the progress timer from zero.
- (void)startTimer{
    [self stopTimer];      // drop any previous timer
    self.recordTime = 0;   // reset the elapsed time
    [self.timer fire];     // lazy getter recreates and schedules a fresh timer
}
// Invalidate and discard the timer so the next access recreates it.
- (void)stopTimer{
    [_timer invalidate];
    self.timer = nil;
}
//文件視頻開始錄制代理,也就是小視頻的開始錄制
#pragma mark - AVCaptureFileOutputRecordignDelegate
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections{
    // File recording began: start the progress timer and forward to our delegate.
    [self startTimer];
    id<ReCordPlayProtoCol> delegate = self.delegate;
    if ([delegate respondsToSelector:@selector(joyCaptureOutput:didStartRecordingToOutputFileAtURL:fromConnections:)]) {
        [delegate joyCaptureOutput:captureOutput
didStartRecordingToOutputFileAtURL:fileURL
                   fromConnections:connections];
    }
}
#pragma mark 文件錄制結束代理 小視頻錄制結束
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error{
    // Recording finished (or failed): close the background task, classify the
    // outcome, and forward everything to the delegate.
    [self endBackgroundTask];
    if (![self.delegate respondsToSelector:@selector(joyCaptureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:recordResult:)]) {
        return;
    }
    ERecordResult result;
    if (error) {
        result = ERecordFaile;
    } else if (self.recordTime > KMinRecordTime) {
        result = ERecordSucess;
    } else {
        result = ERecordLessThanMinTime;
    }
    [self.delegate joyCaptureOutput:captureOutput
didFinishRecordingToOutputFileAtURL:outputFileURL
                    fromConnections:connections
                              error:error
                       recordResult:result];
}
#pragma mark 流數據丟包 流媒體
// Intentionally empty: dropped video sample buffers are ignored.
-(void)captureOutput:(AVCaptureOutput *)captureOutput didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
}
#pragma mark 流數據輸出 流媒體
// Intentionally empty: streaming sample-buffer handling is not implemented yet
// (the author notes it will be added later).
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
}
#pragma mark 掃描到數據 二維碼掃描成功
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection{
    // Metadata (QR/barcode) detected — forward the raw objects to the delegate.
    id<ReCordPlayProtoCol> delegate = self.delegate;
    if ([delegate respondsToSelector:@selector(joyCaptureOutput:didOutputMetadataObjects:fromConnection:)]) {
        [delegate joyCaptureOutput:captureOutput
          didOutputMetadataObjects:metadataObjects
                    fromConnection:connection];
    }
}
// Final teardown: stop timing, recording, and the session, then detach the preview.
// NOTE(review): the repeating NSTimer created with target:self retains self, so
// this dealloc cannot run while that timer is alive — confirm -stopTimer is
// called before the owner releases this object.
-(void)dealloc{
[self.timer invalidate];
self.timer = nil;
self.recordTime = 0;
[self stopCurrentVideoRecording];
[self.captureSession stopRunning];
[self.preViewLayer removeFromSuperlayer];
}
#pragma mark private method ??????????????????????????????????????????????????End
#pragma mark public method ?????????????????????????????????????????????????? Start
#pragma mark 準備錄制
- (void)preareReCord{
    // Configure the session: preset, camera/microphone inputs, still-image
    // output, plus the one output matching captureOutputType — then start it.
    [self.captureSession beginConfiguration];
    // BUGFIX: the original tested canSetSessionPreset:Medium but then set High;
    // test and set the same preset.
    if ([self.captureSession canSetSessionPreset:AVCaptureSessionPresetHigh]) {
        [self.captureSession setSessionPreset:AVCaptureSessionPresetHigh];
    }
    if ([self.captureSession canAddInput:self.mediaDeviceInput])  { [self.captureSession addInput:self.mediaDeviceInput]; }
    if ([self.captureSession canAddInput:self.audioDeviceInput])  { [self.captureSession addInput:self.audioDeviceInput]; }
    if ([self.captureSession canAddOutput:self.stillImageOutput]) { [self.captureSession addOutput:self.stillImageOutput]; }
    switch (self.captureOutputType)
    {
        case EAVCaptureVideoDataOutput:
            if ([self.captureSession canAddOutput:self.videoDataOutput]) { [self.captureSession addOutput:self.videoDataOutput]; }
            break;
        case EAVCaptureMetadataOutput:
            if ([self.captureSession canAddOutput:self.metadataOutput]) { [self.captureSession addOutput:self.metadataOutput]; }
            // metadataObjectTypes is only valid once the output joined a session.
            // (BUGFIX: AVMetadataObjectTypeQRCode appeared twice in the original list.)
            if ([_metadataOutput.availableMetadataObjectTypes containsObject:AVMetadataObjectTypeQRCode])
            {
                _metadataOutput.metadataObjectTypes = @[AVMetadataObjectTypeQRCode,
                                                        AVMetadataObjectTypeUPCECode,
                                                        AVMetadataObjectTypeCode39Code,
                                                        AVMetadataObjectTypeCode39Mod43Code,
                                                        AVMetadataObjectTypeEAN13Code,
                                                        AVMetadataObjectTypeEAN8Code,
                                                        AVMetadataObjectTypeCode93Code,
                                                        AVMetadataObjectTypeCode128Code,
                                                        AVMetadataObjectTypePDF417Code,
                                                        AVMetadataObjectTypeAztecCode];
            }
            break;
        default: // EAVCaptureMovieFileOutput
            if ([self.captureSession canAddOutput:self.movieFileOutput]) { [self.captureSession addOutput:self.movieFileOutput]; }
            break;
    }
    [self.captureSession commitConfiguration];
    [self openStabilization];
    [self.captureSession startRunning];
}
// NOTE: per the author, the inputs/outputs must be removed after every
// recording, otherwise the next recording's delegate callbacks never fire.
#pragma mark 移除輸入
-(void)removeAVCaptureAudioDeviceInput
{
    // Detach the shared inputs and the still-image output...
    if (self.mediaDeviceInput) { [self.captureSession removeInput:self.mediaDeviceInput]; }
    if (self.audioDeviceInput) { [self.captureSession removeInput:self.audioDeviceInput]; }
    if (self.stillImageOutput) { [self.captureSession removeOutput:self.stillImageOutput]; }
    // ...then the output matching the configured capture type.
    switch (self.captureOutputType)
    {
        case EAVCaptureVideoDataOutput:
            if (self.videoDataOutput) { [self.captureSession removeOutput:self.videoDataOutput]; }
            break;
        case EAVCaptureMetadataOutput:
            if (self.metadataOutput) { [self.captureSession removeOutput:self.metadataOutput]; }
            break;
        default:
            if (self.movieFileOutput) { [self.captureSession removeOutput:self.movieFileOutput]; }
            break;
    }
}
#pragma mark 設置焦距
- (void)updateVideoScaleAndCropFactor:(CGFloat)scale{
    // Ramp the zoom factor, but only inside the valid (1, videoMaxZoomFactor) range.
    CGFloat maxZoom = self.mediaDeviceInput.device.activeFormat.videoMaxZoomFactor;
    if (scale <= 1 || scale >= maxZoom) { return; }
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
        [captureDevice rampToVideoZoomFactor:scale withRate:10];
    }];
}
#pragma mark 防抖功能 并設置縮放比例最大以提高視頻質量
- (void)openStabilization{
    // Turn on automatic video stabilization when supported and currently off,
    // and push scale-and-crop to its maximum (per the author, for quality).
    AVCaptureConnection *connection = self.captureConnection;
    BOOL stabilizable = [connection isVideoStabilizationSupported];
    if (stabilizable && connection.activeVideoStabilizationMode == AVCaptureVideoStabilizationModeOff)
    {
        connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
    }
    connection.videoScaleAndCropFactor = connection.videoMaxScaleAndCropFactor;
}
#pragma mark 開始錄制
- (void)startRecordToFile:(NSURL *)outPutFile{
    // Toggle semantics: start a new recording when idle, otherwise stop the
    // one in progress.
    if ([self.movieFileOutput isRecording]) {
        [self.movieFileOutput stopRecording];
        return;
    }
    // When multitasking is available, keep recording alive in the background.
    if ([[UIDevice currentDevice] isMultitaskingSupported])
    {
        __weak __typeof(&*self)weakSelf = self;
        self.backgroundTaskIdentifier = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:^{
            [weakSelf endBackgroundTask];
        }];
    }
    // Match the file orientation to what the preview layer shows.
    if ([self.captureConnection isVideoOrientationSupported]) {
        self.captureConnection.videoOrientation = [self.preViewLayer connection].videoOrientation;
    }
    self.recordTime = 0.0f;
    [self.movieFileOutput startRecordingToOutputFileURL:outPutFile recordingDelegate:self];
}
#pragma mark - 視頻錄制
-(void)endBackgroundTask
{
    // Close out the background task (if any) and reset the identifier.
    UIBackgroundTaskIdentifier taskID = self.backgroundTaskIdentifier;
    if (taskID != UIBackgroundTaskInvalid) {
        [[UIApplication sharedApplication] endBackgroundTask:taskID];
    }
    self.backgroundTaskIdentifier = UIBackgroundTaskInvalid;
}
#pragma mark 暫停
- (void)stopCurrentVideoRecording
{
    // No-op unless a file recording is actually in progress.
    if (!self.movieFileOutput.isRecording) { return; }
    [self stopTimer];
    [self.movieFileOutput stopRecording];
}
#pragma mark 手電筒
- (void)switchTorch{
    // Configure the torch off the main thread: auto for the back camera,
    // forced off when the front camera is active.
    __weak __typeof (&*self)weakSelf = self;
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        // BUGFIX: some devices have no torch at all; bail out early.
        if (![device hasTorch]) { return; }
        NSError *error = nil;
        // BUGFIX: the original tested `error` instead of the lock's return
        // value and then configured the device even when the lock failed.
        if (![device lockForConfiguration:&error]) {
            NSLog(@"error:%@", error.description);
            return;
        }
        AVCaptureTorchMode torchMode = AVCaptureTorchModeAuto;
        AVCaptureDevice *currentDevice = [weakSelf.mediaDeviceInput device];
        if (currentDevice.position == AVCaptureDevicePositionFront) { torchMode = AVCaptureTorchModeOff; }
        if ([device isTorchModeSupported:torchMode]) { [device setTorchMode:torchMode]; }
        [device unlockForConfiguration];
    });
}
#pragma mark 切換攝像頭
- (void)switchCamera{
    // Swap front/back camera by replacing the session's video input.
    AVCaptureDevice *swithToDevice = [self getSwitchCameraDevice];
    if (!swithToDevice) { return; } // no opposite camera available
    [_captureSession beginConfiguration];
    [_captureSession removeInput:_mediaDeviceInput];
    // BUGFIX: the original called lockForConfiguration: here and never
    // unlocked, leaving the device permanently locked.
    // -setExposureModeWithDevice: performs its own lock/unlock.
    [self setExposureModeWithDevice:swithToDevice];
    self.mediaDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:swithToDevice error:nil];
    // Re-attach the input for the newly selected camera.
    if ([_captureSession canAddInput:_mediaDeviceInput]) {
        [_captureSession addInput:_mediaDeviceInput];
    }
    [_captureSession commitConfiguration];
}
// Intentionally empty — cancel support is not implemented in this listing.
- (void)cancleRecord{
}
#pragma mark 設置對焦
- (void)setFoucusWithPoint:(CGPoint)point{
    // Convert from preview-layer coordinates to device coordinates, then
    // auto-focus and auto-expose there.
    CGPoint devicePoint = [self.preViewLayer captureDevicePointOfInterestForPoint:point];
    [self focusWithMode:AVCaptureFocusModeAutoFocus
           exposureMode:AVCaptureExposureModeAutoExpose
                atPoint:devicePoint];
}
/**
 Apply focus/exposure mode and point-of-interest on the current camera,
 skipping anything the device does not support.
 @param focusMode    desired focus mode
 @param exposureMode desired exposure mode
 @param point        point of interest in device coordinates
 */
-(void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point{
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
        if ([captureDevice isFocusModeSupported:focusMode])          { [captureDevice setFocusMode:focusMode]; }
        if ([captureDevice isFocusPointOfInterestSupported])         { [captureDevice setFocusPointOfInterest:point]; }
        if ([captureDevice isExposureModeSupported:exposureMode])    { [captureDevice setExposureMode:exposureMode]; }
        if ([captureDevice isExposurePointOfInterestSupported])      { [captureDevice setExposurePointOfInterest:point]; }
    }];
}
/**
 Run `propertyChange` on the current camera between lockForConfiguration /
 unlockForConfiguration (required before mutating any device property).
 @param propertyChange block receiving the locked AVCaptureDevice
 */
- (void)changeDeviceProperty:(IDBLOCK)propertyChange
{
    AVCaptureDevice *device = [self.mediaDeviceInput device];
    NSError *lockError = nil;
    if (![device lockForConfiguration:&lockError]) {
        NSLog(@"設置設備屬性過程發生錯誤,錯誤信息:%@", lockError.localizedDescription);
        return;
    }
    propertyChange(device);
    [device unlockForConfiguration];
}
- (AVCaptureDevice *)getSwitchCameraDevice{
    // Return the camera opposite to the one currently in use
    // (unspecified/front -> back, back -> front); nil when not found.
    AVCaptureDevicePosition currentPosition = [[self.mediaDeviceInput device] position];
    AVCaptureDevicePosition wantedPosition =
        (currentPosition == AVCaptureDevicePositionBack) ? AVCaptureDevicePositionFront
                                                         : AVCaptureDevicePositionBack;
    for (AVCaptureDevice *camera in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (camera.position == wantedPosition) { return camera; }
    }
    return nil;
}
@end
##類別,權限處理、視頻裁剪壓縮、視頻存儲
#pragma mark 權限認證、緩存處理 類別???????????????????????????????????????????????????????????????????????????開始
@implementation JoyMediaRecordPlay(JoyRecorderPrivary)
// YES unless camera access is denied or restricted.
- (BOOL)isAvailableWithCamera
{
return [self isAvailableWithDeviveMediaType:AVMediaTypeVideo];
}
// YES unless microphone access is denied or restricted.
- (BOOL)isAvailableWithMic
{
return [self isAvailableWithDeviveMediaType:AVMediaTypeAudio];
}
// Whether capture for the given media type is not denied/restricted.
- (BOOL)isAvailableWithDeviveMediaType:(NSString *)mediaType
{
    // BUGFIX: the original ignored `mediaType` (it always queried video) and
    // compared against AssetsLibrary's ALAuthorizationStatus constants. Query
    // the requested media type and use AVFoundation's own enum.
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:mediaType];
    return !(status == AVAuthorizationStatusDenied || status == AVAuthorizationStatusRestricted);
}
- (void)getVideoAuth:(BOOLBLOCK)videoAuth{
    // Resolve camera authorization, then chain into the microphone request.
    // videoAuth(NO) fires as soon as either permission is unavailable.
    __weak typeof(self)weakSelf = self;
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    switch (status) {
        case AVAuthorizationStatusNotDetermined:
            // First launch: ask the user, then continue with the audio check.
            [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                                     completionHandler:^(BOOL granted) {
                if (granted) {
                    [weakSelf authAudio:videoAuth];
                } else {
                    videoAuth(NO);
                }
            }];
            break;
        case AVAuthorizationStatusAuthorized:
            [self authAudio:videoAuth];
            break;
        default: // denied or restricted
            videoAuth(NO);
            break;
    }
}
- (void)authAudio:(BOOLBLOCK)audio {
    // Request microphone permission and report the result.
    AVAudioSession *session = [AVAudioSession sharedInstance];
    if (![session respondsToSelector:@selector(requestRecordPermission:)]) {
        // Pre-iOS 7: no record-permission API; the callback is simply not
        // invoked (matches the original behavior).
        return;
    }
    [session requestRecordPermission:^(BOOL granted) {
        audio(granted);
    }];
}
- (void)showAlert{
    // Ask the user to enable camera/microphone access in Settings.
    // BUGFIX: the original title contained a literal "%@" that was displayed
    // to the user verbatim; substitute the app's display name.
    NSString *appName = [[NSBundle mainBundle] objectForInfoDictionaryKey:@"CFBundleDisplayName"]
                        ?: [[NSBundle mainBundle] objectForInfoDictionaryKey:(NSString *)kCFBundleNameKey];
    NSString *title = [NSString stringWithFormat:@"請在iPhone的“設置-隱私”選項中,允許%@訪問你的攝像頭和麥克風。", appName];
    [[JoyAlert shareAlert] showAlertViewWithTitle:title
                                          message:nil
                                           cancle:@"好"
                                          confirm:nil
                                       alertBlock:nil];
}
#pragma mark 視頻保存相冊
+ (void)saveToPhotoWithUrl:(NSURL *)url{
    // Write the recorded video into the user's photo library.
    // Fire-and-forget: completion/errors are not observed.
    PHPhotoLibrary *library = [PHPhotoLibrary sharedPhotoLibrary];
    [library performChanges:^{
        [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:url];
    } completionHandler:nil];
}
#pragma mark 視頻裁剪 ??????????????????????????????????????????????????????????????開始
/*
 Crop/compress a recorded video.
 fileURL:       source video URL
 mergeFilePath: destination file path
 whScalle:      desired width/height ratio for the cropped output
 presetName:    export quality preset; AVAssetExportPresetMediumQuality when nil
 mergeSucess:   invoked on the main queue when the export completes
*/
+ (void)mergeAndExportVideosAtFileURLs:(NSURL *)fileURL newUrl:(NSString *)mergeFilePath widthHeightScale:(CGFloat)whScalle presetName:(NSString *)presetName mergeSucess:(VOIDBLOCK)mergeSucess
{
NSError *error = nil;
CMTime totalDuration = kCMTimeZero;
// Wrap the file in an AVAsset; bail out silently if it cannot be created.
AVAsset *asset = [AVAsset assetWithURL:fileURL];
if (!asset) {
return;
}
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
// Grab the first video track. NOTE(review): when the asset has no video track
// this stays nil and the helpers below receive a nil track — confirm intended.
NSArray * assetArray = [asset tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack *assetTrack;
if (assetArray.count) {
assetTrack = [assetArray objectAtIndex:0];
}
// Insert audio then video at totalDuration (still zero here; CMTime is passed
// by value, so the helpers never modify this local).
[JoyMediaRecordPlay audioTrackWith:mixComposition assetTrack:assetTrack asset:asset totalDuration:totalDuration error:error];
AVMutableCompositionTrack *videoTrack = [JoyMediaRecordPlay videoTrackWith:mixComposition assetTrack:assetTrack asset:asset totalDuration:totalDuration error:error];
// Render edge = the smaller side of the source's natural size.
CGFloat renderW = [JoyMediaRecordPlay videoTrackRenderSizeWithassetTrack:assetTrack];
totalDuration = CMTimeAdd(totalDuration, asset.duration);
// Build the crop/scale layer instructions and run the export.
NSMutableArray *layerInstructionArray = [JoyMediaRecordPlay assetArrayWith:videoTrack totalDuration:totalDuration assetTrack:assetTrack renderW:renderW widthHeightScale:whScalle];
[JoyMediaRecordPlay mergingVideoWithmergeFilePath:mergeFilePath layerInstructionArray:layerInstructionArray mixComposition:mixComposition totalDuration:totalDuration renderW:renderW widthHeightScale:whScalle presetName:presetName mergeSucess:mergeSucess];
}
// Export the composition to mergeFilePath, applying the crop/scale layer
// instructions, at the given quality preset (medium quality when nil).
+(void)mergingVideoWithmergeFilePath:(NSString *)mergeFilePath
               layerInstructionArray:(NSMutableArray*)layerInstructionArray
                      mixComposition:(AVMutableComposition *)mixComposition
                       totalDuration:(CMTime)totalDuration
                             renderW:(CGFloat)renderW
                    widthHeightScale:(CGFloat)whScalle
                          presetName:(NSString *)presetName
                         mergeSucess:(VOIDBLOCK)mergeSucess
{
    // One instruction covering the whole duration, carrying the crop/scale layers.
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, totalDuration);
    instruction.layerInstructions = layerInstructionArray;

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.instructions = @[instruction];
    videoComposition.frameDuration = CMTimeMake(1, 30); // 30 fps
    // Height is derived from the requested width/height ratio.
    videoComposition.renderSize = CGSizeMake(renderW, renderW / whScalle);

    AVAssetExportSession *exporter =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:presetName ?: AVAssetExportPresetMediumQuality];
    exporter.videoComposition = videoComposition;
    exporter.outputURL = [NSURL fileURLWithPath:mergeFilePath];
    exporter.outputFileType = AVFileTypeMPEG4;
    exporter.shouldOptimizeForNetworkUse = YES;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            // Only success is reported; failures are dropped silently
            // (matches the original contract).
            if (exporter.status == AVAssetExportSessionStatusCompleted && mergeSucess) {
                mergeSucess();
            }
        });
    }];
}
// Build the single layer instruction that crops the track to the requested
// aspect ratio and scales it onto the render size.
+ (NSMutableArray *)assetArrayWith:(AVMutableCompositionTrack *)videoTrack
totalDuration:(CMTime)totalDuration
assetTrack:(AVAssetTrack *)assetTrack
renderW:(CGFloat)renderW
widthHeightScale:(CGFloat)whScalle
{
NSMutableArray *layerInstructionArray = [[NSMutableArray alloc] init];
AVMutableVideoCompositionLayerInstruction *layerInstruciton = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
// Scale factor that maps the smaller natural edge onto renderW.
CGFloat rate = renderW / MIN(assetTrack.naturalSize.width, assetTrack.naturalSize.height);
// Start from the track's preferred transform with its translation scaled by `rate`.
CGAffineTransform layerTransform = CGAffineTransformMake(assetTrack.preferredTransform.a, assetTrack.preferredTransform.b, assetTrack.preferredTransform.c, assetTrack.preferredTransform.d, assetTrack.preferredTransform.tx * rate, assetTrack.preferredTransform.ty * rate);
// Shift upward so the crop keeps the middle band of the frame.
layerTransform = CGAffineTransformConcat(layerTransform, CGAffineTransformMake(1, 0, 0, 1, 0, -(assetTrack.naturalSize.width - assetTrack.naturalSize.height/whScalle) / 2.0));
// Uniform scale; per the author this compensates front/back camera size differences.
layerTransform = CGAffineTransformScale(layerTransform, rate, rate);
[layerInstruciton setTransform:layerTransform atTime:kCMTimeZero];
// Hide the layer once playback passes the clip's end.
[layerInstruciton setOpacity:0.0 atTime:totalDuration];
[layerInstructionArray addObject:layerInstruciton];
return layerInstructionArray;
}
// Render edge length: the smaller side of the track's natural size.
+(CGFloat)videoTrackRenderSizeWithassetTrack:(AVAssetTrack *)assetTrack{
    CGSize naturalSize = assetTrack.naturalSize;
    return MIN(naturalSize.width, naturalSize.height);
}
// Create a video composition track and insert the source track's full range
// at `totalDuration`.
// NOTE(review): `error` is passed by value, so `&error` writes to a local
// copy and the caller never observes insertion failures — confirm whether
// errors need to propagate.
+(AVMutableCompositionTrack*)videoTrackWith:(AVMutableComposition *)mixComposition
assetTrack:(AVAssetTrack *)assetTrack
asset:(AVAsset *)asset
totalDuration:(CMTime)totalDuration
error:(NSError *)error{
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
ofTrack:assetTrack
atTime:totalDuration
error:&error];
return videoTrack;
}
// Append the asset's first audio track (if any) to a new composition track
// at `totalDuration`. Insertion errors are ignored (error:nil, as before).
+(void)audioTrackWith:(AVMutableComposition *)mixComposition
           assetTrack:(AVAssetTrack *)assetTrack
                asset:(AVAsset *)asset
        totalDuration:(CMTime)totalDuration
                error:(NSError *)error{
    AVMutableCompositionTrack *compositionAudioTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *sourceAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (sourceAudioTrack != nil) {
        [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                                       ofTrack:sourceAudioTrack
                                        atTime:totalDuration
                                         error:nil];
    }
}
#pragma mark 視頻裁剪 ??????????????????????????????????????????????????????????結束
#pragma mark - 視頻地址
+ (NSString *)generateFilePathWithType:(NSString *)fileType{
    // Cache directory + "/video_HHmmss.<fileType>".
    NSString *directory = [self getVideoPathCache];
    NSString *fileName = [self getVideoNameWithType:fileType];
    return [directory stringByAppendingString:fileName];
}
+ (NSString *)getVideoPathCache
{
    // Return the temp-directory "videos" folder, creating it when missing
    // (or when a non-directory file occupies that path).
    NSString *directory = [NSTemporaryDirectory() stringByAppendingPathComponent:@"videos"];
    NSFileManager *manager = [NSFileManager defaultManager];
    BOOL isDirectory = NO;
    BOOL exists = [manager fileExistsAtPath:directory isDirectory:&isDirectory];
    if (!exists || !isDirectory) {
        [manager createDirectoryAtPath:directory
           withIntermediateDirectories:YES
                            attributes:nil
                                 error:nil];
    }
    return directory;
}
+ (NSString *)getVideoNameWithType:(NSString *)fileType
{
    // Build "/video_HHmmss.<fileType>" from the current time. (The original's
    // round trip through timeIntervalSince1970 produced the same date.)
    NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
    formatter.dateFormat = @"HHmmss";
    NSString *timeString = [formatter stringFromDate:[NSDate date]];
    return [NSString stringWithFormat:@"/video_%@.%@", timeString, fileType];
}
#pragma mark 獲取文件大小
+ (CGFloat)getfileSize:(NSString *)filePath
{
    // File size in megabytes; 0 when the file does not exist.
    // A leading "file://" scheme is stripped so URLs-as-strings also work.
    NSString *path = [filePath stringByReplacingOccurrencesOfString:@"file://" withString:@""];
    NSFileManager *manager = [NSFileManager defaultManager];
    CGFloat bytes = 0;
    if ([manager fileExistsAtPath:path]) {
        bytes = [[manager attributesOfItemAtPath:path error:nil] fileSize];
        NSLog(@"視頻 - - - - - %fM,--------- %fKB", bytes / (1024.0 * 1024.0), bytes / 1024.0);
    }
    return bytes / 1024 / 1024;
}
@end
#pragma mark 權限認證、緩存處理 類別???????????????????????????????????????????????????????????????????????????結束
主文件 git 地址:見文末鏈接(原文此處為可點擊的「這兒」鏈接)
簡單寫了個demo,主要還是看player的實現,view是臨時寫了一個,你可以把回調拿到vc里去處理。joytool masony框架可以用自己的