這個(gè)給我的感覺(jué)就像是 PPT 播放一樣。這里找了一些資料,學(xué)習(xí)視頻合成方面的知識(shí)。
一:圖片和視頻的合成:
// Class extension: private state for the demo view controller.
@interface ViewController ()
// Source images that will be composed into a movie.
@property (nonatomic, strong) NSMutableArray *imageArr;
// Path of the most recently written movie file (read by -playAction).
// Fixed: the original `NSString? *` was a pasted-in mojibake character and
// does not compile; NSString properties use `copy` to guard against
// NSMutableString callers.
@property (nonatomic, copy) NSString *theVideoPath;
@end
@implementation ViewController
- (void)viewDidLoad {
    [super viewDidLoad];

    // Load the source frames. NOTE: "7" and "8" intentionally have no ".jpg"
    // suffix, exactly as in the original list — do not "fix" them without
    // checking the bundle contents.
    NSArray<NSString *> *imageNames = @[
        @"1.jpg",  @"2.jpg",  @"3.jpg",  @"4.jpg",  @"5.jpg",  @"6.jpg",
        @"7",      @"8",      @"9.jpg",  @"10.jpg", @"11.jpg", @"12.jpg",
        @"13.jpg", @"14.jpg", @"15.jpg", @"16.jpg", @"17.jpg", @"18.jpg",
        @"19.jpg", @"20.jpg", @"21.jpg", @"22.jpg", @"23.jpg"
    ];
    self.imageArr = [NSMutableArray arrayWithCapacity:imageNames.count];
    for (NSString *name in imageNames) {
        UIImage *image = [UIImage imageNamed:name];
        // The original -initWithObjects: silently truncated the whole list at
        // the first missing image; skip missing images explicitly instead.
        if (image) {
            [self.imageArr addObject:image];
        } else {
            NSLog(@"viewDidLoad: missing image resource %@", name);
        }
    }

    // "合成" button — starts the image-to-movie composition.
    UIButton *composeButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    composeButton.frame = CGRectMake(100, 100, 100, 100);
    [composeButton setTitle:@"合成" forState:UIControlStateNormal];
    [composeButton addTarget:self
                      action:@selector(testCompressionSession)
            forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:composeButton];

    // "播放" button — plays the file produced by the composition step.
    UIButton *playButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    playButton.frame = CGRectMake(100, 200, 100, 100);
    [playButton setTitle:@"播放" forState:UIControlStateNormal];
    [playButton addTarget:self
                   action:@selector(playAction)
         forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:playButton];
}
// Approach 1: stream pixel buffers into an AVAssetWriter on a background
// queue. Each image is shown for 10 frames on a timescale of 10, i.e. one
// second per image. The result is written to Documents and its path stored
// in self.theVideoPath for -playAction.
- (void)testCompressionSession
{
    NSLog(@"開始");
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *moviePath = [paths.firstObject stringByAppendingPathComponent:
                           [NSString stringWithFormat:@"%@.mp4", @"2016全球三大超跑宣傳片_超清"]];
    self.theVideoPath = moviePath;

    // Output dimensions of the generated movie.
    CGSize size = CGSizeMake(320, 400);

    // AVAssetWriter refuses to write over an existing file; remove any
    // previous output first (replaces the original POSIX unlink()).
    [[NSFileManager defaultManager] removeItemAtPath:moviePath error:NULL];
    NSLog(@"path->%@", moviePath);

    // Fixed: the original had a mojibake `?` fused into this message send
    // (`AVFileTypeQuickTimeMovie?error:`), which does not compile.
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:moviePath]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);
    // Check the returned object, not the error pointer (Cocoa convention).
    if (!videoWriter) {
        NSLog(@"error =%@", [error localizedDescription]);
        return;
    }

    NSDictionary *videoSettings = @{AVVideoCodecKey  : AVVideoCodecH264,
                                    AVVideoWidthKey  : @((int)size.width),
                                    AVVideoHeightKey : @((int)size.height)};
    AVAssetWriterInput *writerInput =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                           outputSettings:videoSettings];

    // Request 32ARGB buffers to match what -pixelBufferFromCGImage:size:
    // produces.
    NSDictionary *sourcePixelBufferAttributes =
        @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32ARGB)};
    AVAssetWriterInputPixelBufferAdaptor *adaptor =
        [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                         sourcePixelBufferAttributes:sourcePixelBufferAttributes];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];

    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    // Timing constants: kFramesPerImage frames per image, kTimescale units/s.
    static const int kFramesPerImage = 10;
    static const int32_t kTimescale = 10;

    dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
    int __block frame = 0;
    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        while ([writerInput isReadyForMoreMediaData]) {
            if (++frame >= (int)self.imageArr.count * kFramesPerImage) {
                [writerInput markAsFinished];
                // -finishWriting is deprecated and blocks; use the
                // asynchronous completion variant instead.
                [videoWriter finishWritingWithCompletionHandler:^{
                    NSLog(@"finishWriting status=%ld", (long)videoWriter.status);
                }];
                break;
            }
            int idx = frame / kFramesPerImage;
            NSLog(@"idx==%d", idx);
            CVPixelBufferRef buffer =
                [self pixelBufferFromCGImage:[self.imageArr[idx] CGImage] size:size];
            if (buffer) {
                if (![adaptor appendPixelBuffer:buffer
                           withPresentationTime:CMTimeMake(frame, kTimescale)]) {
                    NSLog(@"FAIL");
                } else {
                    NSLog(@"OK");
                }
                // -pixelBufferFromCGImage:size: follows the Create rule, so
                // we own the buffer and must release it.
                CFRelease(buffer);
            }
        }
    }];
}
// Renders `image` into a newly created 32ARGB CVPixelBuffer of `size`.
// Follows the Core Foundation Create rule: the caller owns the returned
// buffer and must CFRelease() it. Returns NULL on allocation failure.
// NOTE: the image is drawn at its own pixel size, not scaled to `size` —
// presumably the sample images already match; confirm against the assets.
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size{
    NSDictionary *options = @{(NSString *)kCVPixelBufferCGImageCompatibilityKey : @YES,
                              (NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES};
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          size.width,
                                          size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
    // NSParameterAssert compiles out of release builds; fail gracefully
    // instead of dereferencing a NULL buffer below.
    if (status != kCVReturnSuccess || pxbuffer == NULL) {
        return NULL;
    }

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // Use the buffer's actual bytes-per-row: CoreVideo may pad rows, so the
    // original hard-coded 4*width could mis-stride the bitmap.
    CGContextRef context = CGBitmapContextCreate(pxdata,
                                                 size.width,
                                                 size.height,
                                                 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);
    if (context) {
        CGContextDrawImage(context,
                           CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)),
                           image);
        CGContextRelease(context);
    }
    CGColorSpaceRelease(rgbColorSpace);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
// Plays the most recently composed movie (path stored by
// -testCompressionSession). NOTE(review): MPMoviePlayerViewController is
// deprecated since iOS 9 — AVPlayerViewController is the replacement; kept
// here to preserve the sample's dependencies.
- (void)playAction {
    // Guard: fileURLWithPath: throws on nil, so bail out if nothing has
    // been composed yet.
    if (self.theVideoPath.length == 0) {
        NSLog(@"playAction: no video available yet");
        return;
    }
    MPMoviePlayerViewController *theMovie =
        [[MPMoviePlayerViewController alloc] initWithContentURL:[NSURL fileURLWithPath:self.theVideoPath]];
    // Configure the player before presenting so the source type is in place
    // when playback starts (the original set it after presentation).
    theMovie.moviePlayer.movieSourceType = MPMovieSourceTypeFile;
    [self presentMoviePlayerViewControllerAnimated:theMovie];
    [theMovie.moviePlayer play];
}
// 第二種方式:按總時(shí)長(zhǎng)和幀率把圖片序列寫成視頻
// Approach 2: write `imagesArray` to a QuickTime movie at `path`, spreading
// the images evenly across `duration` seconds at `fps` frames per second.
// Fixed: the original signature and two message sends had mojibake `?`
// characters fused in (e.g. `size ?inDuration:`) and did not compile, and
// every pixel buffer was leaked (Create rule — never released).
- (void)writeImages:(NSArray *)imagesArray ToMovieAtPath:(NSString *)path withSize:(CGSize)size inDuration:(float)duration byFPS:(int32_t)fps{
    // Guard: empty input would divide by zero when computing averageTime.
    if (imagesArray.count == 0) {
        NSLog(@"writeImages: nothing to write");
        return;
    }

    // Wire the writer.
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);
    if (!videoWriter) {
        NSLog(@"writeImages: writer creation failed: %@", [error localizedDescription]);
        return;
    }

    NSDictionary *videoSettings = @{AVVideoCodecKey  : AVVideoCodecH264,
                                    AVVideoWidthKey  : @((int)size.width),
                                    AVVideoHeightKey : @((int)size.height)};
    AVAssetWriterInput *videoWriterInput =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                           outputSettings:videoSettings];
    AVAssetWriterInputPixelBufferAdaptor *adaptor =
        [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                                         sourcePixelBufferAttributes:nil];
    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    [videoWriter addInput:videoWriterInput];

    // Start a session.
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    // Presentation time advances by `averageFrame` ticks per image so the
    // whole sequence spans `duration` seconds.
    int frameCount = 0;
    float averageTime = duration / imagesArray.count;
    int averageFrame = (int)(averageTime * fps);

    for (UIImage *img in imagesArray) {
        CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[img CGImage] size:size];
        BOOL appendOK = NO;
        int attempt = 0;
        // Retry up to 31 times while the input catches up.
        while (!appendOK && attempt <= 30) {
            if (adaptor.assetWriterInput.readyForMoreMediaData) {
                printf("appending %d attemp%d\n", frameCount, attempt);
                CMTime frameTime = CMTimeMake(frameCount, fps);
                float frameSeconds = CMTimeGetSeconds(frameTime);
                NSLog(@"frameCount:%d,kRecordingFPS:%d,frameSeconds:%f", frameCount, fps, frameSeconds);
                appendOK = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                if (buffer) {
                    [NSThread sleepForTimeInterval:0.05];
                }
            } else {
                printf("adaptor not ready %d,%d\n", frameCount, attempt);
                [NSThread sleepForTimeInterval:0.1];
            }
            attempt++;
        }
        if (!appendOK) {
            printf("error appendingimage %d times %d\n", frameCount, attempt);
        }
        // Leak fix: we own the buffer (Create rule) — release it every
        // iteration, not never.
        if (buffer) {
            CFRelease(buffer);
        }
        frameCount = frameCount + averageFrame;
    }

    // Finish the session.
    [videoWriterInput markAsFinished];
    // -finishWriting is deprecated; use the asynchronous variant.
    [videoWriter finishWritingWithCompletionHandler:^{
        NSLog(@"finishWriting");
    }];
}
二:視頻跟音頻的合成
// 混合音樂(lè)與視頻
// Muxes a bundled mp3 audio track with a bundled mp4 video track into
// Documents/merge.mp4, then plays the result. The audio is trimmed to the
// video's duration.
- (void)merge {
    // Output directory.
    NSString *documents = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
    // Audio source.
    NSURL *audioInputUrl = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"藍(lán)瘦香菇" ofType:@"mp3"]];
    // Video source.
    NSURL *videoInputUrl = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"2016全球三大超跑宣傳片_超清" ofType:@"mp4"]];
    // Final output path/URL.
    NSString *outPutFilePath = [documents stringByAppendingPathComponent:@"merge.mp4"];
    NSURL *outputFileUrl = [NSURL fileURLWithPath:outPutFilePath];
    // AVAssetExportSession fails if the destination already exists, so a
    // second tap on the button would silently do nothing — remove it first.
    [[NSFileManager defaultManager] removeItemAtPath:outPutFilePath error:NULL];

    // Both tracks are inserted at t = 0.
    CMTime nextClistartTime = kCMTimeZero;
    AVMutableComposition *composition = [AVMutableComposition composition];

    // Video track.
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoInputUrl options:nil];
    CMTimeRange videoTimeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *videoTrack =
        [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    // Check the insert result instead of passing error:nil and continuing.
    NSError *videoError = nil;
    if (![videoTrack insertTimeRange:videoTimeRange ofTrack:videoAssetTrack atTime:nextClistartTime error:&videoError]) {
        NSLog(@"merge: failed to insert video track: %@", videoError);
        return;
    }

    // Audio track — the video is the shorter asset, so reuse its time range
    // (matches the original's stated assumption).
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audioInputUrl options:nil];
    CMTimeRange audioTimeRange = videoTimeRange;
    AVMutableCompositionTrack *audioTrack =
        [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *audioAssetTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    NSError *audioError = nil;
    if (![audioTrack insertTimeRange:audioTimeRange ofTrack:audioAssetTrack atTime:nextClistartTime error:&audioError]) {
        NSLog(@"merge: failed to insert audio track: %@", audioError);
        return;
    }

    // Export the composition.
    AVAssetExportSession *assetExport =
        [[AVAssetExportSession alloc] initWithAsset:composition
                                         presetName:AVAssetExportPresetMediumQuality];
    // The output file is named ".mp4", so export an MPEG-4 container; the
    // original AVFileTypeQuickTimeMovie produces a .mov payload with a
    // mismatched extension.
    assetExport.outputFileType = AVFileTypeMPEG4;
    assetExport.outputURL = outputFileUrl;
    assetExport.shouldOptimizeForNetworkUse = YES;
    [assetExport exportAsynchronouslyWithCompletionHandler:^{
        // Hop back to the main thread for UI work.
        dispatch_async(dispatch_get_main_queue(), ^{
            // Only play when the export actually succeeded; the original
            // played unconditionally, even after a failed export.
            if (assetExport.status == AVAssetExportSessionStatusCompleted) {
                [self playWithUrl:outputFileUrl];
            } else {
                NSLog(@"merge: export failed (%ld): %@",
                      (long)assetExport.status, assetExport.error);
            }
        });
    }];
}