前言
最近在開發一個小項目,要用到 AVFoundation 來錄制小視頻,在開發的過程中遇到不少的坑,希望記錄下來。
整個 AVFoundation 框架真是相當復雜而有序的(這里不得不佩服 Apple 的工程師),要使用 AVFoundation 錄制小視頻,并且保存成文件,你需要用到以下的類。
- AVCaptureSession
- AVCaptureDeviceInput
- AVCaptureVideoDataOutput
- AVCaptureAudioDataOutput
- AVCaptureConnection
- AVAssetWriter
- AVAssetWriterInput
類的定義
建立一個 Session 類,先定義好必須的屬性
// Owns every object needed to capture video + audio and write them to a file:
// the capture session, per-media inputs/outputs/connections, and the writer chain.
class VideoSessionEntity: NSObject {
// Control center of the whole capture pipeline.
let session = AVCaptureSession()
// Destination file for the current recording (set in startRecording()).
var tmpFileURL: NSURL?
// Camera source feeding the session.
var videoInput: AVCaptureDeviceInput?
// Delivers raw video sample buffers to the delegate.
var videoOutput: AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()
// Used in the delegate callback to tell video buffers apart from audio ones.
var videoConnection: AVCaptureConnection?
// Microphone source feeding the session.
var audioInput: AVCaptureDeviceInput?
// Delivers raw audio sample buffers to the delegate.
var audioOutput: AVCaptureAudioDataOutput = AVCaptureAudioDataOutput()
// Counterpart of videoConnection for audio buffers.
var audioConnection: AVCaptureConnection?
// Writes the captured buffers into an .mp4 container.
var assetWriter: AVAssetWriter?
// Compressing inputs of the writer, one per media type.
var videoWriterInput: AVAssetWriterInput?
var audioWriterInput: AVAssetWriterInput?
}
我們需要這么多的屬性去完成我們的工作,這里面包括視頻、音頻的相關屬性。
- session 是整個錄制工作的控制中心
- videoInput 是視頻源的入口
- videoOutput 是視頻源的出口
- videoConnection 是視頻源的控制中心
- audioInput 是音頻源的入口
- audioOutput 是音頻源的出口
- audioConnection 是音頻源的控制中心
創建回調
其中 videoOutput 和 audioOutput 是需要設置 delegate 的,并且,他們的 delegate 方法是同一個的,它們各需要一個 dispatch_queue_t 進行 GCD 異步回調,這樣子創建就可以。
// Dedicated serial queues on which each data output delivers its
// sample-buffer delegate callbacks (one queue per output, as required
// by setSampleBufferDelegate(_:queue:)).
let videoDataOutputQueue = dispatch_queue_create("com.firefly.videoDataOutputQueue", DISPATCH_QUEUE_SERIAL)
let audioDataOutputQueue = dispatch_queue_create("com.firefly.audioDataOutputQueue", DISPATCH_QUEUE_SERIAL)
初始化
在初始化這些屬性前,先寫兩個方法,獲取設備。
/// Returns the capture device matching the configured camera position
/// (front or back), or nil when no such camera is available.
func deviceForVideo() -> AVCaptureDevice? {
    let candidates = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
    for candidate in candidates {
        guard let camera = candidate as? AVCaptureDevice else { continue }
        // videoSource selects the front or the back camera.
        if camera.position == self.videoSource {
            return camera
        }
    }
    return nil
}
/// Returns the first available audio capture device (microphone), or nil.
func deviceForAudio() -> AVCaptureDevice? {
    // `for case let ... as` keeps only elements that really are capture devices;
    // the first match wins, exactly like the original loop.
    for case let microphone as AVCaptureDevice in AVCaptureDevice.devicesWithMediaType(AVMediaTypeAudio) {
        return microphone
    }
    return nil
}
然后,我們可以初始化這些屬性了,其實都是簡單的操作,就是比較繁瑣。
/// Configures the video half of the pipeline: caps the frame rate,
/// wires the camera input and data output into the session, picks a
/// small preset, and stores the connection used later to identify
/// video buffers in the delegate callback.
func configureVideo() {
    if let device = FireFlyCore.sharedCore.post.videoManager.deviceForVideo() {
        do {
            // `try` instead of `try!`: a failed configuration lock should not
            // crash the app — we simply keep the device's default frame rate.
            try device.lockForConfiguration()
            // Cap capture at 24 fps.
            device.activeVideoMaxFrameDuration = CMTimeMake(1, 24)
            device.unlockForConfiguration()
        }
        catch {
            print("configureVideo: could not lock device for configuration: \(error)")
        }
        do {
            let input = try AVCaptureDeviceInput(device: device)
            videoInput = input
            videoOutput.setSampleBufferDelegate(self, queue: videoDataOutputQueue)
            if session.canAddInput(input) {
                session.addInput(input)
            }
            if session.canAddOutput(videoOutput) {
                session.addOutput(videoOutput)
            }
            // A low-resolution preset keeps short clips small.
            if session.canSetSessionPreset(AVCaptureSessionPreset352x288) {
                session.sessionPreset = AVCaptureSessionPreset352x288
            }
            videoConnection = videoOutput.connectionWithMediaType(AVMediaTypeVideo)
        }
        catch {
            // Surface the failure instead of silently swallowing it.
            print("configureVideo: could not create video input: \(error)")
        }
    }
}
/// Configures the audio half of the pipeline: wires the microphone
/// input and audio data output into the session and stores the
/// connection used to identify audio buffers in the delegate callback.
func configureAudio() {
    if let device = FireFlyCore.sharedCore.post.videoManager.deviceForAudio() {
        do {
            let input = try AVCaptureDeviceInput(device: device)
            audioInput = input
            audioOutput.setSampleBufferDelegate(self, queue: audioDataOutputQueue)
            if session.canAddInput(input) {
                session.addInput(input)
            }
            if session.canAddOutput(audioOutput) {
                session.addOutput(audioOutput)
            }
            audioConnection = audioOutput.connectionWithMediaType(AVMediaTypeAudio)
        }
        catch {
            // Surface the failure instead of silently swallowing it.
            print("configureAudio: could not create audio input: \(error)")
        }
    }
}
如果沒有任何問題,你可以調用 session.startRunning()
方法,這個時候,func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!)
代理方法應該可以收到好多的數據回調, 這代表工作正常。
寫入文件
我們需要初始化 AssetWriter,初始化 AssetWriter 需要一些參數,它們代表視頻、音頻的壓縮參數。
// Compression settings handed to the video AVAssetWriterInput.
let videoSetting: [String : AnyObject] = [
AVVideoCodecKey: AVVideoCodecH264,
// Output dimensions in pixels (before the 90° transform applied in startRecording()).
AVVideoWidthKey: 320,
AVVideoHeightKey: 240,
AVVideoCompressionPropertiesKey: [
// Square pixels (1:1 aspect ratio).
AVVideoPixelAspectRatioKey: [
AVVideoPixelAspectRatioHorizontalSpacingKey: 1,
AVVideoPixelAspectRatioVerticalSpacingKey: 1
],
// NOTE(review): an interval of 1 makes every frame a key frame, which
// inflates file size — presumably intentional here; confirm before reuse.
AVVideoMaxKeyFrameIntervalKey: 1,
// Target ~1.28 Mbit/s average bit rate.
AVVideoAverageBitRateKey: 1280000
]
]
// Compression settings handed to the audio AVAssetWriterInput:
// mono AAC at a 22.05 kHz sample rate.
let audioSetting: [String: AnyObject] = [
AVFormatIDKey: NSNumber(unsignedInt: kAudioFormatMPEG4AAC),
AVNumberOfChannelsKey: 1,
AVSampleRateKey: 22050
]
然后開始初始化 AssetWriter。
- 注意,這里只是初始化,不要在這里調用
startWriting()
方法,如果在此處調用這個方法,你就沒有辦法設置 startSessionAtSourceTime() 了。 - 這里還有一個坑,
.expectsMediaDataInRealTime = true
必須設為 true,否則,視頻會丟幀。
/// Creates a fresh AVAssetWriter pointed at a unique temporary .mp4 and
/// attaches realtime video/audio writer inputs. Writing itself is started
/// lazily in the capture callback so startSessionAtSourceTime can use the
/// first buffer's timestamp.
func startRecording() {
    // Random suffix gives each recording its own temp file.
    let fileURL = NSURL.fileURLWithPath("\(NSTemporaryDirectory())tmp\(arc4random()).mp4")
    tmpFileURL = fileURL
    do {
        let writer = try AVAssetWriter(URL: fileURL, fileType: AVFileTypeMPEG4)
        let video = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSetting)
        // Required for live capture — without it the writer drops frames.
        video.expectsMediaDataInRealTime = true
        // Rotate the natural landscape buffers 90° for portrait playback.
        video.transform = CGAffineTransformMakeRotation(CGFloat(M_PI / 2))
        if writer.canAddInput(video) {
            writer.addInput(video)
        }
        let audio = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSetting)
        audio.expectsMediaDataInRealTime = true
        if writer.canAddInput(audio) {
            writer.addInput(audio)
        }
        // Publish the fully-configured objects only once everything succeeded,
        // avoiding the force unwraps the original relied on.
        videoWriterInput = video
        audioWriterInput = audio
        assetWriter = writer
    }
    catch {
        // Surface the failure and make sure no half-configured writer survives.
        print("startRecording: could not create asset writer: \(error)")
        assetWriter = nil
        videoWriterInput = nil
        audioWriterInput = nil
    }
}
/// Marks both writer inputs finished and asks the writer to finalize the file.
func endRecording() {
    guard let writer = assetWriter else { return }
    // Optional chaining keeps the same "only if present" semantics
    // as the original if-let blocks.
    videoWriterInput?.markAsFinished()
    audioWriterInput?.markAsFinished()
    writer.finishWritingWithCompletionHandler { () -> Void in
    }
}
寫入文件
回調方法的可以參考這樣的寫法,如果需要為視頻添加濾鏡可以在這里實現。
/// Shared sample-buffer callback for both data outputs. Starts the writer
/// session on the first buffer seen, then forwards each buffer to the
/// matching writer input on its own queue.
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    objc_sync_enter(self)
    // BUG FIX: the early `return` below used to skip objc_sync_exit(self),
    // leaving the lock held forever and deadlocking every later callback.
    // `defer` guarantees the lock is released on every exit path.
    defer { objc_sync_exit(self) }
    if let assetWriter = assetWriter {
        // Writer has failed, been cancelled, or finished — drop the buffer.
        if assetWriter.status != .Writing && assetWriter.status != .Unknown {
            return
        }
    }
    if let assetWriter = assetWriter where assetWriter.status == AVAssetWriterStatus.Unknown {
        // First buffer after startRecording(): start writing and anchor the
        // session clock to this buffer's timestamp (this is why startWriting()
        // must not be called earlier).
        assetWriter.startWriting()
        assetWriter.startSessionAtSourceTime(CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
    }
    if connection == self.videoConnection {
        dispatch_async(videoDataOutputQueue, { () -> Void in
            if let videoWriterInput = self.videoWriterInput where videoWriterInput.readyForMoreMediaData {
                videoWriterInput.appendSampleBuffer(sampleBuffer)
            }
        })
    }
    else if connection == self.audioConnection {
        dispatch_async(audioDataOutputQueue, { () -> Void in
            if let audioWriterInput = self.audioWriterInput where audioWriterInput.readyForMoreMediaData {
                audioWriterInput.appendSampleBuffer(sampleBuffer)
            }
        })
    }
}