概述
- 音視頻采集是直播架構(gòu)的第一步
- 音視頻采集包括兩部分
- 視頻采集
- 音頻采集
- iOS 開發(fā)中,同音視頻采集相關(guān) API 都封裝在 AVFoundation 中,導(dǎo)入該框架,即可實(shí)現(xiàn)音頻、視頻的同步采集
采集步驟
采集步驟文字描述
- 導(dǎo)入框架
- 同采集相關(guān) API 在 AVFoundation 中,因此需要先導(dǎo)入框架
- 創(chuàng)建捕捉會話(AVCaptureSession)
- 會話:用于連接輸入源、輸出源
- 輸入源:攝像頭、麥克風(fēng)
- 輸出源:對應(yīng)的視頻、音頻數(shù)據(jù)
- 設(shè)置視頻輸入源、輸出源
- 輸入源(AVCaptureDeviceInput):從攝像頭輸入(前置/后置)
- 輸出源(AVCaptureVideoDataOutput):可從代理方法中拿到數(shù)據(jù)
- 將輸入源、輸出源添加到會話中
- 設(shè)置音頻輸入源、輸出源
- 輸入源(AVCaptureDeviceInput):從麥克風(fēng)輸入
- 輸出源(AVCaptureAudioDataOutput):可從代理方法中拿到數(shù)據(jù)
- 將輸入源、輸出源添加到會話中
- 設(shè)置預(yù)覽圖層
- 將攝像頭采集的畫面添加到屏幕上
(不添加預(yù)覽圖層也可以實(shí)現(xiàn)采集,但就一般需求來說應(yīng)該添加,讓用戶能看到采集的畫面)
- 開始采集
- 開始采集方法
- 結(jié)束采集方法
- 切換攝像頭等方法
采集步驟代碼實(shí)現(xiàn)
視頻采集部分
import UIKit
import AVFoundation
// Live-capture demo controller: owns the capture session and its preview layer.
class ViewController: UIViewController {
    // Queue on which video sample buffers are delivered to the delegate.
    fileprivate lazy var videoQueue = DispatchQueue.global()
    // Capture session that connects inputs (camera/mic) to outputs (data callbacks).
    fileprivate lazy var session : AVCaptureSession = AVCaptureSession()
    // Layer that renders the camera feed on screen; bound to the session above.
    fileprivate lazy var previewLayer : AVCaptureVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.session)
}
- 開始視頻采集(從故事板拖了幾個 button)
/// Starts video capture: configures the front-camera input, the video data
/// output, the on-screen preview layer, and then runs the session.
@IBAction func startCapture() {
    // 1. Create the capture session (now a lazy stored property instead)
    // let session = AVCaptureSession()
    // self.session = session
    // 2. Set up the input source (camera)
    // 2.1. Get the camera devices (Swift 3 era API)
    guard let devices = AVCaptureDevice.devices(withMediaType:AVMediaTypeVideo) as? [AVCaptureDevice] else {
        print("攝像頭不可用")
        return
    }
    guard let device = devices.filter({ $0.position == .front }).first else { return }
    // 2.2. Create an AVCaptureInput object from the device
    guard let videoInput = try? AVCaptureDeviceInput(device: device) else { return }
    // 2.3. Add the input to the session
    session.addInput(videoInput)
    // 3. Set up the output source; sample buffers arrive on videoQueue
    let videoOutput = AVCaptureVideoDataOutput()
    videoOutput.setSampleBufferDelegate(self, queue: videoQueue)
    session.addOutput(videoOutput)
    // 4. Set up the preview layer (inserted below other subviews)
    // let previewLayer = AVCaptureVideoPreviewLayer(session: session)
    // previewLayer?.frame = view.bounds
    // view.layer.addSublayer(previewLayer!)
    previewLayer.frame = view.bounds
    view.layer.insertSublayer(previewLayer, at: 0)
    // 5. Start capturing
    session.startRunning()
}
- 停止采集
/// Stops capture and removes the preview layer from the screen.
@IBAction func stopCapture() {
    // Stop delivering sample buffers
    session.stopRunning()
    previewLayer.removeFromSuperlayer()
    print("停止采集")
}
- 遵守協(xié)議
// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
// NOTE(review): the original protocol name was misspelled
// ("AVCaptureVideoDataOutSampleBufferDelegate") and would not compile;
// the correct name ends in "DataOutputSampleBufferDelegate".
extension ViewController : AVCaptureVideoDataOutputSampleBufferDelegate {
    /// Called for every captured video frame; `sampleBuffer` holds the raw
    /// frame data, and effects such as beauty filters operate on it.
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        // sampleBuffer is the captured frame
        print("已經(jīng)采集到視頻")
    }
}
獲取攝像頭時,也可以這樣遍歷
// Alternative: find the front camera with an explicit loop over `devices`.
var device : AVCaptureDevice!
for d in devices {
    if d.position == .front {
        device = d
        break
    }
}
或者通過閉包
// Alternative: the same lookup written with an explicitly typed filter closure.
let device = devices.filter { (device : AVCaptureDevice) -> Bool in
    return device.position == .front
}.first
不過還是推薦第一種,比較簡潔,一行代碼就搞定了($0 表示閉包的第一個參數(shù),也就是 filter 遍歷時數(shù)組中的當(dāng)前元素,而不是數(shù)組的第一個元素)
guard let device = devices.filter({ $0.position == .front }).first else { return }
音頻采集部分
- 先對之前的代碼進(jìn)行一下抽取
// MARK: - Capture lifecycle (setup extracted into helpers)
extension ViewController {
    /// Starts audio + video capture and shows the preview.
    @IBAction func startCapture() {
        // 1. Configure video input/output
        setupVideo()
        // 2. Configure audio input/output
        setupAudio()
        // 3. Configure the preview layer
        previewLayer.frame = view.bounds
        view.layer.insertSublayer(previewLayer, at: 0)
        // 4. Start capturing
        session.startRunning()
    }
    /// Stops capture and removes the preview.
    @IBAction func stopCapture() {
        // Stop capturing
        session.stopRunning()
        previewLayer.removeFromSuperlayer()
        print("停止采集")
    }
}
// MARK: - Session configuration helpers
extension ViewController {
    /// Adds the front-camera input and a video data output to the session.
    fileprivate func setupVideo() {
        // 1. Input source (camera)
        // 1.1. Fetch the camera devices
        guard let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as? [AVCaptureDevice] else {
            print("攝像頭不可用")
            return
        }
        guard let device = devices.filter({ $0.position == .front }).first else { return }
        // 1.2. Create an AVCaptureInput object from the device
        guard let videoInput = try? AVCaptureDeviceInput(device: device) else { return }
        // 1.3. Add the input to the session
        session.addInput(videoInput)
        // 2. Output source; frames are delivered on videoQueue
        let videoOutput = AVCaptureVideoDataOutput()
        videoOutput.setSampleBufferDelegate(self, queue: videoQueue)
        session.addOutput(videoOutput)
    }
    /// Adds the microphone input and audio output (implemented later below).
    fileprivate func setupAudio() {
    }
}
- 音頻采集,也就是對 setupAudio() 的實(shí)現(xiàn)
import UIKit
import AVFoundation
// Capture demo controller, now with a separate queue for audio buffers.
class ViewController: UIViewController {
    // Queue for video sample-buffer callbacks.
    fileprivate lazy var videoQueue = DispatchQueue.global()
    // Queue for audio sample-buffer callbacks.
    fileprivate lazy var audioQueue = DispatchQueue.global()
    // Session connecting inputs (camera/mic) to outputs.
    fileprivate lazy var session : AVCaptureSession = AVCaptureSession()
    // On-screen preview of the camera feed.
    fileprivate lazy var previewLayer : AVCaptureVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.session)
}
/// Adds the microphone input and an audio data output to the session.
fileprivate func setupAudio() {
    // 1. Input source (microphone)
    // 1.1. Get the default audio capture device
    guard let device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio) else { return }
    // 1.2. Create an AVCaptureInput from the device
    guard let audioInput = try? AVCaptureDeviceInput(device: device) else { return }
    // 1.3. Add the input to the session
    session.addInput(audioInput)
    // 2. Output source; audio buffers are delivered on audioQueue
    let audioOutput = AVCaptureAudioDataOutput()
    audioOutput.setSampleBufferDelegate(self, queue: audioQueue)
    session.addOutput(audioOutput)
}
- 遵守協(xié)議
// MARK: - Sample-buffer delegates
// NOTE(review): both protocol names were misspelled in the original
// ("...DataOutSampleBufferDelegate"); the correct names end in
// "DataOutputSampleBufferDelegate", otherwise this does not compile.
extension ViewController : AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
    // Audio and video share this same delegate method, so the `connection`
    // parameter is used (next section) to tell the two streams apart.
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        print("已經(jīng)采集到音頻")
    }
}
- connection
/// Excerpt of setupVideo(): steps 1–2 are elided here; step 3 caches the
/// video connection so the delegate can distinguish video from audio.
fileprivate func setupVideo() {
    // 1. Input source (camera)
    // 1.1. Fetch the camera device
    // 1.2. Create an AVCaptureInput object from the device
    // 1.3. Add the input to the session
    // 2. Output source
    // 3. Fetch the connection for the video stream
    // (`videoOutput` is the AVCaptureVideoDataOutput created in the elided step 2)
    connection = videoOutput.connection(withMediaType: AVMediaTypeVideo)
}
// 因?yàn)檫@里的 connection 是個局部變量,在代理方法中拿不到,所以定義一個 connection 屬性
// Stored video connection, compared against in the delegate callback.
class ViewController: UIViewController {
    fileprivate var connection : AVCaptureConnection?
}
- 遵守協(xié)議(設(shè)置好 connection 后)
// MARK: - Sample-buffer delegates (distinguishing video from audio)
// NOTE(review): fixed the misspelled delegate protocol names and the stray
// space in `self. connection` from the original.
extension ViewController : AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
    /// Shared callback for both streams; compares the incoming connection
    /// against the stored video connection to decide which stream this is.
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        if connection == self.connection {
            print("已經(jīng)采集視頻—-video")
        } else {
            print("已經(jīng)采集音頻--audio")
        }
    }
}
切換鏡頭操作
// 因?yàn)榍袚Q鏡頭,需要拿到之前的視頻輸入源
// 而之前的輸入源是局部變量,切換鏡頭方法中拿不到,所以定義一個 videoInput 屬性
// Stored video input so switchScene() can remove it from the session later.
class ViewController: UIViewController {
    fileprivate var videoInput : AVCaptureDeviceInput?
}
// In setupVideo() step 2.2, keep a reference to the freshly created input.
// 2.2. Create an AVCaptureInput object from the device
guard let videoInput = try? AVCaptureDeviceInput(device: device) else { return }
self.videoInput = videoInput
/// Toggles between the front and back camera without restarting the session.
@IBAction func switchScene() {
    // 1. Current camera input (bail out if capture was never started; the
    //    original force-unwrapped `self.videoInput!` and could crash here)
    guard let currentInput = videoInput else { return }
    // 2. Position we are switching to
    let position: AVCaptureDevicePosition = currentInput.device.position == .front ? .back : .front
    // 3. Find the device for that position (conditional cast instead of the
    //    original's crashing `as!`)
    guard let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as? [AVCaptureDevice] else { return }
    guard let device = devices.filter({ $0.position == position }).first else { return }
    // 4. Create the new input from the device
    guard let videoInput = try? AVCaptureDeviceInput(device: device) else { return }
    // 5. Swap inputs inside a configuration transaction so the change is atomic
    session.beginConfiguration()
    session.removeInput(currentInput)
    session.addInput(videoInput)
    session.commitConfiguration()
    self.videoInput = videoInput
    print("切換鏡頭")
}
這時運(yùn)行程序,切換鏡頭后會發(fā)現(xiàn)控制臺只打印“已經(jīng)采集音頻--audio”。因?yàn)殓R頭切換,之前獲得的 connection 也會改變,所以我們還要進(jìn)行一個操作,獲取新的 connection
// Reminder: the stored connection property…
fileprivate var connection : AVCaptureConnection?
// …was assigned once from the video output; after a camera switch this cached
// value goes stale, so below we query the output directly instead.
connection = videoOutput.connection(withMediaType: AVMediaTypeVideo)
然后定義 videoOutput,通過 videoOutput 獲取新的 connection
// Stored video data output so the delegate can ask it for the current
// (always up-to-date) video connection.
class ViewController: UIViewController {
    fileprivate var videoOutput : AVCaptureVideoDataOutput?
}
// Replace step 3 of setupVideo(): instead of caching the connection,
// keep the output itself and query it on demand.
// 3. Keep the video output for later connection lookups
self.videoOutput = videoOutput
- 遵守協(xié)議(根據(jù) videoOutput 獲取 connection 后)
// MARK: - Sample-buffer delegates (final version)
extension ViewController : AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
    /// Queries the output for its current video connection on every callback,
    /// so the comparison stays correct even after the camera is switched.
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        if connection == videoOutput?.connection(withMediaType: AVMediaTypeVideo) {
            print("已經(jīng)采集視頻—-video")
        } else {
            print("已經(jīng)采集音頻--audio")
        }
    }
}
文件寫入部分
- 定義 movieOutput
// Movie-file output used to write the captured audio/video to disk.
class ViewController: UIViewController {
    fileprivate var movieOutput : AVCaptureMovieFileOutput?
}
- 開始寫入文件
/// Starts capture and records the session to Documents/test.mp4.
@IBAction func startCapture() {
    // 1. Configure video input/output (elided, see setupVideo())
    // 2. Configure audio input/output (elided, see setupAudio())
    // 3. Add the movie-file output
    // NOTE(review): a session generally cannot run AVCaptureMovieFileOutput
    // together with AVCaptureVideoDataOutput at the same time — confirm which
    // outputs are actually attached in the final project.
    let movieOutput = AVCaptureMovieFileOutput()
    session.addOutput(movieOutput)
    self.movieOutput = movieOutput
    // Enable video stabilization (skipping this may drop frames)
    let connection = movieOutput.connection(withMediaType: AVMediaTypeVideo)
    connection?.preferredVideoStabilizationMode = .auto
    // 4. Configure the preview layer (elided)
    // 5. Start capturing (elided)
    // 6. Write the captured frames into a file in the Documents directory
    let path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first! + "/test.mp4"
    let url = URL(fileURLWithPath: path)
    movieOutput.startRecording(toOutputFileURL: url, recordingDelegate: self)
}
- 停止寫入
/// Stops recording first, then tears down the capture session.
@IBAction func stopCapture() {
    // Stop writing to the file
    movieOutput?.stopRecording()
    print("停止寫入")
    // Stop capturing
    session.stopRunning()
    previewLayer.removeFromSuperlayer()
    print("停止采集")
}
- 遵守代理
// MARK: - AVCaptureFileOutputRecordingDelegate
extension ViewController : AVCaptureFileOutputRecordingDelegate {
    /// Called when the file output actually begins writing to `fileURL`.
    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
        print("開始寫入文件")
    }
    /// Called when recording finishes (or fails — check `error`).
    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        print("結(jié)束寫入文件")
    }
}
這樣就完成了視頻的采集,并將視頻寫入了沙盒。
- 我好像發(fā)現(xiàn)了簡書的一個 bug:這篇文章寫的時候?yàn)g覽器崩了好幾次,后來是用 MacDown 寫完再粘貼的。似乎是大段代碼后再寫 * 某某某 就會閃退。不知道你們有沒有遇到過這種情況,我用的是 Chrome。