iOS 自定義相機總結

自定義相機分以下幾個步驟

1,判斷當前相機設備是否可用與是否授權

2,自定義相機的相關參數

3,相機切換與閃光燈

4,拍照處理

授權及設備判斷

1,攝像頭是否可用

// Whether any camera hardware is available on this device.
func isCameraAvailable() -> Bool {
    return UIImagePickerController.isSourceTypeAvailable(.camera)
}
// Whether the front-facing camera can be used.
func isFrontCameraAvailable() -> Bool {
    return UIImagePickerController.isCameraDeviceAvailable(.front)
}
// Whether the rear camera can be used.
func isBackCameraAvailable() -> Bool {
    return UIImagePickerController.isCameraDeviceAvailable(.rear)
}

2,用戶是否授權

   //判斷相機是否授權
    /// Whether the user has authorized camera (video) access.
    /// Note: this does not prompt the user — `.notDetermined` also returns `false`.
    func isCanUseCamera()->Bool{
        // Collapse the redundant if/return-true/return-false into a single comparison.
        return AVCaptureDevice.authorizationStatus(for: .video) == .authorized
    }

相機參數配置

1,基礎配置

    // Basic capture-pipeline wiring (snippet; `device`, `input`, `output` and
    // `session` are presumably properties of the enclosing controller — TODO confirm).
    // Capture device: the default video camera (may be nil on devices without one)
    device = AVCaptureDevice.default(for: AVMediaType.video)
    // Input source — NOTE(review): `try!` crashes if the device cannot be opened
    input = try! AVCaptureDeviceInput.init(device: device)
    // Output — NOTE(review): AVCaptureStillImageOutput is deprecated since iOS 10;
    // AVCapturePhotoOutput is its replacement
    output = AVCaptureStillImageOutput.init();
    // Session that ties the input and output together
    session = AVCaptureSession.init()

    if (session.canAddInput(input)) {
        session.addInput(input)
    }
    if session.canAddOutput(output) {
        session.addOutput(output)
    }
    // Preview layer for on-screen display (add it to a view's layer to see the feed)
    let layer = AVCaptureVideoPreviewLayer.init(session: session)
    
    session .startRunning()

2,可選配置

    // Optional configuration: output quality, flash and white balance.
    if session .canSetSessionPreset(AVCaptureSession.Preset.photo) {
    // Controls the quality of the captured output image
        session.sessionPreset = AVCaptureSession.Preset.photo
    }


    // Lock the device before changing its configuration
    // NOTE(review): `try!` crashes if locking fails — consider do/catch
    try! device.lockForConfiguration()  // lock the device

    if device.isFlashModeSupported(AVCaptureDevice.FlashMode.auto) {
    // Set the flash mode
        device.flashMode = AVCaptureDevice.FlashMode.auto
    }
    
    if device.isWhiteBalanceModeSupported(AVCaptureDevice.WhiteBalanceMode.autoWhiteBalance) {
    // Set the white-balance mode
        device.whiteBalanceMode = AVCaptureDevice.WhiteBalanceMode.autoWhiteBalance
    }
    // Unlock the device
    device.unlockForConfiguration()

拍攝

/// Captures a still image from the session's current video connection.
/// Prints "拍攝失敗" and returns when no connection exists or the capture fails.
func takePhoto(){
    // guard-let replaces the original nil test followed by a force unwrap.
    guard let connection = output.connection(with: AVMediaType.video) else {
        print("拍攝失敗")
        return
    }
    output.captureStillImageAsynchronously(from: connection) { (buffer, error) in
        // Check the error and unwrap the buffer instead of force-unwrapping `buffer!`.
        guard error == nil, let buffer = buffer,
            let data = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer) else {
            print("拍攝失敗")
            return
        }
        // `data` holds the JPEG bytes; convert with UIImage(data:) or persist as needed.
        _ = data
    }
}

實時濾鏡相機

要實現實時濾鏡效果,則需要獲得相機捕獲的每一幀,並進行加濾鏡的操作

1,改變輸出源頭

    // Switch the output to per-frame video data so each frame can be filtered
    output = AVCaptureVideoDataOutput.init()
    // Set the sample-buffer delegate and the queue its callbacks run on
    output.setSampleBufferDelegate(self, queue: queue)
    // Pixel format delivered to the callback (32-bit BGRA here); width/height etc. can also be set
    output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String:NSNumber.init(value: kCVPixelFormatType_32BGRA)]

2,回調代理方法

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    // Delegate callback fired for every captured video frame; applies a
    // real-time filter (GPUImage2 here) and displays the result.
    // Convert the raw frame into a UIImage so it can be filtered.
    let im = self.imageFromSampleBuffer(sampleBuffer: sampleBuffer)
    // Create the picture input
    let brightnessAdjustment = BrightnessAdjustment()
    brightnessAdjustment.brightness = 0.2
    let pictureInput = PictureInput(image: im)
    // Create the picture output
    let pictureOutput = PictureOutput()
    // Assign the completion closure
    pictureOutput.imageAvailableCallback = { image in
        // `image` is the processed UIImage; hop to the main queue for UI updates.
        OperationQueue.main.addOperation {

            self.imv.image = image.imageRotatedByDegrees(degrees: 90, flip: false)
        }
    }
    // Wire up the processing chain (GPUImage2's `-->` operator)
    pictureOutput.imageAvailableCallback is invoked when processing completes.
    pictureInput --> brightnessAdjustment --> pictureOutput
    // Start processing; synchronously: true runs inline, false runs async and
    // calls imageAvailableCallback when finished.
    pictureInput.processImage(synchronously: true)

}

補充buffer轉換為UIImage 和 UIImage進行旋轉(因為得到處理的圖片需要旋轉才正確)的方法 (代碼為Swift4.0版本)

extension UIImage {
    /// Returns a copy of the image rotated by `degrees` about its centre.
    /// - Parameters:
    ///   - degrees: Rotation angle in degrees (clockwise when facing the image).
    ///   - flip: `true` additionally mirrors the result (the original note calls
    ///     this the counter-clockwise variant).
    /// - Returns: The rotated image, or `self` if a graphics context could not be made.
    public func imageRotatedByDegrees(degrees: CGFloat, flip: Bool) -> UIImage {
        // Removed the unused radians→degrees closure; CGFloat.pi replaces the
        // deprecated M_PI constant.
        let degreesToRadians: (CGFloat) -> CGFloat = { $0 / 180.0 * CGFloat.pi }

        // Calculate the size of the rotated view's containing box for the drawing space.
        let rotatedViewBox = UIView(frame: CGRect(origin: .zero, size: size))
        rotatedViewBox.transform = CGAffineTransform(rotationAngle: degreesToRadians(degrees))
        let rotatedSize = rotatedViewBox.frame.size

        // Create the bitmap context.
        UIGraphicsBeginImageContext(rotatedSize)
        // Guard the context and backing CGImage instead of force-unwrapping them.
        guard let bitmap = UIGraphicsGetCurrentContext(), let cgImage = self.cgImage else {
            UIGraphicsEndImageContext()
            return self
        }

        // Move the origin to the middle of the image so rotation happens around the centre.
        bitmap.translateBy(x: rotatedSize.width / 2.0, y: rotatedSize.height / 2.0)
        // Rotate the image context.
        bitmap.rotate(by: degreesToRadians(degrees))

        // Core Graphics draws with a flipped y-axis; optionally mirror on x as well.
        let xFlip: CGFloat = flip ? -1.0 : 1.0
        bitmap.scaleBy(x: xFlip, y: -1.0)
        bitmap.draw(cgImage, in: CGRect(x: -size.width / 2, y: -size.height / 2,
                                        width: size.width, height: size.height))

        let newImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()

        // Fall back to the original image rather than crashing on a nil result.
        return newImage ?? self
    }
}
/// Converts a BGRA `CMSampleBuffer` (as configured on the video data output)
/// into a `UIImage`.
/// - Parameter sampleBuffer: A frame delivered by the capture delegate.
/// - Returns: The decoded image, or an empty `UIImage` when the frame has no
///   pixel buffer or a bitmap context cannot be created (instead of crashing
///   on a force unwrap as before).
func imageFromSampleBuffer(sampleBuffer : CMSampleBuffer) -> UIImage
{
    // A dropped or non-video frame legitimately has no image buffer.
    guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
        return UIImage()
    }
    // Lock the base address of the pixel buffer for CPU access …
    CVPixelBufferLockBaseAddress(imageBuffer, CVPixelBufferLockFlags.readOnly)
    // … and guarantee it is unlocked on every exit path.
    defer { CVPixelBufferUnlockBaseAddress(imageBuffer, CVPixelBufferLockFlags.readOnly) }

    // Geometry and layout of the pixel buffer.
    let baseAddress = CVPixelBufferGetBaseAddress(imageBuffer)
    let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
    let width = CVPixelBufferGetWidth(imageBuffer)
    let height = CVPixelBufferGetHeight(imageBuffer)

    // Device-dependent RGB colour space.
    let colorSpace = CGColorSpaceCreateDeviceRGB()

    // Little-endian 32-bit with premultiplied first alpha matches the
    // kCVPixelFormatType_32BGRA format requested on the output.
    var bitmapInfo: UInt32 = CGBitmapInfo.byteOrder32Little.rawValue
    bitmapInfo |= CGImageAlphaInfo.premultipliedFirst.rawValue & CGBitmapInfo.alphaInfoMask.rawValue

    // Wrap the raw pixel data in a bitmap context and snapshot it as a CGImage.
    guard let context = CGContext.init(data: baseAddress, width: width, height: height,
                                       bitsPerComponent: 8, bytesPerRow: bytesPerRow,
                                       space: colorSpace, bitmapInfo: bitmapInfo),
          let quartzImage = context.makeImage() else {
        return UIImage()
    }

    return UIImage.init(cgImage: quartzImage)
}
最后編輯于
©著作權歸作者所有,轉載或內容合作請聯繫作者
平臺聲明:文章內容(如有圖片或視頻亦包括在內)由作者上傳并發布,文章內容僅代表作者本人觀點,簡書系信息發布平臺,僅提供信息存儲服務。

推薦閱讀更多精彩內容

  • 發現 關注 消息 iOS 第三方庫、插件、知名博客總結 作者大灰狼的小綿羊哥哥關注 2017.06.26 09:4...
    肇東周閱讀 12,223評論 4 61
  • AVFoundation 相關類 AVFoundation 框架基于以下幾個類實現圖像捕捉 ,通過這些類可以訪問來...
    coderST閱讀 3,761評論 0 10
  • 引言數據鏈路層是TCP/IP中的最底層,負責幫助ARP和IP協議發送數據和將收到的數據傳送給這兩個協議。 數據鏈路...
    iamc閱讀 552評論 0 0
  • 在圖書館整理著筆記,媽媽的電話突然打過來。要知道,我媽媽可是不輕易打電話的人吶,肯定不是問吃飽喝足了沒,應該是有什...
    如若_邊閱讀 270評論 2 2
  • 五月五日是端陽,艾葉香來飲雄黃。 萬門閉戶卷蘆葦,齊舟迸進唱國殤。 懷王一恨悲千古,屈子忠良萬世芳。 心揣情意思遠...
    悅竹弄藻閱讀 147評論 0 2