iOS: from capture, through GPUImage, to face detection

The pipeline below takes a BGRA camera frame, runs it through a GPUImage filter, and then hands the result to Core Image's face detector.

// Capture in BGRA, convert to RGB24, and feed the buffer into GPUImage
BGRA_TO_RGB24(baseAddress, tempCaptureCapability.width * tempCaptureCapability.height * 4);
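
BGRA_TO_RGB24 is the author's own helper and isn't shown in the post. A minimal sketch of what it might look like, assuming the second argument is the BGRA byte count and the conversion packs the buffer down in place (safe because the write pointer never passes the read pointer):

static void BGRA_TO_RGB24(unsigned char *buf, int lenInBytes)
{
    unsigned char *dst = buf;
    for (int i = 0; i < lenInBytes; i += 4)
    {
        unsigned char b = buf[i];
        unsigned char g = buf[i + 1];
        unsigned char r = buf[i + 2];
        // buf[i + 3] is alpha and gets dropped
        *dst++ = r;
        *dst++ = g;
        *dst++ = b;
    }
}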

// GPUImage does its processing here; the output is ARGB
addfilter_on_rawdata(baseAddress, tempCaptureCapability.width, tempCaptureCapability.height, temp_argb);
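
addfilter_on_rawdata is likewise the author's wrapper and isn't shown. One plausible shape for it, built on GPUImage's GPUImageRawDataInput and GPUImageRawDataOutput; the sepia filter is an arbitrary stand-in (the post never names the filter), and the RGBA-to-ARGB repack at the end is an assumption made to match the ARGB output the post describes:

#import <GPUImage/GPUImage.h>

static void addfilter_on_rawdata(unsigned char *rgb24, int width, int height,
                                 unsigned char *argbOut)
{
    CGSize size = CGSizeMake(width, height);

    // Upload the packed RGB24 frame
    GPUImageRawDataInput *input =
        [[GPUImageRawDataInput alloc] initWithBytes:rgb24
                                               size:size
                                        pixelFormat:GPUPixelFormatRGB];

    GPUImageSepiaFilter *filter = [[GPUImageSepiaFilter alloc] init];

    // Ask for RGBA back rather than BGRA
    GPUImageRawDataOutput *output =
        [[GPUImageRawDataOutput alloc] initWithImageSize:size
                                     resultsInBGRAFormat:NO];

    [input addTarget:filter];
    [filter addTarget:output];
    [input processData];

    // Read the filtered RGBA pixels and repack them as ARGB
    [output lockFramebufferForReading];
    GLubyte *rgba = [output rawBytesForImage];
    NSUInteger bpr = [output bytesPerRowInOutput];
    for (int y = 0; y < height; y++)
    {
        for (int x = 0; x < width; x++)
        {
            GLubyte *src = rgba + y * bpr + x * 4;
            unsigned char *dst = argbOut + (y * width + x) * 4;
            dst[0] = src[3]; // A
            dst[1] = src[0]; // R
            dst[2] = src[1]; // G
            dst[3] = src[2]; // B
        }
    }
    [output unlockFramebufferAfterReading];
}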

// Convert ARGB to RGBA
ARGB_TO_RGBA(temp_argb, tempCaptureCapability.width * tempCaptureCapability.height * 4, temp_rgba);
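
ARGB_TO_RGBA is presumably just a channel rotation into a separate buffer; a short sketch under that assumption:

static void ARGB_TO_RGBA(const unsigned char *argb, int lenInBytes,
                         unsigned char *rgba)
{
    for (int i = 0; i < lenInBytes; i += 4)
    {
        rgba[i]     = argb[i + 1]; // R
        rgba[i + 1] = argb[i + 2]; // G
        rgba[i + 2] = argb[i + 3]; // B
        rgba[i + 3] = argb[i];     // A
    }
}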

// Convert the RGBA buffer to a UIImage; see my other post for the helper
UIImage *imgJPeg = [self convertBitmapRGBA8ToUIImage:temp_rgba withWidth:tempCaptureCapability.width withHeight:tempCaptureCapability.height];
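
The convertBitmapRGBA8ToUIImage: helper lives in that other post; for completeness, a common way to build it with CGImageCreate (a sketch, not necessarily the author's exact version):

- (UIImage *)convertBitmapRGBA8ToUIImage:(unsigned char *)buffer
                               withWidth:(int)width
                              withHeight:(int)height
{
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGDataProviderRef provider =
        CGDataProviderCreateWithData(NULL, buffer, width * height * 4, NULL);

    // 8 bits per channel, 4 channels, alpha last to match the RGBA layout
    CGImageRef imageRef =
        CGImageCreate(width, height, 8, 32, width * 4, colorSpace,
                      kCGBitmapByteOrderDefault | kCGImageAlphaLast,
                      provider, NULL, NO, kCGRenderingIntentDefault);

    UIImage *image = [UIImage imageWithCGImage:imageRef];

    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);
    return image;
}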

// From here on we use the Core Image framework
CIImage *ciImage = [CIImage imageWithCGImage:imgJPeg.CGImage];

NSDictionary *opts = [NSDictionary dictionaryWithObject:CIDetectorAccuracyHigh
                                                 forKey:CIDetectorAccuracy];
CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace
                                          context:nil
                                          options:opts];

// Run the detector to get the face features
NSArray *features = [detector featuresInImage:ciImage];
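
If the capture session delivers rotated frames (common with the camera), the detector accepts an orientation hint through featuresInImage:options: instead of the plain call above; for example (the EXIF-style value 6 below is illustrative, not from the original post):

NSArray *features = [detector featuresInImage:ciImage
                                      options:@{ CIDetectorImageOrientation : @6 }];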

CIFaceFeature *feature = nil;
CGRect rect;

for (CIFaceFeature *f in features)
{
    CGRect aRect = f.bounds;
    NSLog(@"%f, %f, %f, %f", aRect.origin.x, aRect.origin.y, aRect.size.width, aRect.size.height);

    // Positions of the eyes and mouth
    if (f.hasLeftEyePosition) NSLog(@"Left eye %g %g\n", f.leftEyePosition.x, f.leftEyePosition.y);
    if (f.hasRightEyePosition) NSLog(@"Right eye %g %g\n", f.rightEyePosition.x, f.rightEyePosition.y);

    if (f.hasMouthPosition)
    {
        NSLog(@"Mouth %g %g %g %g\n", f.mouthPosition.x, f.mouthPosition.y, f.bounds.size.height, f.bounds.size.width);
        feature = f;
        rect = CGRectMake(f.mouthPosition.x - 100, f.mouthPosition.y - 60, 200, 250);
    }
}
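
One thing the post leaves out: Core Image reports all of these coordinates with a lower-left origin. To use the mouth rect in UIKit (upper-left origin), you would flip it; a minimal sketch, assuming imgJPeg is the image the detector ran on:

if (feature)
{
    // Flip from Core Image's bottom-left origin to UIKit's top-left origin
    CGAffineTransform flip = CGAffineTransformMakeScale(1, -1);
    flip = CGAffineTransformTranslate(flip, 0, -imgJPeg.size.height);
    CGRect uikitRect = CGRectApplyAffineTransform(rect, flip);
    NSLog(@"Mouth rect in UIKit coordinates: %@", NSStringFromCGRect(uikitRect));
}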
