
iOS Example: SquareCam Analysis


[objc] 
// used for KVO observation of the @"capturingStillImage" property to perform flash bulb animation  
static const NSString *AVCaptureStillImageIsCapturingStillImageContext = @"AVCaptureStillImageIsCapturingStillImageContext"; 
 
static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;}; 
 
static void ReleaseCVPixelBuffer(void *pixel, const void *data, size_t size); 
static void ReleaseCVPixelBuffer(void *pixel, const void *data, size_t size)  
{ 
    /**
     CVPixelBuffer (Core Video pixel buffer): an image buffer in main memory that holds pixel data.
     Applications use it when generating image frames, decompressing video, or working with Core Image.
     **/ 
    CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)pixel; 
    CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 ); 
    CVPixelBufferRelease( pixelBuffer ); 
} 

 
 
// Create a CGImage from the given pixel buffer; the buffer must be uncompressed kCVPixelFormatType_32ARGB or kCVPixelFormatType_32BGRA  
static OSStatus CreateCGImageFromCVPixelBuffer(CVPixelBufferRef pixelBuffer, CGImageRef *imageOut); 
static OSStatus CreateCGImageFromCVPixelBuffer(CVPixelBufferRef pixelBuffer, CGImageRef *imageOut)  
{    
    OSStatus err = noErr; 
    OSType sourcePixelFormat; 
    size_t width, height, sourceRowBytes; 
    void *sourceBaseAddr = NULL; 
    CGBitmapInfo bitmapInfo; 
    CGColorSpaceRef colorspace = NULL; 
     
    // Some Quartz routines are handed their data in blocks rather than reading it from memory; CGDataProviderRef lets the caller supply data to Quartz in that form (essentially Quartz's own data-source abstraction)  
    CGDataProviderRef provider = NULL; 
    CGImageRef image = NULL; 
     
    sourcePixelFormat = CVPixelBufferGetPixelFormatType( pixelBuffer ); 
    if ( kCVPixelFormatType_32ARGB == sourcePixelFormat ) 
        bitmapInfo = kCGBitmapByteOrder32Big | kCGImageAlphaNoneSkipFirst; 
    else if ( kCVPixelFormatType_32BGRA == sourcePixelFormat ) 
        bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipFirst; 
    else 
        return -95014; // only uncompressed pixel formats  
     
     
    // Read the pixel buffer's data properties  
    sourceRowBytes = CVPixelBufferGetBytesPerRow( pixelBuffer ); 
    width = CVPixelBufferGetWidth( pixelBuffer ); 
    height = CVPixelBufferGetHeight( pixelBuffer ); 
     
    CVPixelBufferLockBaseAddress( pixelBuffer, 0 ); 
     
    // Get the base address of the pixel buffer: direct access to the underlying pixel memory  
    sourceBaseAddr = CVPixelBufferGetBaseAddress( pixelBuffer ); 
     
    colorspace = CGColorSpaceCreateDeviceRGB(); 
     
    CVPixelBufferRetain( pixelBuffer ); 
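    // The extra retain above (and the base-address lock taken earlier) is balanced by ReleaseCVPixelBuffer(),
    // which the data provider created below invokes once the CGImage no longer needs the pixel data.  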
     
    // Wrap the data in a form Quartz can read directly: hand it the base address, the data length, and a release callback  
    provider = CGDataProviderCreateWithData( (void *)pixelBuffer, sourceBaseAddr, sourceRowBytes * height, ReleaseCVPixelBuffer); 
    image = CGImageCreate(width, height, 8, 32, sourceRowBytes, colorspace, bitmapInfo, provider, NULL, true, kCGRenderingIntentDefault); 
     
bail: 
    if ( err && image ) { 
        CGImageRelease( image ); 
        image = NULL; 
    } 
    if ( provider ) CGDataProviderRelease( provider ); 
    if ( colorspace ) CGColorSpaceRelease( colorspace ); 
    *imageOut = image; 
    return err; 
} 

 
// utility used by newSquareOverlayedImageForFeatures for   
static CGContextRef CreateCGBitmapContextForSize(CGSize size); 
static CGContextRef CreateCGBitmapContextForSize(CGSize size) 
{ 

    CGContextRef    context = NULL; 
    CGColorSpaceRef colorSpace; 
    int             bitmapBytesPerRow; 
     
    bitmapBytesPerRow = (size.width * 4); 
     
    colorSpace = CGColorSpaceCreateDeviceRGB(); 
    context = CGBitmapContextCreate (NULL, 
                                     size.width, 
                                     size.height, 
                                     8,      // bits per component  
                                     bitmapBytesPerRow, 
                                     colorSpace, 
                                     kCGImageAlphaPremultipliedLast); 
    CGContextSetAllowsAntialiasing(context, NO); 
    CGColorSpaceRelease( colorSpace ); 
    return context; 
} 

 
#pragma mark-  
 
@interface UIImage (RotationMethods) 
- (UIImage *)imageRotatedByDegrees:(CGFloat)degrees; 
@end 
 
@implementation UIImage (RotationMethods) 
 
- (UIImage *)imageRotatedByDegrees:(CGFloat)degrees 
{    
    // calculate the size of the rotated view's containing box for our drawing space  
    UIView *rotatedViewBox = [[UIView alloc] initWithFrame:CGRectMake(0,0,self.size.width, self.size.height)]; 
     
    // The CGAffineTransform struct is used here to rotate the coordinate system  
    CGAffineTransform t = CGAffineTransformMakeRotation(DegreesToRadians(degrees)); 
    rotatedViewBox.transform = t; 
    CGSize rotatedSize = rotatedViewBox.frame.size; 
    [rotatedViewBox release]; 
     
    // Create the bitmap context  
    UIGraphicsBeginImageContext(rotatedSize); // create a bitmap graphics context and make it the current context  
    CGContextRef bitmap = UIGraphicsGetCurrentContext();  
     
    // Move the origin to the middle of the image so we will rotate and scale around the center.  
    CGContextTranslateCTM(bitmap, rotatedSize.width/2, rotatedSize.height/2); 
     
    // Rotate the image context  
    CGContextRotateCTM(bitmap, DegreesToRadians(degrees)); 
     
    // Now, draw the rotated/scaled image into the context  
    CGContextScaleCTM(bitmap, 1.0, -1.0); 
    CGContextDrawImage(bitmap, CGRectMake(-self.size.width / 2, -self.size.height / 2, self.size.width, self.size.height), [self CGImage]); 
     
    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext(); 
    UIGraphicsEndImageContext(); 
    return newImage; 
} 
     

 
@end 
 
#pragma mark-  
 
@interface SquareCamViewController (InternalMethods) 
- (void)setupAVCapture; 
- (void)teardownAVCapture; 
- (void)drawFaceBoxesForFeatures:(NSArray *)features forVideoBox:(CGRect)clap orientation:(UIDeviceOrientation)orientation; 
@end 
 
@implementation SquareCamViewController 
 
- (void)setupAVCapture 
{ 

    NSError *error = nil; 
     
    AVCaptureSession *session = [AVCaptureSession new]; 
    // UIDevice is a singleton representing the current device; it exposes device information such as the assigned name, model, OS name, and OS version  
    if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone) 
        [session setSessionPreset:AVCaptureSessionPreset640x480]; 
    else 
        [session setSessionPreset:AVCaptureSessionPresetPhoto]; 
     
    // Select a video device, make an input  
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; 
    AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error]; 
    require( error == nil, bail ); 
     
    isUsingFrontFacingCamera = NO; 
    if ( [session canAddInput:deviceInput] ) 
        [session addInput:deviceInput]; 
     
    // AVCaptureStillImageOutput, a subclass of AVCaptureOutput, is used to capture high-quality still images  
    stillImageOutput = [AVCaptureStillImageOutput new]; 
    [stillImageOutput addObserver:self forKeyPath:@"capturingStillImage" options:NSKeyValueObservingOptionNew context:AVCaptureStillImageIsCapturingStillImageContext]; 
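    // the KVO context registered here is checked in observeValueForKeyPath:... below before running the flash bulb animation  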
    if ( [session canAddOutput:stillImageOutput] ) 
        [session addOutput:stillImageOutput]; 
     
    // Make a video data output  
    videoDataOutput = [AVCaptureVideoDataOutput new]; 
     
    // we want BGRA, both CoreGraphics and OpenGL work well with 'BGRA'  
    NSDictionary *rgbOutputSettings = [NSDictionary dictionaryWithObject: 
                                       [NSNumber numberWithInt:kCMPixelFormat_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]; 
    [videoDataOutput setVideoSettings:rgbOutputSettings]; 
    [videoDataOutput setAlwaysDiscardsLateVideoFrames:YES]; // discard if the data output queue is blocked (as we process the still image)  
  
    // dispatch_queue_t: a serial queue so AVCaptureVideoDataOutput hands sample buffers to captureOutput:didOutputSampleBuffer:fromConnection: one at a time, in FIFO order; late frames can be dropped in favor of newer ones without frames ever arriving out of order  
    videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL); 
    [videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue]; 
     
    if ( [session canAddOutput:videoDataOutput] ) 
        [session addOutput:videoDataOutput]; 
    [[videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:NO]; 
     
    effectiveScale = 1.0; 
    // Get a preview layer for the given session  
    previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session]; 
    [previewLayer setBackgroundColor:[[UIColor blackColor] CGColor]]; 
    [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspect]; 
    CALayer *rootLayer = [previewView layer]; 
    [rootLayer setMasksToBounds:YES]; 
    [previewLayer setFrame:[rootLayer bounds]]; 
    [rootLayer addSublayer:previewLayer]; // presumably this puts the preview layer on top of the existing sublayers (sublayers stack in the order they are added)  
    [session startRunning]; 
     
     
// jumped to when selecting the input device fails  
bail: 
    [session release]; 
    if (error) { 
        UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:[NSString stringWithFormat:@"Failed with error %d", (int)[error code]] 
                                                            message:[error localizedDescription] 
                                                           delegate:nil  
                                                  cancelButtonTitle:@"Dismiss"  
                                                  otherButtonTitles:nil]; 
        [alertView show]; 
        [alertView release]; 
        [self teardownAVCapture]; 
    } 
} 

 
// clean up capture setup  
- (void)teardownAVCapture 
{ 

    [videoDataOutput release]; 
    if (videoDataOutputQueue) 
        dispatch_release(videoDataOutputQueue); 
    [stillImageOutput removeObserver:self forKeyPath:@"isCapturingStillImage"]; 
    [stillImageOutput release]; 
    [previewLayer removeFromSuperlayer]; 
    [previewLayer release]; 
} 

 
// perform a flash bulb animation using KVO to monitor the value of the capturingStillImage property of the AVCaptureStillImageOutput class  
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context 
{ 

    if ( context == AVCaptureStillImageIsCapturingStillImageContext ) { 
        BOOL isCapturingStillImage = [[change objectForKey:NSKeyValueChangeNewKey] boolValue]; 
         
        if ( isCapturingStillImage ) { 
            // do flash bulb like animation  
            flashView = [[UIView alloc] initWithFrame:[previewView frame]]; 
            [flashView setBackgroundColor:[UIColor whiteColor]]; 
            [flashView setAlpha:0.f]; 
            [[[self view] window] addSubview:flashView]; 
             
            [UIView animateWithDuration:.4f 
                             animations:^{ 
                                 [flashView setAlpha:1.f]; 
                             } 
             ]; 
        } 
        else { 
            [UIView animateWithDuration:.4f 
                             animations:^{ 
                                 [flashView setAlpha:0.f]; 
                             } 
                             completion:^(BOOL finished){ 
                                 [flashView removeFromSuperview]; 
                                 [flashView release]; 
                                 flashView = nil; 
                             } 
             ]; 
        } 
    } 
} 

 
// utility routine used during image capture to set up capture orientation  
- (AVCaptureVideoOrientation)avOrientationForDeviceOrientation:(UIDeviceOrientation)deviceOrientation 
{ 

    AVCaptureVideoOrientation result = deviceOrientation; 
    if ( deviceOrientation == UIDeviceOrientationLandscapeLeft ) 
        result = AVCaptureVideoOrientationLandscapeRight; 
    else if ( deviceOrientation == UIDeviceOrientationLandscapeRight ) 
        result = AVCaptureVideoOrientationLandscapeLeft; 
    return result; 
} 

 
// utility routine to create a new image with the red square overlay with appropriate orientation  
// and return the new composited image which can be saved to the camera roll  
- (CGImageRef)newSquareOverlayedImageForFeatures:(NSArray *)features  
                                            inCGImage:(CGImageRef)backgroundImage  
                                      withOrientation:(UIDeviceOrientation)orientation  
                                          frontFacing:(BOOL)isFrontFacing 
{ 

    CGImageRef returnImage = NULL; 
    CGRect backgroundImageRect = CGRectMake(0., 0., CGImageGetWidth(backgroundImage), CGImageGetHeight(backgroundImage)); 
    CGContextRef bitmapContext = CreateCGBitmapContextForSize(backgroundImageRect.size); 
    CGContextClearRect(bitmapContext, backgroundImageRect); 
    CGContextDrawImage(bitmapContext, backgroundImageRect, backgroundImage); 
    CGFloat rotationDegrees = 0.; 
     
    switch (orientation) { 
        case UIDeviceOrientationPortrait: 
            rotationDegrees = -90.; 
            break; 
        case UIDeviceOrientationPortraitUpsideDown: 
            rotationDegrees = 90.; 
            break; 
        case UIDeviceOrientationLandscapeLeft: 
            if (isFrontFacing) rotationDegrees = 180.; 
            else rotationDegrees = 0.; 
            break; 
        case UIDeviceOrientationLandscapeRight: 
            if (isFrontFacing) rotationDegrees = 0.; 
            else rotationDegrees = 180.; 
            break; 
        case UIDeviceOrientationFaceUp: 
        case UIDeviceOrientationFaceDown: 
        default: 
            break; // leave the layer in its last known orientation  
    } 
    UIImage *rotatedSquareImage = [square imageRotatedByDegrees:rotationDegrees]; 
     
    // features found by the face detector  
    for ( CIFaceFeature *ff in features ) { 
        CGRect faceRect = [ff bounds]; 
        CGContextDrawImage(bitmapContext, faceRect, [rotatedSquareImage CGImage]); 
    } 
    returnImage = CGBitmapContextCreateImage(bitmapContext); 
    CGContextRelease (bitmapContext); 
     
    return returnImage; 
} 

 
// utility routine used after taking a still image to write the resulting image to the camera roll  
- (BOOL)writeCGImageToCameraRoll:(CGImageRef)cgImage withMetadata:(NSDictionary *)metadata 
{ 

    CFMutableDataRef destinationData = CFDataCreateMutable(kCFAllocatorDefault, 0); 
    CGImageDestinationRef destination = CGImageDestinationCreateWithData(destinationData,  
                                                                         CFSTR("public.jpeg"),  
                                                                         1,  
                                                                         NULL); 
    BOOL success = (destination != NULL); 
    require(success, bail); 
 
    const float JPEGCompQuality = 0.85f; // JPEGHigherQuality  
    CFMutableDictionaryRef optionsDict = NULL; 
    CFNumberRef qualityNum = NULL; 
     
    qualityNum = CFNumberCreate(0, kCFNumberFloatType, &JPEGCompQuality);     
    if ( qualityNum ) { 
        optionsDict = CFDictionaryCreateMutable(0, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks); 
        if ( optionsDict ) 
            CFDictionarySetValue(optionsDict, kCGImageDestinationLossyCompressionQuality, qualityNum); 
        CFRelease( qualityNum ); 
    } 
     
    CGImageDestinationAddImage( destination, cgImage, optionsDict ); 
    success = CGImageDestinationFinalize( destination ); 
 
    if ( optionsDict ) 
        CFRelease(optionsDict); 
     
    require(success, bail); 
     
    CFRetain(destinationData); 
    ALAssetsLibrary *library = [ALAssetsLibrary new]; 
    [library writeImageDataToSavedPhotosAlbum:(id)destinationData metadata:metadata completionBlock:^(NSURL *assetURL, NSError *error) { 
        if (destinationData) 
            CFRelease(destinationData); 
    }]; 
    [library release]; 
 
// goto target: require(success, bail) jumps here whenever success is false  
bail: 
    if (destinationData) 
        CFRelease(destinationData); 
    if (destination) 
        CFRelease(destination); 
    return success; 
} 

 
// utility routine to display an error alert if takePicture fails  
- (void)displayErrorOnMainQueue:(NSError *)error withMessage:(NSString *)message 
{ 

    dispatch_async(dispatch_get_main_queue(), ^(void) { 
        UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:[NSString stringWithFormat:@"%@ (%d)", message, (int)[error code]] 
                                                            message:[error localizedDescription] 
                                                           delegate:nil  
                                                  cancelButtonTitle:@"Dismiss"  
                                                  otherButtonTitles:nil]; 
        [alertView show]; 
        [alertView release]; 
    }); 
} 

 
// turn on/off face detection  
- (IBAction)toggleFaceDetection:(id)sender 
{ 

    detectFaces = [(UISwitch *)sender isOn]; 
    [[videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:detectFaces]; 
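    // the video data connection was left disabled in setupAVCapture; it only delivers frames while this switch is on  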
    if (!detectFaces) { 
        dispatch_async(dispatch_get_main_queue(), ^(void) { 
            // clear out any squares currently displaying.  
            [self drawFaceBoxesForFeatures:[NSArray array] forVideoBox:CGRectZero orientation:UIDeviceOrientationPortrait]; 
        }); 
    } 
} 

 
// find where the video box is positioned within the preview layer based on the video size and gravity  
+ (CGRect)videoPreviewBoxForGravity:(NSString *)gravity frameSize:(CGSize)frameSize apertureSize:(CGSize)apertureSize 
{ 

    CGFloat apertureRatio = apertureSize.height / apertureSize.width; 
    CGFloat viewRatio = frameSize.width / frameSize.height; 
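    // apertureSize comes from the video's clean aperture; comparing its aspect ratio with the view's
    // decides which dimension of the video fills the frame for each gravity mode  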
     
    CGSize size = CGSizeZero; 
    if ([gravity isEqualToString:AVLayerVideoGravityResizeAspectFill]) { 
        if (viewRatio > apertureRatio) { 
            size.width = frameSize.width; 
            size.height = apertureSize.width * (frameSize.width / apertureSize.height); 
        } else { 
            size.width = apertureSize.height * (frameSize.height / apertureSize.width); 
            size.height = frameSize.height; 
        } 
    } else if ([gravity isEqualToString:AVLayerVideoGravityResizeAspect]) { 
        if (viewRatio > apertureRatio) { 
            size.width = apertureSize.height * (frameSize.height / apertureSize.width); 
            size.height = frameSize.height; 
        } else { 
            size.width = frameSize.width; 
            size.height = apertureSize.width * (frameSize.width / apertureSize.height); 
        } 
    } else if ([gravity isEqualToString:AVLayerVideoGravityResize]) { 
        size.width = frameSize.width; 
        size.height = frameSize.height; 
    } 
     
    CGRect videoBox; 
    videoBox.size = size; 
    if (size.width < frameSize.width) 
        videoBox.origin.x = (frameSize.width - size.width) / 2; 
    else 
        videoBox.origin.x = (size.width - frameSize.width) / 2; 
     
    if ( size.height < frameSize.height ) 
        videoBox.origin.y = (frameSize.height - size.height) / 2; 
    else 
        videoBox.origin.y = (size.height - frameSize.height) / 2; 
     
    return videoBox; 
} 

 
// called asynchronously as the capture output is capturing sample buffers, this method asks the face detector (if on)  
// to detect features and for each draw the red square in a layer and set appropriate orientation  
- (void)drawFaceBoxesForFeatures:(NSArray *)features forVideoBox:(CGRect)clap orientation:(UIDeviceOrientation)orientation 
{ 

    NSArray *sublayers = [NSArray arrayWithArray:[previewLayer sublayers]]; 
    NSInteger sublayersCount = [sublayers count], currentSublayer = 0; 
    NSInteger featuresCount = [features count], currentFeature = 0; 
     
    // CATransaction is the Core Animation mechanism for batching layer-tree changes into atomic updates to the render tree; every change to the layer tree is part of a transaction, and transactions can be nested  
    [CATransaction begin]; 
     
    // kCFBooleanTrue is the boxed Core Foundation boolean. Core Foundation is the framework that provides basic software services — common data type abstractions, XML property lists, URL resource access, and preferences — to apps and app environments (iOS really is a well-structured stack of frameworks)  
    [CATransaction setValue:(id)kCFBooleanTrue forKey:kCATransactionDisableActions]; 
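    // disabling implicit actions keeps the face boxes from animating between frames; they snap to their new positions instead  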
     
    // hide all the face layers  
    for ( CALayer *layer in sublayers ) { 
        if ( [[layer name] isEqualToString:@"FaceLayer"] ) 
            [layer setHidden:YES]; 
    }    
     
    if ( featuresCount == 0 || !detectFaces ) { 
         
        [CATransaction commit]; 
        return; // early bail.  
    } 
         
    CGSize parentFrameSize = [previewView frame].size; 
    NSString *gravity = [previewLayer videoGravity]; 
    BOOL isMirrored = [previewLayer isMirrored]; 
    CGRect previewBox = [SquareCamViewController videoPreviewBoxForGravity:gravity  
                                                               frameSize:parentFrameSize  
                                                            apertureSize:clap.size]; 
     
    for ( CIFaceFeature *ff in features ) { 
        // find the correct position for the square layer within the previewLayer  
        // the feature box originates in the bottom left of the video frame.  
        // (Bottom right if mirroring is turned on)  
        CGRect faceRect = [ff bounds]; 
 
        // flip preview width and height  
        CGFloat temp = faceRect.size.width; 
        faceRect.size.width = faceRect.size.height; 
        faceRect.size.height = temp; 
        temp = faceRect.origin.x; 
        faceRect.origin.x = faceRect.origin.y; 
        faceRect.origin.y = temp; 
        // scale coordinates so they fit in the preview box, which may be scaled  
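        // clap is in the video's (landscape) coordinate space, so its height maps to the preview's width and vice versa  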
        CGFloat widthScaleBy = previewBox.size.width / clap.size.height; 
        CGFloat heightScaleBy = previewBox.size.height / clap.size.width; 
        faceRect.size.width *= widthScaleBy; 
        faceRect.size.height *= heightScaleBy; 
        faceRect.origin.x *= widthScaleBy; 
        faceRect.origin.y *= heightScaleBy; 
 
        if ( isMirrored ) 
            faceRect = CGRectOffset(faceRect, previewBox.origin.x + previewBox.size.width - faceRect.size.width - (faceRect.origin.x * 2), previewBox.origin.y); 
        else 
            faceRect = CGRectOffset(faceRect, previewBox.origin.x, previewBox.origin.y); 
         
        CALayer *featureLayer = nil; 
         
        // re-use an existing layer if possible  
        while ( !featureLayer && (currentSublayer < sublayersCount) ) { 
            CALayer *currentLayer = [sublayers objectAtIndex:currentSublayer++]; 
            if ( [[currentLayer name] isEqualToString:@"FaceLayer"] ) { 
                featureLayer = currentLayer; 
                [currentLayer setHidden:NO]; 
            } 
        } 
         
        // create a new one if necessary  
        if ( !featureLayer ) { 
            featureLayer = [CALayer new]; 
            [featureLayer setContents:(id)[square CGImage]]; 
            [featureLayer setName:@"FaceLayer"]; 
            [previewLayer addSublayer:featureLayer]; 
            [featureLayer release]; 
        } 
        [featureLayer setFrame:faceRect]; 
         
        switch (orientation) { 
            case UIDeviceOrientationPortrait: 
                [featureLayer setAffineTransform:CGAffineTransformMakeRotation(DegreesToRadians(0.))]; 
                break; 
            case UIDeviceOrientationPortraitUpsideDown: 
                [featureLayer setAffineTransform:CGAffineTransformMakeRotation(DegreesToRadians(180.))]; 
                break; 
            case UIDeviceOrientationLandscapeLeft: 
                [featureLayer setAffineTransform:CGAffineTransformMakeRotation(DegreesToRadians(90.))]; 
                break; 
            case UIDeviceOrientationLandscapeRight: 
                [featureLayer setAffineTransform:CGAffineTransformMakeRotation(DegreesToRadians(-90.))]; 
                break; 
            case UIDeviceOrientationFaceUp: 
            case UIDeviceOrientationFaceDown: 
            default: 
                break; // leave the layer in its last known orientation  
        } 
        currentFeature++; 
    } 
     
    [CATransaction commit]; 
} 

 
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection 
{    
    // process each incoming frame for detection in real time  
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 
 
    // CMAttachmentBearer is a CF-based protocol that provides a key/value/mode attachment API; any CF object can be attached to a bearer to carry extra information  
    CFDictionaryRef attachments = CMCopyDictionaryOfAttachments(kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate); 
    CIImage *ciImage = [[CIImage alloc] initWithCVPixelBuffer:pixelBuffer options:(NSDictionary *)attachments]; 
    if (attachments) 
        CFRelease(attachments); 
    NSDictionary *imageOptions = nil; 
    UIDeviceOrientation curDeviceOrientation = [[UIDevice currentDevice] orientation]; 
    int exifOrientation; 
     
    /* kCGImagePropertyOrientation values
        The intended display orientation of the image. If present, this key is a CFNumber value with the same value as defined
        by the TIFF and EXIF specifications -- see enumeration of integer constants. 
        The value specified where the origin (0,0) of the image is located. If not present, a value of 1 is assumed.
        
        used when calling featuresInImage: options: The value for this key is an integer NSNumber from 1..8 as found in kCGImagePropertyOrientation.
        If present, the detection will be done based on that orientation but the coordinates in the returned features will still be based on those of the image. */ 
         
    enum { 
        PHOTOS_EXIF_0ROW_TOP_0COL_LEFT          = 1, //   1  =  0th row is at the top, and 0th column is on the left (THE DEFAULT).  
        PHOTOS_EXIF_0ROW_TOP_0COL_RIGHT         = 2, //   2  =  0th row is at the top, and 0th column is on the right.    
        PHOTOS_EXIF_0ROW_BOTTOM_0COL_RIGHT      = 3, //   3  =  0th row is at the bottom, and 0th column is on the right.    
        PHOTOS_EXIF_0ROW_BOTTOM_0COL_LEFT       = 4, //   4  =  0th row is at the bottom, and 0th column is on the left.    
        PHOTOS_EXIF_0ROW_LEFT_0COL_TOP          = 5, //   5  =  0th row is on the left, and 0th column is the top.    
        PHOTOS_EXIF_0ROW_RIGHT_0COL_TOP         = 6, //   6  =  0th row is on the right, and 0th column is the top.    
        PHOTOS_EXIF_0ROW_RIGHT_0COL_BOTTOM      = 7, //   7  =  0th row is on the right, and 0th column is the bottom.    
        PHOTOS_EXIF_0ROW_LEFT_0COL_BOTTOM       = 8  //   8  =  0th row is on the left, and 0th column is the bottom.    
    }; 
     
    switch (curDeviceOrientation) { 
        case UIDeviceOrientationPortraitUpsideDown:  // Device oriented vertically, home button on the top  
            exifOrientation = PHOTOS_EXIF_0ROW_LEFT_0COL_BOTTOM; 
            break; 
        case UIDeviceOrientationLandscapeLeft:       // Device oriented horizontally, home button on the right  
            if (isUsingFrontFacingCamera) 
                exifOrientation = PHOTOS_EXIF_0ROW_BOTTOM_0COL_RIGHT; 
            else 
                exifOrientation = PHOTOS_EXIF_0ROW_TOP_0COL_LEFT; 
            break; 
        case UIDeviceOrientationLandscapeRight:      // Device oriented horizontally, home button on the left  
            if (isUsingFrontFacingCamera) 
                exifOrientation = PHOTOS_EXIF_0ROW_TOP_0COL_LEFT; 
            else 
                exifOrientation = PHOTOS_EXIF_0ROW_BOTTOM_0COL_RIGHT; 
            break; 
        case UIDeviceOrientationPortrait:            // Device oriented vertically, home button on the bottom  
        default: 
            exifOrientation = PHOTOS_EXIF_0ROW_RIGHT_0COL_TOP; 
            break; 
    } 
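    // (the camera sensor is mounted in landscape, so even in portrait the buffer's rows run sideways;
    // the EXIF value chosen above tells the face detector how the image is really oriented)  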
 
    imageOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:exifOrientation] forKey:CIDetectorImageOrientation]; 
     
    // detect faces  
    NSArray *features = [faceDetector featuresInImage:ciImage options:imageOptions]; 
    [ciImage release]; 
     
    // get the clean aperture  
    // the clean aperture is a rectangle that defines the portion of the encoded pixel dimensions  
    // that represents image data valid for display.  
    CMFormatDescriptionRef fdesc = CMSampleBufferGetFormatDescription(sampleBuffer); 
    CGRect clap = CMVideoFormatDescriptionGetCleanAperture(fdesc, false /*originIsTopLeft == false*/); 
     
    dispatch_async(dispatch_get_main_queue(), ^(void) { 
        [self drawFaceBoxesForFeatures:features forVideoBox:clap orientation:curDeviceOrientation]; 
    }); 
} 

 
- (void)dealloc 
{ 

    [self teardownAVCapture]; 
    [faceDetector release]; 
    [square release]; 
    [super dealloc]; 
} 

 
// switch between the front and back cameras  
- (IBAction)switchCameras:(id)sender 
{ 

    AVCaptureDevicePosition desiredPosition; 
    if (isUsingFrontFacingCamera) 
        desiredPosition = AVCaptureDevicePositionBack; 
    else 
        desiredPosition = AVCaptureDevicePositionFront; 
     
    for (AVCaptureDevice *d in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) { 
        if ([d position] == desiredPosition) { 
            [[previewLayer session] beginConfiguration]; 
            AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:d error:nil]; 
            for (AVCaptureInput *oldInput in [[previewLayer session] inputs]) { 
                [[previewLayer session] removeInput:oldInput]; 
            } 
            [[previewLayer session] addInput:input]; 
            [[previewLayer session] commitConfiguration]; 
            break; 
        } 
    } 
    isUsingFrontFacingCamera = !isUsingFrontFacingCamera; 
} 

 
- (void)didReceiveMemoryWarning 
{ 

    [super didReceiveMemoryWarning]; 
    // Release any cached data, images, etc that aren't in use.  
} 

 
#pragma mark - View lifecycle  
 
- (void)viewDidLoad 
{ 

    [super viewDidLoad]; 
    // Do any additional setup after loading the view, typically from a nib.  
    [self setupAVCapture]; 
    square = [[UIImage imageNamed:@"squarePNG"] retain]; 
    NSDictionary *detectorOptions = [[NSDictionary alloc] initWithObjectsAndKeys:CIDetectorAccuracyLow, CIDetectorAccuracy, nil]; 
    faceDetector = [[CIDetector detectorOfType:CIDetectorTypeFace context:nil options:detectorOptions] retain]; 
    [detectorOptions release]; 
} 

 
- (void)viewDidUnload 
{ 

    [super viewDidUnload]; 
    // Release any retained subviews of the main view.  
    // e.g. self.myOutlet = nil;  
} 

 
- (void)viewWillAppear:(BOOL)animated 
{ 

    [super viewWillAppear:animated]; 
} 

 
- (void)viewDidAppear:(BOOL)animated 
{ 

    [super viewDidAppear:animated]; 
} 

 
- (void)viewWillDisappear:(BOOL)animated 
{ 

    [super viewWillDisappear:animated]; 
} 

 
- (void)viewDidDisappear:(BOOL)animated 
{ 

    [super viewDidDisappear:animated]; 
} 

 
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation 
{ 

    // Return YES for supported orientations  
    return (interfaceOrientation == UIInterfaceOrientationPortrait); 
} 

 
- (BOOL)gestureRecognizerShouldBegin:(UIGestureRecognizer *)gestureRecognizer 
{ 

    if ( [gestureRecognizer isKindOfClass:[UIPinchGestureRecognizer class]] ) { 
        beginGestureScale = effectiveScale; 
    } 
    return YES; 
} 

 
// scale image depending on the user's pinch gesture  
- (IBAction)handlePinchGesture:(UIPinchGestureRecognizer *)recognizer 
{ 

    BOOL allTouchesAreOnThePreviewLayer = YES; 
    NSUInteger numTouches = [recognizer numberOfTouches], i; 
    for ( i = 0; i < numTouches; ++i ) { 
        CGPoint location = [recognizer locationOfTouch:i inView:previewView]; 
        CGPoint convertedLocation = [previewLayer convertPoint:location fromLayer:previewLayer.superlayer]; 
        if ( ! [previewLayer containsPoint:convertedLocation] ) { 
            allTouchesAreOnThePreviewLayer = NO; 
            break; 
        } 
    } 
     
    if ( allTouchesAreOnThePreviewLayer ) { 
        effectiveScale = beginGestureScale * recognizer.scale; 
        if (effectiveScale < 1.0) 
            effectiveScale = 1.0; 
        CGFloat maxScaleAndCropFactor = [[stillImageOutput connectionWithMediaType:AVMediaTypeVideo] videoMaxScaleAndCropFactor]; 
        if (effectiveScale > maxScaleAndCropFactor) 
            effectiveScale = maxScaleAndCropFactor; 
        [CATransaction begin]; 
        [CATransaction setAnimationDuration:.025]; 
        [previewLayer setAffineTransform:CGAffineTransformMakeScale(effectiveScale, effectiveScale)]; 
        [CATransaction commit]; 
    } 
} 

 
@end 
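
The sample's takePicture: action is not included in the excerpt above. Purely as a rough sketch of how the pieces shown here fit together — the real sample additionally composites the red squares via newSquareOverlayedImageForFeatures:inCGImage:withOrientation:frontFacing: and writeCGImageToCameraRoll:withMetadata: when face detection is on — a minimal still-capture handler could orient the connection, capture a JPEG, and hand it to the assets library:

- (IBAction)takePicture:(id)sender 
{ 
    // Hypothetical sketch, not part of the excerpt above: align the still image connection 
    // with the current device orientation and the pinch-zoom scale used by the preview. 
    AVCaptureConnection *stillImageConnection = [stillImageOutput connectionWithMediaType:AVMediaTypeVideo]; 
    UIDeviceOrientation curDeviceOrientation = [[UIDevice currentDevice] orientation]; 
    AVCaptureVideoOrientation avcaptureOrientation = [self avOrientationForDeviceOrientation:curDeviceOrientation]; 
    [stillImageConnection setVideoOrientation:avcaptureOrientation]; 
    [stillImageConnection setVideoScaleAndCropFactor:effectiveScale]; 

    [stillImageOutput captureStillImageAsynchronouslyFromConnection:stillImageConnection 
                                                   completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) { 
        if (error) { 
            [self displayErrorOnMainQueue:error withMessage:@"Take picture failed"]; 
            return; 
        } 
        // Simplest (no overlay) path: write the JPEG data and its metadata straight to the camera roll. 
        NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer]; 
        CFDictionaryRef attachments = CMCopyDictionaryOfAttachments(kCFAllocatorDefault, 
                                                                    imageDataSampleBuffer, 
                                                                    kCMAttachmentMode_ShouldPropagate); 
        ALAssetsLibrary *library = [ALAssetsLibrary new]; 
        [library writeImageDataToSavedPhotosAlbum:jpegData 
                                         metadata:(NSDictionary *)attachments 
                                  completionBlock:^(NSURL *assetURL, NSError *err) { 
            if (attachments) 
                CFRelease(attachments); 
        }]; 
        [library release]; 
    }]; 
} 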
