Recently I've been working on something similar to a QR code scanner, which likewise needs to grab the image inside a particular region of the camera preview. Let me go straight to the most important pieces of code.
The code below initializes the AVFoundation capture part so the camera image can be displayed on the view. First, a brief rundown of the problems I ran into along the way and how I solved them.
1. How to punch a "hole" in a layer, which becomes the actual crop region. I used a CAShapeLayer with its fill rule set to even-odd; applied as a mask, it lets the coverLayer sitting on top of the previewLayer dim everything except the crop region.
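Roughly, the trick looks like the sketch below. It is not lifted verbatim from the full source further down; cropRect stands for the on-screen crop region and coverLayer for a dimming layer already added above the preview layer.

// Build an even-odd path: the crop rect plus the full bounds.
// With the even-odd fill rule the inner rect is left unfilled,
// so when used as a mask it becomes a transparent "hole".
CAShapeLayer *maskLayer = [CAShapeLayer layer];
CGMutablePathRef path = CGPathCreateMutable();
CGPathAddRect(path, NULL, cropRect);      // inner rect: the hole
CGPathAddRect(path, NULL, self.bounds);   // outer rect: the whole view
maskLayer.fillRule = kCAFillRuleEvenOdd;
maskLayer.path = path;
maskLayer.fillColor = [UIColor whiteColor].CGColor;
CGPathRelease(path);                      // the layer keeps its own copy
self.coverLayer.mask = maskLayer;         // dim everything except the hole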
2. Getting hold of the full frame image is easy: it arrives as the sampleBuffer in the delegate callback. I used AVCaptureVideoDataOutput here, which keeps delivering the sampled video stream frame by frame.
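Setting that up looks roughly like this minimal sketch, assuming session is an already-configured AVCaptureSession and self adopts AVCaptureVideoDataOutputSampleBufferDelegate; the queue label is arbitrary and error handling is omitted.

// Deliver BGRA frames to self on a serial queue.
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
output.alwaysDiscardsLateVideoFrames = YES;
output.videoSettings = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey :
                              @(kCVPixelFormatType_32BGRA) };
dispatch_queue_t queue = dispatch_queue_create("sample_queue", DISPATCH_QUEUE_SERIAL);
[output setSampleBufferDelegate:self queue:queue];
if ([session canAddOutput:output]) {
    [session addOutput:output];
}
// Each frame then arrives in
// -captureOutput:didOutputSampleBuffer:fromConnection: as a CMSampleBufferRef.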
3. Extracting the crop-region image from the full frame. This is where most of the time and effort went; for quite a while I could not get the cropped image right. I first tried cropping the CGImage directly (CGImageCreateWithImageInRect), but the resulting image had the wrong position and size. I then switched to drawing through a CGContext, which was still not quite right. After a lot of googling and head scratching the cause became clear: the preview layer scales the video according to its videoGravity, so the actual frame image is not the same size as the screen. Once that was settled, the fix was to compute, for each videoGravity mode, where the crop region really sits inside the frame image. That is what the calcRect method does: it maps the "hole" cut out on screen to the corresponding rect in the image.
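The core of that mapping, for the aspect-fit case only, goes roughly like the sketch below. The method name imageRectForCropRect:imageSize:screenSize: is made up for illustration; the real calcRect in the full source also handles aspect-fill.

// Map a crop rect given in preview-layer (screen) coordinates into
// image coordinates, assuming AVLayerVideoGravityResizeAspect.
- (CGRect)imageRectForCropRect:(CGRect)cropRect
                     imageSize:(CGSize)imageSize
                    screenSize:(CGSize)screenSize
{
    CGFloat screenRatio = screenSize.height / screenSize.width;
    CGFloat imageRatio  = imageSize.height  / imageSize.width;

    // Where the aspect-fit image actually appears on screen (letterboxed).
    CGRect presented = CGRectZero;
    if (screenRatio > imageRatio) {
        presented.size.width  = screenSize.width;
        presented.size.height = screenSize.width * imageRatio;
    } else {
        presented.size.height = screenSize.height;
        presented.size.width  = screenSize.height / imageRatio;
    }
    presented.origin = CGPointMake((screenSize.width  - presented.size.width)  / 2.0,
                                   (screenSize.height - presented.size.height) / 2.0);

    // Convert screen points to image pixels.
    CGFloat scale = presented.size.width / imageSize.width;
    CGRect rect = cropRect;
    rect.origin.x = (CGRectGetMinX(cropRect) - CGRectGetMinX(presented)) / scale;
    rect.origin.y = (CGRectGetMinY(cropRect) - CGRectGetMinY(presented)) / scale;
    rect.size.width  /= scale;
    rect.size.height /= scale;
    return rect;
}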
It finally works. The full source is below if you're interested.
//
//  ScanView.m
//  xxoo
//
//  Created by Tommy on 13-11-6.
//  Copyright (c) 2013 Tommy. All rights reserved.
//

#import "ScanView.h"
#import <AVFoundation/AVFoundation.h>

static inline double radians (double degrees) { return degrees * M_PI / 180; }

@interface ScanView () <AVCaptureVideoDataOutputSampleBufferDelegate>

@property AVCaptureVideoPreviewLayer* previewLayer;
@property AVCaptureSession* session;
@property AVCaptureDevice* videoDevice;
@property dispatch_queue_t camera_sample_queue;
@property CALayer* coverLayer;
@property CAShapeLayer* cropLayer;
@property CALayer* stillImageLayer;
@property AVCaptureStillImageOutput* stillImageOutput;
@property UIImageView* stillImageView;
@property UIImage* cropImage;
@property BOOL hasSetFocus;

@end

@implementation ScanView

- (id)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if (self) {
        // Initialization code
        self.hasSetFocus = NO;
        [self initAVCaptuer];
        [self initOtherLayers];
    }
    return self;
}

/*
// Only override drawRect: if you perform custom drawing.
// An empty implementation adversely affects performance during animation.
- (void)drawRect:(CGRect)rect
{
    // Drawing code
}
*/

- (void)layoutSubviews
{
    [self.previewLayer setFrame:self.bounds];
    [self.coverLayer setFrame:self.bounds];
    self.coverLayer.mask = self.cropLayer;
}

// Set up the capture session: camera input, video data output for the
// sample stream, still image output, and the preview layer.
- (void)initAVCaptuer
{
    self.cropRect = CGRectZero;

    self.videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput* input = [[AVCaptureDeviceInput alloc] initWithDevice:self.videoDevice error:nil];

    AVCaptureVideoDataOutput* output = [[AVCaptureVideoDataOutput alloc] init];
    output.alwaysDiscardsLateVideoFrames = YES;
    self.camera_sample_queue = dispatch_queue_create(".sample_queue", DISPATCH_QUEUE_SERIAL);
    [output setSampleBufferDelegate:self queue:self.camera_sample_queue];

    NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
    NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [output setVideoSettings:videoSettings];

    self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary* outputSettings = @{AVVideoCodecKey: AVVideoCodecJPEG};
    [self.stillImageOutput setOutputSettings:outputSettings];

    self.session = [[AVCaptureSession alloc] init];
    self.session.sessionPreset = AVCaptureSessionPresetMedium;

    if ([self.session canAddInput:input]) {
        [self.session addInput:input];

        if ([self.session canAddOutput:output]) {
            [self.session addOutput:self.stillImageOutput];
            [self.session addOutput:output];

            self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
            self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspect;

            [self.layer addSublayer:self.previewLayer];

            return; // success
        }
    }

    self.session = nil;
}

// Rebuild the even-odd mask whenever the crop rect changes.
- (void)setCropRect:(CGRect)cropRect
{
    _cropRect = cropRect;
    if (!CGRectEqualToRect(CGRectZero, self.cropRect)) {

        self.cropLayer = [[CAShapeLayer alloc] init];
        CGMutablePathRef path = CGPathCreateMutable();

        CGPathAddRect(path, nil, self.cropRect);
        CGPathAddRect(path, nil, self.bounds);

        [self.cropLayer setFillRule:kCAFillRuleEvenOdd];
        [self.cropLayer setPath:path];
        [self.cropLayer setFillColor:[[UIColor whiteColor] CGColor]];

        [self.cropLayer setNeedsDisplay];

        //[self setVideoFocus];
    }

    [self.stillImageLayer setFrame:CGRectMake(100, 450, CGRectGetWidth(cropRect), CGRectGetHeight(cropRect))];
}

// Focus once on the centre of the crop rect.
- (void)setVideoFocus
{
    NSError *error;
    CGPoint foucsPoint = CGPointMake(CGRectGetMidX(self.cropRect), CGRectGetMidY(self.cropRect));
    if ([self.videoDevice isFocusPointOfInterestSupported]
        && [self.videoDevice lockForConfiguration:&error]
        && !self.hasSetFocus) {
        self.hasSetFocus = YES;
        [self.videoDevice setFocusPointOfInterest:[self convertToPointOfInterestFromViewCoordinates:foucsPoint]];
        [self.videoDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
        [self.videoDevice unlockForConfiguration];
    }
    // [self.videoDevice setFocusMode:AVCaptureFocusModeAutoFocus];
    NSLog(@"error:%@", error);
}

// Convert a point in view coordinates to a camera point of interest
// ((0,0) top-left to (1,1) bottom-right of the sensor), taking the
// preview layer's videoGravity into account.
- (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates
{
    CGPoint pointOfInterest = CGPointMake(.5f, .5f);
    CGSize frameSize = self.frame.size;

    AVCaptureVideoPreviewLayer *videoPreviewLayer = self.previewLayer;

    if ([videoPreviewLayer isMirrored]) {
        viewCoordinates.x = frameSize.width - viewCoordinates.x;
    }

    if ([[videoPreviewLayer videoGravity] isEqualToString:AVLayerVideoGravityResize]) {
        pointOfInterest = CGPointMake(viewCoordinates.y / frameSize.height, 1.f - (viewCoordinates.x / frameSize.width));
    } else {
        CGRect cleanAperture;
        for (AVCaptureInputPort *port in [[[[self session] inputs] lastObject] ports]) {
            if ([port mediaType] == AVMediaTypeVideo) {
                cleanAperture = CMVideoFormatDescriptionGetCleanAperture([port formatDescription], YES);
                CGSize apertureSize = cleanAperture.size;
                CGPoint point = viewCoordinates;

                CGFloat apertureRatio = apertureSize.height / apertureSize.width;
                CGFloat viewRatio = frameSize.width / frameSize.height;
                CGFloat xc = .5f;
                CGFloat yc = .5f;

                if ([[videoPreviewLayer videoGravity] isEqualToString:AVLayerVideoGravityResizeAspect]) {
                    if (viewRatio > apertureRatio) {
                        CGFloat y2 = frameSize.height;
                        CGFloat x2 = frameSize.height * apertureRatio;
                        CGFloat x1 = frameSize.width;
                        CGFloat blackBar = (x1 - x2) / 2;
                        if (point.x >= blackBar && point.x <= blackBar + x2) {
                            xc = point.y / y2;
                            yc = 1.f - ((point.x - blackBar) / x2);
                        }
                    } else {
                        CGFloat y2 = frameSize.width / apertureRatio;
                        CGFloat y1 = frameSize.height;
                        CGFloat x2 = frameSize.width;
                        CGFloat blackBar = (y1 - y2) / 2;
                        if (point.y >= blackBar && point.y <= blackBar + y2) {
                            xc = ((point.y - blackBar) / y2);
                            yc = 1.f - (point.x / x2);
                        }
                    }
                } else if ([[videoPreviewLayer videoGravity] isEqualToString:AVLayerVideoGravityResizeAspectFill]) {
                    if (viewRatio > apertureRatio) {
                        CGFloat y2 = apertureSize.width * (frameSize.width / apertureSize.height);
                        xc = (point.y + ((y2 - frameSize.height) / 2.f)) / y2;
                        yc = (frameSize.width - point.x) / frameSize.width;
                    } else {
                        CGFloat x2 = apertureSize.height * (frameSize.height / apertureSize.width);
                        yc = 1.f - ((point.x + ((x2 - frameSize.width) / 2)) / x2);
                        xc = point.y / frameSize.height;
                    }
                }

                pointOfInterest = CGPointMake(xc, yc);
                break;
            }
        }
    }

    return pointOfInterest;
}

// Create the dimming cover layer, the (optional) crop mask, and the
// layers/views used to show the captured and cropped images.
- (void)initOtherLayers
{
    self.coverLayer = [CALayer layer];
    self.coverLayer.backgroundColor = [[[UIColor blackColor] colorWithAlphaComponent:0.6] CGColor];
    [self.layer addSublayer:self.coverLayer];

    if (!CGRectEqualToRect(CGRectZero, self.cropRect)) {

        self.cropLayer = [[CAShapeLayer alloc] init];
        CGMutablePathRef path = CGPathCreateMutable();

        CGPathAddRect(path, nil, self.cropRect);
        CGPathAddRect(path, nil, self.bounds);

        [self.cropLayer setFillRule:kCAFillRuleEvenOdd];
        [self.cropLayer setPath:path];
        [self.cropLayer setFillColor:[[UIColor redColor] CGColor]];
    }

    self.stillImageLayer = [CALayer layer];
    self.stillImageLayer.backgroundColor = [[UIColor yellowColor] CGColor];
    self.stillImageLayer.contentsGravity = kCAGravityResizeAspect;
    [self.coverLayer addSublayer:self.stillImageLayer];

    self.stillImageView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 300, 100, 100)];
    self.stillImageView.backgroundColor = [UIColor redColor];
    self.stillImageView.contentMode = UIViewContentModeScaleAspectFit;
    [self addSubview:self.stillImageView];

    self.stillImageView.layer.contentsGravity = kCAGravityResizeAspect;
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    [self setVideoFocus];

    UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
    self.cropImage = [self cropImageInRect:image];

    dispatch_async(dispatch_get_main_queue(), ^{
        [self.stillImageView setImage:image];
        // [self.stillImageLayer setContents:(id)[self.cropImage CGImage]];
    });
}

// Create a UIImage from sample buffer data
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // Get a CMSampleBuffer's Core Video image buffer for the media data
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Get the base address of the pixel buffer
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);

    // Get the number of bytes per row of the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    // Get the pixel buffer width and height
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    //NSLog(@"%zu,%zu",width,height);

    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Create a bitmap graphics context from the sample buffer data
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    // Create a Quartz image from the pixel data in the bitmap graphics context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // Release the context and color space
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // Create a UIImage from the Quartz image, rotated to match portrait orientation
    //UIImage *image = [UIImage imageWithCGImage:quartzImage];
    UIImage *image = [UIImage imageWithCGImage:quartzImage scale:1.0f orientation:UIImageOrientationRight];

    // Release the Quartz image
    CGImageRelease(quartzImage);

    return (image);
}

// Map the on-screen crop rect into the coordinate space of the captured
// image, taking the preview layer's videoGravity into account.
- (CGRect)calcRect:(CGSize)imageSize
{
    NSString* gravity = self.previewLayer.videoGravity;
    CGRect cropRect = self.cropRect;
    CGSize screenSize = self.bounds.size;

    CGFloat screenRatio = screenSize.height / screenSize.width;
    CGFloat imageRatio = imageSize.height / imageSize.width;

    CGRect presentImageRect = CGRectZero;
    CGFloat scale = 1.0;

    if ([AVLayerVideoGravityResizeAspect isEqual:gravity]) {

        CGFloat presentImageWidth = imageSize.width;
        CGFloat presentImageHeigth = imageSize.height;
        if (screenRatio > imageRatio) {
            presentImageWidth = screenSize.width;
            presentImageHeigth = presentImageWidth * imageRatio;
        } else {
            presentImageHeigth = screenSize.height;
            presentImageWidth = presentImageHeigth / imageRatio;
        }

        presentImageRect.size = CGSizeMake(presentImageWidth, presentImageHeigth);
        presentImageRect.origin = CGPointMake((screenSize.width - presentImageWidth) / 2.0,
                                              (screenSize.height - presentImageHeigth) / 2.0);

    } else if ([AVLayerVideoGravityResizeAspectFill isEqual:gravity]) {

        CGFloat presentImageWidth = imageSize.width;
        CGFloat presentImageHeigth = imageSize.height;
        if (screenRatio > imageRatio) {
            presentImageHeigth = screenSize.height;
            presentImageWidth = presentImageHeigth / imageRatio;
        } else {
            presentImageWidth = screenSize.width;
            presentImageHeigth = presentImageWidth * imageRatio;
        }

        presentImageRect.size = CGSizeMake(presentImageWidth, presentImageHeigth);
        presentImageRect.origin = CGPointMake((screenSize.width - presentImageWidth) / 2.0,
                                              (screenSize.height - presentImageHeigth) / 2.0);

    } else {
        NSAssert(0, @"dont support:%@", gravity);
    }

    scale = CGRectGetWidth(presentImageRect) / imageSize.width;

    CGRect rect = cropRect;
    rect.origin = CGPointMake(CGRectGetMinX(cropRect) - CGRectGetMinX(presentImageRect),
                              CGRectGetMinY(cropRect) - CGRectGetMinY(presentImageRect));

    rect.origin.x /= scale;
    rect.origin.y /= scale;
    rect.size.width /= scale;
    rect.size.height /= scale;

    return rect;
}

#define SUBSET_SIZE 360

// Draw the frame into a grayscale bitmap context so that only the crop
// region (scaled to roughly SUBSET_SIZE) is kept.
- (UIImage*)cropImageInRect:(UIImage*)image
{
    CGSize size = [image size];
    CGRect cropRect = [self calcRect:size];

    float scale = fminf(1.0f, fmaxf(SUBSET_SIZE / cropRect.size.width, SUBSET_SIZE / cropRect.size.height));
    CGPoint offset = CGPointMake(-cropRect.origin.x, -cropRect.origin.y);

    size_t subsetWidth = cropRect.size.width * scale;
    size_t subsetHeight = cropRect.size.height * scale;

    CGColorSpaceRef grayColorSpace = CGColorSpaceCreateDeviceGray();

    CGContextRef ctx = CGBitmapContextCreate(nil, subsetWidth, subsetHeight, 8, 0, grayColorSpace,
                                             kCGImageAlphaNone | kCGBitmapByteOrderDefault);
    CGColorSpaceRelease(grayColorSpace);
    CGContextSetInterpolationQuality(ctx, kCGInterpolationNone);
    CGContextSetAllowsAntialiasing(ctx, false);

    // adjust the coordinate system
    CGContextTranslateCTM(ctx, 0.0, subsetHeight);
    CGContextScaleCTM(ctx, 1.0, -1.0);

    UIGraphicsPushContext(ctx);
    CGRect rect = CGRectMake(offset.x * scale, offset.y * scale, scale * size.width, scale * size.height);

    [image drawInRect:rect];

    UIGraphicsPopContext();

    CGContextFlush(ctx);

    CGImageRef subsetImageRef = CGBitmapContextCreateImage(ctx);

    UIImage* subsetImage = [UIImage imageWithCGImage:subsetImageRef];

    CGImageRelease(subsetImageRef);
    CGContextRelease(ctx);

    return subsetImage;
}

- (void)start
{
    dispatch_sync(self.camera_sample_queue, ^{
        [self.session startRunning];
    });
}

- (void)stop
{
    if (self.session) {
        [self.session stopRunning];
    }
}

@end
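For reference, a hypothetical way to drive the view from a controller. The frame and cropRect values are placeholders, and since ScanView's header isn't shown in this post, the exact public interface here is an assumption.

// Somewhere in a view controller:
ScanView *scanView = [[ScanView alloc] initWithFrame:self.view.bounds];
[self.view addSubview:scanView];
scanView.cropRect = CGRectMake(40, 120, 240, 240);  // the on-screen "hole"
[scanView start];                                   // begin the capture session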