How to convert a kCVPixelFormatType_420YpCbCr8BiPlanarFullRange buffer to a UIImage in iOS

I tried to answer this in the original post, but it would not let me. Hopefully someone with more authority can merge this into the original question.

OK, here is a more complete answer. First, set up the capture:

// Create capture session
self.captureSession = [[AVCaptureSession alloc] init];

[self.captureSession setSessionPreset:AVCaptureSessionPresetPhoto];

// Setup capture input
self.inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput deviceInputWithDevice:self.inputDevice
                                                                           error:nil];
[self.captureSession addInput:captureInput];

// Setup video processing (capture output)
AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
// Don't add frames to the queue if frames are already processing
captureOutput.alwaysDiscardsLateVideoFrames = YES;

// Create a serial queue to handle processing of frames
_videoQueue = dispatch_queue_create("cameraQueue",NULL);
[captureOutput setSampleBufferDelegate:self queue:_videoQueue];

// Set the video output to store frame in YUV
NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;

NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
[captureOutput setVideoSettings:videoSettings];
[self.captureSession addOutput:captureOutput];
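
One step the snippet above leaves out is actually starting the session; nothing is delivered to the delegate until it runs. A minimal sketch, assuming it is called once the setup above has finished (for example at the end of viewDidLoad):

// Start the flow of frames to the sample buffer delegate
[self.captureSession startRunning];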

Now for the implementation of the delegate/callback:

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
   fromConnection:(AVCaptureConnection *)connection
{

// Create autorelease pool because we are not in the main_queue
@autoreleasepool {

    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    //Lock the imagebuffer
    CVPixelBufferLockBaseAddress(imageBuffer,0);

    // Get information about the image
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);

    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

    CVPlanarPixelBufferInfo_YCbCrBiPlanar *bufferInfo = (CVPlanarPixelBufferInfo_YCbCrBiPlanar *)baseAddress;

    // This just moved the pointer past the offset
    baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer,0);


    // convert the image
    _prefImageView.image = [self makeUIImage:baseAddress bufferInfo:bufferInfo width:width height:height bytesPerRow:bytesPerRow];

    // Update the display with the captured image for DEBUG purposes
    dispatch_async(dispatch_get_main_queue(),^{
        [_myMainView.yUVImage setImage:_prefImageView.image];
    });        
    // Unlock the image buffer now that we are done reading from it
    CVPixelBufferUnlockBaseAddress(imageBuffer,0);
    }
}

Finally, here is the method that converts from YUV to a UIImage:

- (UIImage *)makeUIImage:(uint8_t *)inBaseAddress bufferInfo:(CVPlanarPixelBufferInfo_YCbCrBiPlanar *)inBufferInfo width:(size_t)inWidth height:(size_t)inHeight bytesPerRow:(size_t)inBytesPerRow {

NSUInteger yPitch = EndianU32_BtoN(inBufferInfo->componentInfoY.rowBytes);

uint8_t *rgbBuffer = (uint8_t *)malloc(inWidth * inHeight * 4);
uint8_t *yBuffer = (uint8_t *)inBaseAddress;
uint8_t val;
int bytesPerPixel = 4;

// For each byte in the input buffer, fill in the output buffer with four bytes:
// the first byte is the Alpha channel, then the next three contain the same
// value of the input buffer
for(int y = 0; y < inHeight*inWidth; y++)
{
    val = yBuffer[y];
    // Alpha channel
    rgbBuffer[(y*bytesPerPixel)] = 0xff;

    // next three bytes same as input
    rgbBuffer[(y*bytesPerPixel)+1] = rgbBuffer[(y*bytesPerPixel)+2] =  rgbBuffer[y*bytesPerPixel+3] = val;
}

// Create a device-dependent RGB color space
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

CGContextRef context = CGBitmapContextCreate(rgbBuffer, yPitch, inHeight, 8, yPitch*bytesPerPixel, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedLast);

CGImageRef quartzImage = CGBitmapContextCreateImage(context);

CGContextRelease(context);
CGColorSpaceRelease(colorSpace);

UIImage *image = [UIImage imageWithCGImage:quartzImage];

CGImageRelease(quartzImage);
free(rgbBuffer);
return  image;
}

You will also need to #import "Endian.h".

Note that the call to CGBitmapContextCreate was much trickier than I expected. I am not very savvy with video processing, and this call confused me for a while. Then, when it finally worked, it was like magic.
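For reference, here is the same call annotated parameter by parameter; this is just a restatement of the line above, not a change to it:

// CGBitmapContextCreate, with each argument labelled
CGContextRef context = CGBitmapContextCreate(
    rgbBuffer,                // data: the RGB buffer filled in the loop above
    yPitch,                   // width in pixels (here the Y plane pitch is used)
    inHeight,                 // height in pixels
    8,                        // bits per component (one byte each for A, R, G, B)
    yPitch * bytesPerPixel,   // bytes per row of the data buffer
    colorSpace,               // the device RGB color space created above
    kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedLast); // pixel layout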

Solution

Background info: @Michaelg's version only accesses the Y buffer, so you only get luminance and not color. It also has a buffer overrun bug if the pitch in the buffers and the number of pixels do not match (padding bytes at the end of a line for whatever reason). The background on what is happening here is that this is a planar image format, which allocates one byte per pixel for luminance and two bytes per four pixels for color information. Rather than being stored contiguously in memory, these are stored as "planes", where the Y (luminance) plane has its own block of memory and the CbCr (color) plane also has its own block of memory. The CbCr plane has one quarter the number of samples of the Y plane (half the height and half the width), and each pixel in the CbCr plane corresponds to a 2x2 block in the Y plane. Hopefully this background helps.
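
To make that layout concrete, here is a small illustrative sketch (not part of the answer's code) of how a single pixel at (x, y) maps into the two planes; yBase/cbCrBase and yPitch/cbCrPitch are assumed to be the plane base addresses and row strides in bytes:

// Illustrative only: read the luma and chroma samples for pixel (x, y)
// of a bi-planar 4:2:0 buffer.
static void SamplePixel420(const uint8_t *yBase, size_t yPitch,
                           const uint8_t *cbCrBase, size_t cbCrPitch,
                           size_t x, size_t y,
                           uint8_t *outY, uint8_t *outCb, uint8_t *outCr)
{
    *outY  = yBase[y * yPitch + x];                    // one byte per pixel in the Y plane
    *outCb = cbCrBase[(y / 2) * cbCrPitch + (x & ~1)]; // Cb and Cr are interleaved and
    *outCr = cbCrBase[(y / 2) * cbCrPitch + (x | 1)];  // shared by each 2x2 block of Y pixels
}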

Edit: Both his version and my older version had the potential to overrun buffers and would not work if the rows in the image buffer have padding bytes at the end of each row. Furthermore, my CbCr plane buffer was not created with the correct offset. To do this correctly you should always use the Core Video functions such as CVPixelBufferGetWidthOfPlane and CVPixelBufferGetBaseAddressOfPlane. This ensures that you are interpreting the buffer correctly, and it will work regardless of whether the buffer has a header and regardless of whether you get the pointer math wrong. You should use the row sizes and the buffer base addresses from Apple's functions. These are documented at: https://developer.apple.com/library/prerelease/ios/documentation/QuartzCore/Reference/CVPixelBufferRef/index.html Note that while this version makes some use of Apple's functions and some use of the header, it is best to use only Apple's functions. I may update this in the future to not use the header at all.
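
For example, the plane geometry can be read directly from the pixel buffer rather than from the header struct. A minimal sketch of that approach, assuming imageBuffer is the CVImageBufferRef from the delegate callback:

CVPixelBufferLockBaseAddress(imageBuffer, 0);

// Per-plane geometry straight from Core Video, no header struct needed
uint8_t *yBase     = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
size_t   yPitch    = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
size_t   width     = CVPixelBufferGetWidthOfPlane(imageBuffer, 0);
size_t   height    = CVPixelBufferGetHeightOfPlane(imageBuffer, 0);

uint8_t *cbCrBase  = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1);
size_t   cbCrPitch = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 1);

// ... convert using yBase/yPitch and cbCrBase/cbCrPitch ...

CVPixelBufferUnlockBaseAddress(imageBuffer, 0);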

This will convert your kCVPixelFormatType_420YpCbCr8BiPlanarFullRange buffer into a UIImage that you can then use.

First, the implementation of the delegate/callback:

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
   fromConnection:(AVCaptureConnection *)connection
{

// Create autorelease pool because we are not in the main_queue
@autoreleasepool {

    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    //Lock the imagebuffer
    CVPixelBufferLockBaseAddress(imageBuffer,0);

    // Get information about the image
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);

    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

    CVPlanarPixelBufferInfo_YCbCrBiPlanar *bufferInfo = (CVPlanarPixelBufferInfo_YCbCrBiPlanar *)baseAddress;

    // Get the CbCr plane base address
    uint8_t *cbrBuff = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer,1);

    // This just moves the pointer past the offset
    baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer,0);

    // Convert the image
    _prefImageView.image = [self makeUIImage:baseAddress cBCrBuffer:cbrBuff bufferInfo:bufferInfo width:width height:height bytesPerRow:bytesPerRow];

    // Update the display with the captured image for DEBUG purposes
    dispatch_async(dispatch_get_main_queue(),^{
        [_myMainView.yUVImage setImage:_prefImageView.image];
    });

    // Unlock the image buffer now that we are done reading from it
    CVPixelBufferUnlockBaseAddress(imageBuffer,0);
    }
}

Finally, here is the method that converts from YUV to a UIImage:

- (UIImage *)makeUIImage:(uint8_t *)inBaseAddress cBCrBuffer:(uint8_t*)cbCrBuffer bufferInfo:(CVPlanarPixelBufferInfo_YCbCrBiPlanar *)inBufferInfo width:(size_t)inWidth height:(size_t)inHeight bytesPerRow:(size_t)inBytesPerRow {

    NSUInteger yPitch = EndianU32_BtoN(inBufferInfo->componentInfoY.rowBytes);
    NSUInteger cbCrOffset = EndianU32_BtoN(inBufferInfo->componentInfoCbCr.offset);
    NSUInteger cbCrPitch = EndianU32_BtoN(inBufferInfo->componentInfoCbCr.rowBytes);

    uint8_t *rgbBuffer = (uint8_t *)malloc(inWidth * inHeight * 4);
    uint8_t *yBuffer = (uint8_t *)inBaseAddress;
    //uint8_t *cbCrBuffer = inBaseAddress + cbCrOffset;
    int bytesPerPixel = 4;

    for(int y = 0; y < inHeight; y++)
    {
        uint8_t *rgbBufferLine = &rgbBuffer[y * inWidth * bytesPerPixel];
        uint8_t *yBufferLine = &yBuffer[y * yPitch];
        uint8_t *cbCrBufferLine = &cbCrBuffer[(y >> 1) * cbCrPitch];

        for(int x = 0; x < inWidth; x++)
        {
            int16_t y = yBufferLine[x];
            int16_t cb = cbCrBufferLine[x & ~1] - 128;
            int16_t cr = cbCrBufferLine[x | 1] - 128;

            uint8_t *rgbOutput = &rgbBufferLine[x*bytesPerPixel];

            int16_t r = (int16_t)roundf( y + cr *  1.4 );
            int16_t g = (int16_t)roundf( y + cb * -0.343 + cr * -0.711 );
            int16_t b = (int16_t)roundf( y + cb *  1.765);

            // ABGR
            rgbOutput[0] = 0xff;
            rgbOutput[1] = clamp(b);
            rgbOutput[2] = clamp(g);
            rgbOutput[3] = clamp(r);
        }
    }

    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    NSLog(@"yPitch:%lu inHeight:%zu bytesPerPixel:%d", (unsigned long)yPitch, inHeight, bytesPerPixel);
    NSLog(@"cbCrPitch:%lu", (unsigned long)cbCrPitch);
    CGContextRef context = CGBitmapContextCreate(rgbBuffer, inWidth, inHeight, 8, inWidth * bytesPerPixel, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedLast);

    CGImageRef quartzImage = CGBitmapContextCreateImage(context);

    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    UIImage *image = [UIImage imageWithCGImage:quartzImage];

    CGImageRelease(quartzImage);
    free(rgbBuffer);
    return image;
}

You will also need to #import "Endian.h" and the define #define clamp(a) (a > 255 ? 255 : (a < 0 ? 0 : a)). Note that it works like magic.
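
Spelled out at the top of the implementation file, and with the macro argument parenthesized so it is safe to pass expressions, those two prerequisites look like this (a sketch of the same requirements, not new functionality):

#import "Endian.h"

// Clamp an intermediate value into the 0...255 range of a color component
#define clamp(a) ((a) > 255 ? 255 : ((a) < 0 ? 0 : (a)))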
