获取 AVCaptureSession sampleBuffer 中每个像素的 RGB 值
typedef unsigned char byte;

// One pixel in a 3-bytes-per-pixel RGB buffer (e.g. kCVPixelFormatType_24RGB).
// NOTE(review): this 3-byte layout does NOT match a 32BGRA buffer, which is
// 4 bytes per pixel — casting a BGRA base address to RGBPixel* and indexing
// it drifts out of alignment after the first pixel. The loop below therefore
// reads raw bytes instead of going through this struct.
typedef struct RGBPixel {
    byte red, green, blue;
} RGBPixel;

/// AVCaptureVideoDataOutput delegate callback: logs the R/G/B components of
/// every pixel in the captured frame.
/// NOTE(review): assumes the output's videoSettings request
/// kCVPixelFormatType_32BGRA (4 bytes per pixel, byte order B,G,R,A) — the
/// common configuration for this snippet; confirm against the session setup.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (imageBuffer == NULL) {
        return;  // audio sample or dropped frame — nothing to read
    }

    // The base address is only valid while the buffer is locked, so the lock
    // must be held for the whole read (the original unlocked before reading).
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    uint8_t *src_buff = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);

    // size_t (not int): bytesPerRow * height can exceed INT_MAX for large
    // frames. Walk one 4-byte BGRA pixel per iteration; the i + 3 bound
    // guards the last (possibly padded) row.
    size_t len = bytesPerRow * height;
    for (size_t i = 0; i + 3 < len; i += 4) {
        int b = src_buff[i];
        int g = src_buff[i + 1];
        int r = src_buff[i + 2];
        int a = src_buff[i + 3];
        (void)a;  // camera frames are opaque; alpha kept only for reference
        NSLog(@"pixel values = r:%d g:%d b:%d", r, g, b);
    }

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}
如果像素格式是 BGRA(kCVPixelFormatType_32BGRA),结构体字段顺序应改为:
// Field order for a BGRA pixel buffer: the blue byte comes first in memory.
// NOTE(review): this redefines the RGBPixel typedef above — only one of the
// two definitions can be compiled into a translation unit; pick the one that
// matches the session's pixel format. Also note BGRA buffers carry a fourth
// (alpha) byte per pixel that this 3-byte struct does not cover.
typedef struct RGBPixel{
byte blue, green, red;
} RGBPixel;
参考:http://iphonedevsdk.com/forum/iphone-sdk-development/