#pragma mark - Buffer rotation / image conversion
- (UIImage *)convertSampleBufferToUIImage:(CMSampleBufferRef)sampleBuffer {
    // Get a CMSampleBuffer's Core Video image buffer for the media data
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    // Get the base address of plane 0 (the luma plane of a YCbCr buffer)
    void *baseAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
    // Get the number of bytes per row for the plane
    size_t bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
    // Get the pixel buffer width and height
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    // Create a device-dependent gray color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceGray();
    // Create a bitmap graphics context with the sample buffer data
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace, kCGImageAlphaNone);
    // Create a Quartz image from the pixel data in the bitmap graphics context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    // Free up the context and color space
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    // Create an image object from the Quartz image, rotated to portrait
    //UIImage *image = [UIImage imageWithCGImage:quartzImage];
    UIImage *image = [UIImage imageWithCGImage:quartzImage scale:1.0 orientation:UIImageOrientationRight];
    // Release the Quartz image
    CGImageRelease(quartzImage);
    return image;
}

- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    // Get the CMSampleBuffer's Core Video image buffer for the media data
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    // Get the base address of the pixel buffer
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    // Get the number of bytes per row of the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    // Get the pixel buffer width and height
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // Create a bitmap graphics context from the sample buffer data (BGRA layout)
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    // Create a Quartz image from the pixel data in this bitmap context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    // Release the context and color space
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    // Create a UIImage from the Quartz image
    //UIImage *image = [UIImage imageWithCGImage:quartzImage];
    UIImage *image = nil;
    if (_input.device.position == AVCaptureDevicePositionFront) {
        // Front-camera frames arrive mirrored; compensate while rotating to portrait
        image = [UIImage imageWithCGImage:quartzImage scale:1.0 orientation:UIImageOrientationLeftMirrored];
    } else {
        image = [UIImage imageWithCGImage:quartzImage scale:1.0 orientation:UIImageOrientationRight];
    }
    // Release the Quartz image
    CGImageRelease(quartzImage);
    return image;
}
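A minimal usage sketch, not part of the original code: the BGRA path above only works when the AVCaptureVideoDataOutput is configured to vend kCVPixelFormatType_32BGRA buffers, so the snippet below wires up the output and calls -imageFromSampleBuffer: from the sample-buffer delegate callback. The queue label and the _previewImageView ivar are hypothetical; _input is the AVCaptureDeviceInput ivar already referenced above.

// A sketch, assuming self owns the AVCaptureSession and conforms to
// AVCaptureVideoDataOutputSampleBufferDelegate. Configure the output so the
// sample buffers arrive in the 32BGRA format -imageFromSampleBuffer: expects.
- (void)configureVideoOutput:(AVCaptureSession *)session {
    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    output.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey :
                                  @(kCVPixelFormatType_32BGRA) };
    [output setSampleBufferDelegate:self
                              queue:dispatch_queue_create("video.buffer.queue", // hypothetical label
                                                          DISPATCH_QUEUE_SERIAL)];
    if ([session canAddOutput:output]) {
        [session addOutput:output];
    }
}

// Delegate callback: convert each frame and hand the UIImage to the main
// thread for display.
- (void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
    dispatch_async(dispatch_get_main_queue(), ^{
        _previewImageView.image = image; // hypothetical UIImageView ivar
    });
}

The grayscale variant, by contrast, assumes a planar YCbCr pixel format such as kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, since it builds its image from plane 0, the luma plane.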