  • Month 43, Day 11: Converting kCVPixelFormatType_420YpCbCr8BiPlanarFullRange to RGB

    1. Converting an NV12 sample buffer (bi-planar Y plane plus interleaved CbCr plane) to a UIImage on the CPU, following the Stack Overflow answer linked below:

    #import <UIKit/UIKit.h>
    #import <CoreMedia/CoreMedia.h>

    // Clamp a value into the valid 0...255 byte range.
    #define clamp(a) ((a) > 255 ? 255 : ((a) < 0 ? 0 : (a)))
    
    - (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferLockBaseAddress(imageBuffer, 0);
    
        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);
        uint8_t *yBuffer = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
        size_t yPitch = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
        uint8_t *cbCrBuffer = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1);
        size_t cbCrPitch = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 1);
    
        int bytesPerPixel = 4;
        uint8_t *rgbBuffer = (uint8_t *)malloc(width * height * bytesPerPixel);
    
        for (int y = 0; y < (int)height; y++) {
            uint8_t *rgbBufferLine = &rgbBuffer[y * width * bytesPerPixel];
            uint8_t *yBufferLine = &yBuffer[y * yPitch];
            // The CbCr plane is subsampled vertically: two Y rows share one chroma row.
            uint8_t *cbCrBufferLine = &cbCrBuffer[(y >> 1) * cbCrPitch];
    
            for (int x = 0; x < (int)width; x++) {
                int16_t luma = yBufferLine[x];
                // Cb and Cr are interleaved and shared by two horizontal pixels:
                // x & ~1 picks the Cb byte of the pair, x | 1 the Cr byte.
                int16_t cb = cbCrBufferLine[x & ~1] - 128;
                int16_t cr = cbCrBufferLine[x | 1] - 128;
    
                uint8_t *rgbOutput = &rgbBufferLine[x * bytesPerPixel];
    
                // Approximate full-range BT.601 conversion (the exact
                // coefficients are 1.402, -0.344, -0.714 and 1.772).
                int16_t r = (int16_t)roundf( luma + cr *  1.4 );
                int16_t g = (int16_t)roundf( luma + cb * -0.343 + cr * -0.711 );
                int16_t b = (int16_t)roundf( luma + cb *  1.765 );
    
                // Memory layout X,B,G,R matches the
                // kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipLast context below.
                rgbOutput[0] = 0xff;
                rgbOutput[1] = clamp(b);
                rgbOutput[2] = clamp(g);
                rgbOutput[3] = clamp(r);
            }
        }
    
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef context = CGBitmapContextCreate(rgbBuffer, width, height, 8, width * bytesPerPixel, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipLast);
        CGImageRef quartzImage = CGBitmapContextCreateImage(context);
        UIImage *image = [UIImage imageWithCGImage:quartzImage];
    
        CGContextRelease(context);
        CGColorSpaceRelease(colorSpace);
        CGImageRelease(quartzImage);
        free(rgbBuffer);
    
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    
        return image;
    }

    https://stackoverflow.com/questions/8838481/kcvpixelformattype-420ypcbcr8biplanarfullrange-frame-to-uiimage-conversion
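
    The per-pixel loop above is fine for occasional snapshots but slow at camera frame rates. Accelerate's vImage ships a tuned converter for exactly this bi-planar 4:2:0 layout; below is a minimal sketch, assuming the buffer really is full-range BT.601 as the pixel format name suggests, with error handling elided:

    #import <Accelerate/Accelerate.h>

    // Sketch: convert an NV12 (420Yp8_CbCr8) CVPixelBuffer into a tightly
    // packed ARGB8888 buffer supplied by the caller (width * height * 4 bytes).
    static void ConvertNV12ToARGB(CVPixelBufferRef pixelBuffer, uint8_t *argbOut) {
        CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

        vImage_Buffer srcYp = {
            .data     = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0),
            .height   = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0),
            .width    = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0),
            .rowBytes = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0),
        };
        vImage_Buffer srcCbCr = {
            .data     = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1),
            .height   = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1),
            .width    = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1),
            .rowBytes = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1),
        };
        vImage_Buffer dest = {
            .data     = argbOut,
            .height   = srcYp.height,
            .width    = srcYp.width,
            .rowBytes = srcYp.width * 4,
        };

        // Full-range 8-bit YpCbCr: Y spans 0...255, Cb/Cr are biased by 128.
        vImage_YpCbCrPixelRange pixelRange = {0, 128, 255, 255, 255, 1, 255, 0};
        vImage_YpCbCrToARGB conversionInfo;
        vImageConvert_YpCbCrToARGB_GenerateConversion(
            kvImage_YpCbCrToARGBMatrix_ITU_R_601_2, &pixelRange, &conversionInfo,
            kvImage420Yp8_CbCr8, kvImageARGB8888, kvImageNoFlags);

        // NULL permute map keeps ARGB order; 0xff is the constant alpha.
        vImageConvert_420Yp8_CbCr8ToARGB8888(&srcYp, &srcCbCr, &dest,
                                             &conversionInfo, NULL, 0xff,
                                             kvImageNoFlags);

        CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    }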

    2. Writing RGB to a file

    A raw dump can be inspected with ffplay. On a little-endian machine ffmpeg's rgb32 reads each pixel as B,G,R,A bytes, which matches the kCVPixelFormatType_32BGRA buffer produced below; set -s to the actual frame size:

    ffplay -s 720x405 -pix_fmt rgb32  0.rgb

    - (void)dealWithSampleBuffer:(CMSampleBufferRef)buffer {
    
        CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(buffer);
        CIImage *ciimage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
        size_t width = CVPixelBufferGetWidth(pixelBuffer);
        size_t height = CVPixelBufferGetHeight(pixelBuffer);
    
        NSLog(@"pixelBuffer width:%zu height:%zu", width, height);
        
        // Shrink the frame so it fits inside 720x1280 while preserving the
        // aspect ratio; frames already inside that box pass through unscaled.
        CGFloat widthScale = width / 720.0;
        CGFloat heightScale = height / 1280.0;
        CGFloat realWidthScale = 1;
        CGFloat realHeightScale = 1;
        
        if (widthScale > 1 || heightScale > 1) {
            if (widthScale < heightScale) {
                // Height is the limiting dimension.
                realHeightScale = 1280.0 / height;
                CGFloat nowWidth = width * 1280.0 / height;
                height = 1280;
                realWidthScale = nowWidth / width;
                width = (size_t)nowWidth;
            } else {
                // Width is the limiting dimension.
                realWidthScale = 720.0 / width;
                CGFloat nowHeight = 720.0 * height / width;
                width = 720;
                realHeightScale = nowHeight / height;
                height = (size_t)nowHeight;
            }
        }
        
        {
            // Note: creating a CIContext per frame is expensive; in real code
            // it should be created once and reused.
            _ciContext = [CIContext contextWithOptions:nil];
            
            CIImage *newImage = [ciimage imageByApplyingTransform:CGAffineTransformMakeScale(realWidthScale, realHeightScale)];
    //        UIImage *tmpImage = [self imageWithColor:[UIColor redColor] AndRect:CGRectMake(0, 0, width, height)];
    //        CIImage *newImage = [CIImage imageWithCGImage:tmpImage.CGImage];
            
            CVPixelBufferLockBaseAddress(pixelBuffer, 0);
            CVPixelBufferRef newPixelBuffer = nil;
            CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA, nil, &newPixelBuffer);
            [_ciContext render:newImage toCVPixelBuffer:newPixelBuffer];
            CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    //        [self.videoEncoder encodeVideoData:newPixelBuffer timeStamp:(CACurrentMediaTime()*1000)];
            
            size_t newWidth = CVPixelBufferGetWidth(newPixelBuffer);
            size_t newHeight = CVPixelBufferGetHeight(newPixelBuffer);
            NSLog(@"newPixelBuffer width:%zu height:%zu", newWidth, newHeight);
            
            UIImage *sampleImage = [self imageFromSamplePlanerPixelBuffer:newPixelBuffer];
            (void)sampleImage; // kept only so the result can be inspected in the debugger
            
            CVPixelBufferRelease(newPixelBuffer);
        }
    }
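
    Allocating a brand-new CVPixelBuffer for every frame, as above, is another per-frame cost. A CVPixelBufferPool recycles buffers of a fixed size and format; here is a minimal sketch, assuming the scaled BGRA output used in this post:

    #import <CoreVideo/CoreVideo.h>

    // Sketch: build a pool of BGRA buffers once, then draw buffers from it
    // per frame instead of calling CVPixelBufferCreate each time.
    static CVPixelBufferPoolRef CreateBGRAPool(size_t width, size_t height) {
        NSDictionary *attrs = @{
            (id)kCVPixelBufferPixelFormatTypeKey     : @(kCVPixelFormatType_32BGRA),
            (id)kCVPixelBufferWidthKey               : @(width),
            (id)kCVPixelBufferHeightKey              : @(height),
            (id)kCVPixelBufferIOSurfacePropertiesKey : @{},
        };
        CVPixelBufferPoolRef pool = NULL;
        CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL,
                                (__bridge CFDictionaryRef)attrs, &pool);
        return pool;
    }

    Per frame, CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, &newPixelBuffer) then replaces the CVPixelBufferCreate call.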
    
    - (UIImage *)imageWithColor:(UIColor *)color AndRect:(CGRect)rect {
        UIGraphicsBeginImageContext(rect.size);
        CGContextRef context = UIGraphicsGetCurrentContext();
        CGContextSetFillColorWithColor(context, [color CGColor]);
        CGContextFillRect(context, rect);
        UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
        return image;
    }
    
    - (UIImage *)imageFromSamplePlanerPixelBuffer:(CVPixelBufferRef)imageBuffer {
        @autoreleasepool {
            // Lock the base address of the pixel buffer
            CVPixelBufferLockBaseAddress(imageBuffer, 0);
            
            // Base address and stride of plane 0 (for 32BGRA the only plane)
            uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
            size_t bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
            // Get the pixel buffer width and height
            size_t width = CVPixelBufferGetWidth(imageBuffer);
            size_t height = CVPixelBufferGetHeight(imageBuffer);
            
            size_t size = CVPixelBufferGetDataSize(imageBuffer);
            OSType type = CVPixelBufferGetPixelFormatType(imageBuffer);
            NSLog(@"buffer type:%u size:%zu", (unsigned)type, size);
            
            // Dump the first few frames to disk as raw, tightly packed BGRA.
            static int i = 0;
            i++;
            if (i < 4) {
                int len = (int)(width * height * 4);
                uint8_t *rgb_frame = (uint8_t *)malloc(len);
                
                // Copy row by row, skipping any row padding: bytesPerRow may be
                // larger than width * 4, and copying bytesPerRow bytes per row
                // would both overflow rgb_frame and confuse ffplay's -s option.
                for (int y = 0; y < (int)height; y++) {
                    memcpy(&rgb_frame[y * width * 4], &baseAddress[y * bytesPerRow], width * 4);
                }
                
                NSString *path2 = [self getHome2Path];
                const char *resultCString2 = NULL;
                if ([path2 canBeConvertedToEncoding:NSUTF8StringEncoding]) {
                    resultCString2 = [path2 cStringUsingEncoding:NSUTF8StringEncoding];
                }
                
                FILE *fpyuv = fopen(resultCString2, "wb");
                if (fpyuv) {
                    fwrite(rgb_frame, 1, len, fpyuv);
                    fclose(fpyuv);
                }
                
                free(rgb_frame);
            }
            
            // Create a device-dependent RGB color space
            CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
            
            // Create a bitmap graphics context with the pixel buffer data;
            // BGRA memory order corresponds to AlphaNoneSkipFirst + 32Little.
            CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                         bytesPerRow, colorSpace, kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little);
            // Create a Quartz image from the pixel data in the bitmap graphics context
            CGImageRef quartzImage = CGBitmapContextCreateImage(context);
            // Unlock the pixel buffer
            CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
            
            // Free up the context and color space
            CGContextRelease(context);
            CGColorSpaceRelease(colorSpace);
            
            // Create an image object from the Quartz image
            UIImage *image = [UIImage imageWithCGImage:quartzImage];
            
            // Release the Quartz image
            CGImageRelease(quartzImage);
            return image;
        }
    }
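
    The getHome2Path helper is not shown in the original post. A hypothetical minimal version, assuming the dump should land in the app's Documents directory under the 0.rgb name used by the ffplay command above:

    // Hypothetical helper (not in the original post): destination path of the
    // raw frame dump inside the app's Documents directory.
    - (NSString *)getHome2Path {
        NSString *documents = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES).firstObject;
        return [documents stringByAppendingPathComponent:@"0.rgb"];
    }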
  • Original post: https://www.cnblogs.com/javastart/p/12679791.html