ios - Saving a high-quality image while doing real-time processing - what is the best approach?

Posted by 菜鸟教程小白 on 2022-12-12 11:00:07

I am still learning AVFoundation, so I'm not sure how best to approach the problem of needing to capture a high-quality still image while delivering a low-quality preview video stream.

I have an app that needs to take high-quality images (AVCaptureSessionPresetPhoto), but process the preview video stream with OpenCV - for which a much lower resolution is acceptable. Simply using the stock OpenCV video camera class is no good, because setting defaultAVCaptureSessionPreset to AVCaptureSessionPresetPhoto results in full-resolution frames being passed to processImage - which is very slow.

How can I have a high-quality connection to the device that I can use for capturing the still image, and a low-quality connection that can be processed and displayed? A description of how I need to set up the session/connections would be very helpful. Is there an open-source example of such an app?
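
For context, the kind of setup being asked about - a single session at AVCaptureSessionPresetPhoto with a still-image output for full-resolution captures plus a video-data output whose frames are handled on a background queue - might look roughly like the sketch below. This is only an illustration, not code from the original post: the property self.captureSession and the queue label are placeholders, and self is assumed to adopt AVCaptureVideoDataOutputSampleBufferDelegate.

#import <AVFoundation/AVFoundation.h>

- (void)setupCaptureSession
{
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPresetPhoto;

    NSError *error = nil;
    AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:&error];
    if (input && [session canAddInput:input]) [session addInput:input];

    // Full-resolution stills, captured on demand with
    // captureStillImageAsynchronouslyFromConnection:completionHandler:.
    AVCaptureStillImageOutput *stillOutput = [[AVCaptureStillImageOutput alloc] init];
    if ([session canAddOutput:stillOutput]) [session addOutput:stillOutput];

    // BGRA frames for processing, delivered to a serial background queue.
    AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    videoOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey :
                                       @(kCVPixelFormatType_32BGRA) };
    videoOutput.alwaysDiscardsLateVideoFrames = YES;
    dispatch_queue_t frameQueue = dispatch_queue_create("frame.processing", DISPATCH_QUEUE_SERIAL);
    [videoOutput setSampleBufferDelegate:self queue:frameQueue];
    if ([session canAddOutput:videoOutput]) [session addOutput:videoOutput];

    [session startRunning];
    self.captureSession = session; // hypothetical property that keeps the session alive
}

The accepted answer below takes the processing side of this further: it copies only part of each frame into its own bitmap and dispatches the heavy work to another queue.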



Best Answer


I did something similar - I grabbed the pixels in the delegate method, made a CGImageRef of them, then dispatched that to a normal-priority queue where it was modified. Since AVFoundation must be using a CADisplayLink for the callback method, it has the highest priority. In my particular case I was not grabbing all the pixels, so it worked at 30fps on an iPhone 4. Depending on which devices you want to run on, you have trade-offs between number of pixels, fps, and so on.

Another idea is to grab a power-of-2 subset of the pixels - for instance, every 4th pixel in each row and every 4th row. Again, I did something similar in my app at 20-30fps. You can then operate further on this smaller image in dispatched blocks.
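
As a rough illustration of that every-4th-pixel / every-4th-row idea (this is not the answerer's code; it assumes 32BGRA frames like the listing further down, and the function name is made up):

// Hypothetical helper: copy every 4th pixel of every 4th row of a BGRA
// CVPixelBuffer into a 1/16-size bitmap context that is cheap to process later.
static CGContextRef CreateSubsampledContext(CVImageBufferRef imageBuffer)
{
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    uint8_t *src       = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t srcRowBytes = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t dstWidth    = CVPixelBufferGetWidth(imageBuffer)  / 4;
    size_t dstHeight   = CVPixelBufferGetHeight(imageBuffer) / 4;

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef ctx = CGBitmapContextCreate(NULL, dstWidth, dstHeight, 8, 0, colorSpace,
                                             kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little);
    CGColorSpaceRelease(colorSpace);

    size_t dstRowBytes = CGBitmapContextGetBytesPerRow(ctx);
    uint8_t *dst       = (uint8_t *)CGBitmapContextGetData(ctx);

    for (size_t y = 0; y < dstHeight; ++y) {
        uint32_t *srcRow = (uint32_t *)(src + (y * 4) * srcRowBytes);  // every 4th row
        uint32_t *dstRow = (uint32_t *)(dst + y * dstRowBytes);
        for (size_t x = 0; x < dstWidth; ++x)
            dstRow[x] = srcRow[x * 4];                                 // every 4th pixel
    }

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    return ctx;   // caller releases with CGContextRelease()
}

A CGImageRef made from the resulting context (via CGBitmapContextCreateImage) can then be handed to a dispatched block for further processing, much as the full listing below does with its row-range copy.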

If this seems daunting, offer a bounty for working code.

Code:

// Image is oriented with bottle neck to the left and the bottle bottom on the right
- (void)captureOutput:(AVCaptureVideoDataOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
#if 1   
    AVCaptureDevice *camera = [(AVCaptureDeviceInput *)[captureSession.inputs lastObject] device];
    if(camera.adjustingWhiteBalance || camera.adjustingExposure) NSLog(@"GOTCHA: %d %d", camera.adjustingWhiteBalance, camera.adjustingExposure);
    printf("foo\n");
#endif

    if(saveState != saveOne && saveState != saveAll) return;


    @autoreleasepool {
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 
        //NSLog(@"E: value=%lld timeScale=%d flags=%x", prStamp.value, prStamp.timescale, prStamp.flags);

        /*Lock the image buffer*/
        CVPixelBufferLockBaseAddress(imageBuffer,0); 

        NSRange captureRange;
        if(saveState == saveOne) {
#if 0 // B G R A MODE ! 
NSLog(@"IXEL_TYPE: 0x%lx", CVPixelBufferGetPixelFormatType(imageBuffer));
uint8_t *newPtr = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
NSLog(@"ONE VAL %x %x %x %x", newPtr[0], newPtr[1], newPtr[2], newPtr[3]);
}
exit(0);
#endif
            [edgeFinder setupImageBuffer:imageBuffer];

            BOOL success = [edgeFinder delineate:1];

            if(!success) {
                dispatch_async(dispatch_get_main_queue(), ^{ edgeFinder = nil; [delegate error]; });
                saveState = saveNone;
            } else {
                bottleRange = edgeFinder.sides;
                xRange.location = edgeFinder.shoulder;
                xRange.length = edgeFinder.bottom - xRange.location;

                NSLog(@"bottleRange 1: %@ neck=%d bottom=%d", NSStringFromRange(bottleRange), edgeFinder.shoulder, edgeFinder.bottom );
                //searchRows = [edgeFinder expandRange:bottleRange];

                rowsPerSwath = lrintf((bottleRange.length*NUM_DEGREES_TO_GRAB)*(float)M_PI/360.0f);
NSLog(@"rowsPerSwath = %d", rowsPerSwath);
                saveState = saveIdling;

                captureRange = NSMakeRange(0, [WLIPBase numRows]);
                dispatch_async(dispatch_get_main_queue(), ^
                    {
                        [delegate focusDone];
                        edgeFinder = nil;
                        captureOutput.alwaysDiscardsLateVideoFrames = YES;
                    });
            }
        } else {
            NSInteger rows = rowsPerSwath;
            NSInteger newOffset = bottleRange.length - rows;
            if(newOffset & 1) {
                --newOffset;
                ++rows;
            }
            captureRange = NSMakeRange(bottleRange.location + newOffset/2, rows);
        }
        //NSLog(@"captureRange=%u %u", captureRange.location, captureRange.length);

        /*Get information about the image*/
        uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer); 
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); 
        size_t width = CVPixelBufferGetWidth(imageBuffer); 

        // Note Apple sample code cheats big time - the phone is big endian so this reverses the "apparent" order of bytes
        CGContextRef newContext = CGBitmapContextCreate(NULL, width, captureRange.length, 8, bytesPerRow, colorSpace, kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little); // Video in ARGB format

assert(newContext);

        uint8_t *newPtr = (uint8_t *)CGBitmapContextGetData(newContext);
        size_t offset   = captureRange.location * bytesPerRow;

        memcpy(newPtr, baseAddress + offset, captureRange.length * bytesPerRow);

        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

        OSAtomicIncrement32(&totalImages);
        int32_t curDepth = OSAtomicIncrement32(&queueDepth);
        if(curDepth > maxDepth) maxDepth = curDepth;

#define kImageContext   @"kImageContext"
#define kState          @"kState"
#define kPresTime       @"kPresTime"

        CMTime prStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);      // when it was taken?
        //CMTime deStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer);          // now?

        NSDictionary *dict = [NSDictionary dictionaryWithObjectsAndKeys:
            [NSValue valueWithBytes:&saveState objCTypeencode(saveImages)], kState,
            [NSValue valueWithNonretainedObject__bridge id)newContext], kImageContext,
            [NSValue valueWithBytes:&prStamp objCTypeencode(CMTime)], kPresTime,
            nil ];
        dispatch_async(imageQueue, ^
            {
                // could be on any thread now
                OSAtomicDecrement32(&queueDepth);

                if(!isCancelled) {
                    saveImages state; [(NSValue *)[dict objectForKey:kState] getValue:&state];
                    CGContextRef context; [(NSValue *)[dict objectForKey:kImageContext] getValue:&context];
                    CMTime stamp; [(NSValue *)[dict objectForKey:kPresTime] getValue:&stamp];

                    CGImageRef newImageRef = CGBitmapContextCreateImage(context); 
                    CGContextRelease(context);
                    UIImageOrientation orient = state == saveOne ? UIImageOrientationLeft : UIImageOrientationUp;
                    UIImage *image = [UIImage imageWithCGImage:newImageRef scale:1.0 orientation:orient]; // imageWithCGImage:  UIImageOrientationUp  UIImageOrientationLeft
                    CGImageRelease(newImageRef);
                    NSData *data = UIImagePNGRepresentation(image);

                    // NSLog(@"STATE:[%d]: value=%lld timeScale=%d flags=%x", state, stamp.value, stamp.timescale, stamp.flags);

                    {
                        NSString *name = [NSString stringWithFormat:@"%d.png", num];
                        NSString *path = [[wlAppDelegate snippetsDirectory] stringByAppendingPathComponent:name];
                        BOOL ret = [data writeToFile:path atomically:NO];
//NSLog(@"WROTE %d err=%d w/time %f path:%@", num, ret, (double)stamp.value/(double)stamp.timescale, path);
                        if(!ret) {
                            ++errors;
                        } else {
                            dispatch_async(dispatch_get_main_queue(), ^
                                {
                                    if(num) [delegate progress:(CGFloat)num/(CGFloat)(MORE_THAN_ONE_REV * SNAPS_PER_SEC) file:path];
                                } );
                        }
                        ++num;
                    }
                } else NSLog(@"CANCELLED");

            } );
    }
}

Regarding ios - Saving a high-quality image while doing real-time processing - what is the best approach?, we found a similar question on Stack Overflow: https://stackoverflow.com/questions/16648394/
