我正在使用AVCaptureScreenInput对屏幕进行采样,并使用AVCaptureVideoDataOutput将其输出,但它不起作用。它确实输出的图像是空白的,但看起来似乎我在按照已阅读的所有文档正确地做所有事情。

我确保将AVCaptureVideoDataOutput设置为可以被CGImage (kCVPixelFormatType_32BGRA)读取的内容。当我运行相同的代码并将其输出到AVCaptureMovieFileOutput时,影片呈现良好,并且一切看起来都很好-但我真正想要的是一系列图像。

#import "ScreenRecorder.h"
#import <QuartzCore/QuartzCore.h>

@interface ScreenRecorder() <AVCaptureFileOutputRecordingDelegate, AVCaptureVideoDataOutputSampleBufferDelegate>  {
    BOOL _isRecording;
@private
    AVCaptureSession *_session;
    AVCaptureOutput *_movieFileOutput;
    AVCaptureStillImageOutput *_imageFileOutput;

    NSUInteger _frameIndex;

    NSTimer *_timer;

    NSString *_outputDirectory;
}
@end

@implementation ScreenRecorder

- (BOOL)recordDisplayImages:(CGDirectDisplayID)displayId toURL:(NSURL *)fileURL windowBounds:(CGRect)windowBounds duration:(NSTimeInterval)duration {
    if (_isRecording) {
        return NO;
    }

    _frameIndex = 0;

    // Create a capture session
    _session = [[AVCaptureSession alloc] init];

    // Set the session preset as you wish
    _session.sessionPreset = AVCaptureSessionPresetHigh;

    // Create a ScreenInput with the display and add it to the session
    AVCaptureScreenInput *input = [[[AVCaptureScreenInput alloc] initWithDisplayID:displayId] autorelease];
    if (!input) {
        [_session release];
        _session = nil;
        return NO;
    }
    if ([_session canAddInput:input]) {
        [_session addInput:input];
    }

    input.cropRect = windowBounds;

    // Create a MovieFileOutput and add it to the session
    _movieFileOutput = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
    [((AVCaptureVideoDataOutput *)_movieFileOutput) setVideoSettings:[NSDictionary dictionaryWithObjectsAndKeys:@(kCVPixelFormatType_32BGRA),kCVPixelBufferPixelFormatTypeKey, nil]];
//    ((AVCaptureVideoDataOutput *)_movieFileOutput).alwaysDiscardsLateVideoFrames = YES;

    if ([_session canAddOutput:_movieFileOutput])
        [_session addOutput:_movieFileOutput];

    // Start running the session
    [_session startRunning];

    // Delete any existing movie file first
    if ([[NSFileManager defaultManager] fileExistsAtPath:[fileURL path]])
    {
        NSError *err;
        if (![[NSFileManager defaultManager] removeItemAtPath:[fileURL path] error:&err])
        {
            NSLog(@"Error deleting existing movie %@",[err localizedDescription]);
        }
    }

    _outputDirectory = [[fileURL path] retain];
    [[NSFileManager defaultManager] createDirectoryAtPath:_outputDirectory withIntermediateDirectories:YES attributes:nil error:nil];

    // Set the recording delegate to self
    dispatch_queue_t queue = dispatch_queue_create("com.schaefer.lolz", 0);
    [(AVCaptureVideoDataOutput *)_movieFileOutput setSampleBufferDelegate:self queue:queue];
    //dispatch_release(queue);

    if (0 != duration) {
        _timer = [[NSTimer scheduledTimerWithTimeInterval:duration target:self selector:@selector(finishRecord:) userInfo:nil repeats:NO] retain];
    }
    _isRecording = YES;

    return _isRecording;
}

- (void)dealloc
{
    if (nil != _session) {
        [_session stopRunning];
        [_session release];
    }

    [_outputDirectory release];
    _outputDirectory = nil;

    [super dealloc];
}

- (void)stopRecording {
    if (!_isRecording) {
        return;
    }
    _isRecording = NO;

    // Stop recording to the destination movie file
    if ([_movieFileOutput isKindOfClass:[AVCaptureFileOutput class]]) {
        [_movieFileOutput performSelector:@selector(stopRecording)];
    }
    [_session stopRunning];

    [_session release];
    _session = nil;

    [_timer release];
    _timer = nil;

}

-(void)finishRecord:(NSTimer *)timer
{
    [self stopRecording];
}

//AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer,0);        // Lock the image buffer

    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);   // Get information of the image
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef image = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);

    CGColorSpaceRelease(colorSpace);
    _frameIndex++;
    CVPixelBufferUnlockBaseAddress(imageBuffer,0);

    dispatch_async(dispatch_get_main_queue(), ^{
        NSURL *URL = [NSURL fileURLWithPath:[_outputDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%d.jpg", (int)_frameIndex]]];

        CGImageDestinationRef destination = CGImageDestinationCreateWithURL((CFURLRef)URL, kUTTypeJPEG, 1, NULL);
        CGImageDestinationAddImage(destination, image, nil);

        if (!CGImageDestinationFinalize(destination)) {
            NSLog(@"Failed to write image to %@", URL);
        }

        CFRelease(destination);
        CFRelease(image);
    });
}
@end

最佳答案

您的数据不是平面的,因此没有平面0的基地址-没有平面0。(可以肯定的是,您可以使用CVPixelBufferIsPlanar进行检查。)您将需要CVPixelBufferGetBaseAddress以获得指向该平面的指针。第一个像素。所有数据将被交织。

关于cocoa - 从AVCaptureScreenInput捕获空白剧照?,我们在Stack Overflow上找到一个类似的问题:https://stackoverflow.com/questions/15916808/

10-09 16:26