I'm playing CAKeyframeAnimations on an AVSynchronizedLayer driven by an AVPlayer. To keep the player playing (I don't actually play any AVAsset during the animation), I set the AVPlayerItem's forwardPlaybackEndTime to the duration of the desired animation. Unfortunately, seekToTime: doesn't seem to work with forwardPlaybackEndTime: the AVPlayer always snaps back to the start, probably because it probes the AVPlayerItem's duration.
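For reference, a minimal sketch of that setup, assuming `playerItem`, `animatedLayer`, and `hostLayer` already exist (the key path, values, and 10-second duration are illustrative placeholders):

CAKeyframeAnimation * animation =
[CAKeyframeAnimation animationWithKeyPath:@"opacity"];
animation.values              = @[@0.0, @1.0];
animation.duration            = 10.0;
animation.beginTime           = AVCoreAnimationBeginTimeAtZero; // required on an AVSynchronizedLayer
animation.removedOnCompletion = NO;

AVSynchronizedLayer * syncLayer =
[AVSynchronizedLayer synchronizedLayerWithPlayerItem:playerItem];
[syncLayer addSublayer:animatedLayer];
[animatedLayer addAnimation:animation forKey:@"opacity"];
[hostLayer addSublayer:syncLayer];

// Let playback run past the (empty) asset so the animation keeps advancing.
playerItem.forwardPlaybackEndTime = CMTimeMakeWithSeconds(10.0, 600);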

How can I create a dummy AVPlayerItem with a real duration, to trick the AVPlayer into playing an empty AVPlayerItem and let me use seekToTime:?

Best Answer

Unfortunately, seekToTime: only seeks within the AVPlayerItem's duration, so you need a dummy player item whose duration is genuinely seekable. The quickest way to get one is to generate a dummy video asset on the fly. Here is an example implementation that produces such an item. It's long, but it's necessary. Good luck!

#import <AVFoundation/AVFoundation.h>
#import <CoreVideo/CoreVideo.h>
#import <UIKit/UIKit.h>

@interface FakeAsset : NSObject

+ (void)assetWithDuration:(CMTime)duration
          completionBlock:(void (^)(AVAsset *))callBack;

@end

@interface FakeAsset ()

+ (CVPixelBufferRef)blackImagePixelBuffer;

@end

@implementation FakeAsset

+ (void)assetWithDuration:(CMTime)duration
          completionBlock:(void (^)(AVAsset *))callBack
{
    NSError * error      = nil;
    NSString * assetPath = nil;
    NSUInteger i         = 0;
    // Pick a file name in the temporary directory that is not taken yet.
    do
    {
        assetPath =
        [NSTemporaryDirectory() stringByAppendingPathComponent:
         [NSString stringWithFormat:@"dummyAsset%lu.m4v", (unsigned long)i]];
        i++;
    }
    while ([[NSFileManager defaultManager] fileExistsAtPath:assetPath]);

    NSURL * fileURL = [NSURL fileURLWithPath:assetPath];

    NSParameterAssert(fileURL);

    AVAssetWriter * videoWriter =
    [[AVAssetWriter alloc] initWithURL:fileURL
                              fileType:AVFileTypeAppleM4V
                                 error:&error];
    NSParameterAssert(videoWriter);

    // Minimal bitrate and frame size: the asset only needs a valid duration,
    // not watchable content.
    NSDictionary * compression =
    @{
      AVVideoAverageBitRateKey      : @10,
      AVVideoProfileLevelKey        : AVVideoProfileLevelH264Main31,
      AVVideoMaxKeyFrameIntervalKey : @300
      };

    NSDictionary * outputSettings =
    @{
      AVVideoCodecKey                 : AVVideoCodecH264,
      AVVideoCompressionPropertiesKey : compression,
      AVVideoWidthKey                 : @120,
      AVVideoHeightKey                : @80
      };

    AVAssetWriterInput * videoWriterInput =
    [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                       outputSettings:outputSettings];
    NSParameterAssert(videoWriterInput);

    NSDictionary * parameters =
    @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32ARGB),
      (NSString *)kCVPixelBufferWidthKey           : @120,
      (NSString *)kCVPixelBufferHeightKey          : @80
      };

    AVAssetWriterInputPixelBufferAdaptor * adaptor =
    [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                                     sourcePixelBufferAttributes:parameters];
    NSParameterAssert(adaptor);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);

    videoWriterInput.expectsMediaDataInRealTime = NO;

    [videoWriter addInput:videoWriterInput];

    // Keep the side effect out of the assert macro, which is compiled away
    // in release builds.
    BOOL didStartWriting = [videoWriter startWriting];
    NSParameterAssert(didStartWriting);

    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    dispatch_queue_t dispatchQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);

    // The counter must live outside the block: the block can be invoked
    // several times, and resetting it would append duplicate frames.
    __block int frame = 0;

    [videoWriterInput requestMediaDataWhenReadyOnQueue:dispatchQueue
                                            usingBlock:^
    {
        while (videoWriterInput.isReadyForMoreMediaData)
        {
            if (frame < 2)
            {
                // Two frames suffice: one at time zero and one at the
                // requested duration, which defines the asset's length.
                CMTime frameTime = frame ? duration : kCMTimeZero;
                CVPixelBufferRef buffer = [self blackImagePixelBuffer];

                [adaptor appendPixelBuffer:buffer
                      withPresentationTime:frameTime];

                CVBufferRelease(buffer);

                ++frame;
            }
            else
            {
                [videoWriterInput markAsFinished];
                [videoWriter endSessionAtSourceTime:duration];

                dispatch_async(dispatch_get_main_queue(), ^
                {
                    [videoWriter finishWritingWithCompletionHandler:^()
                     {
                         NSLog(@"did finish writing the video!");
                         AVURLAsset * asset =
                         [AVURLAsset assetWithURL:videoWriter.outputURL];
                         callBack(asset);
                     }];
                });
                break;
            }
        }
    }];
}

// Returns a 120x80 all-black 32ARGB pixel buffer. The caller owns the
// buffer and must release it with CVBufferRelease.
+ (CVPixelBufferRef)blackImagePixelBuffer
{
    NSDictionary * options =
    @{
      (id)kCVPixelBufferCGImageCompatibilityKey         : @YES,
      (id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES
      };

    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status =
    CVPixelBufferCreate(kCFAllocatorDefault, 120, 80, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options, &pxbuffer);

    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);

    void * pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // Opaque ARGB layout; the alpha byte is present but ignored.
    CGContextRef context = CGBitmapContextCreate(pxdata, 120, 80, 8, 4 * 120, rgbColorSpace, (CGBitmapInfo)kCGImageAlphaNoneSkipFirst);

    NSParameterAssert(context);
    CGContextSetFillColorWithColor(context, [UIColor blackColor].CGColor);
    CGContextFillRect(context,CGRectMake(0.f, 0.f, 120.f, 80.f));
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}

@end
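A minimal usage sketch, assuming the class above and existing `animatedLayer` and `view` objects (both placeholders); the 10-second duration is illustrative:

[FakeAsset assetWithDuration:CMTimeMakeWithSeconds(10.0, 600)
             completionBlock:^(AVAsset * asset)
{
    // The completion block is already dispatched on the main queue.
    AVPlayerItem * item = [AVPlayerItem playerItemWithAsset:asset];
    AVPlayer * player   = [AVPlayer playerWithPlayerItem:item];

    AVSynchronizedLayer * syncLayer =
    [AVSynchronizedLayer synchronizedLayerWithPlayerItem:item];
    [syncLayer addSublayer:animatedLayer];
    [view.layer addSublayer:syncLayer];

    // Seeking now works, because the item reports a real 10 s duration.
    [player seekToTime:CMTimeMakeWithSeconds(5.0, 600)];
}];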

Regarding "ios - How to create a dummy AVPlayerItem with a real duration?", a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/21245182/
