我知道这个问题过去曾被问过几次,我已经阅读了对这些问题的答复。但似乎没有什么能像我想要的那样工作。有多个视频,所有视频都添加到 AVQueuePlayer 队列中。

我尝试以其他页面中提到的两种方式添加:

// Build the whole queue up front: one AVPlayerItem per URL, handed to
// AVQueuePlayer in a single call so item order is deterministic.
AVPlayerItem *item1 = [AVPlayerItem playerItemWithURL:url1];
AVPlayerItem *item2 = [AVPlayerItem playerItemWithURL:url2];

// Modern array literal instead of -initWithObjects:...nil (the literal
// requires non-nil elements, which both items are here).
NSArray<AVPlayerItem *> *playerItems = @[ item1, item2 ];
avPlayer = [[AVQueuePlayer alloc] initWithItems:playerItems];

这样:
    avPlayer = [[AVQueuePlayer alloc] init];

    // Load the "playable" key asynchronously so AVPlayerItem creation does
    // not block the main thread on media inspection.
    NSArray<NSString *> *keys = @[ @"playable" ];

    AVURLAsset *asset1 = [[AVURLAsset alloc] initWithURL:url1 options:nil];
    AVURLAsset *asset2 = [[AVURLAsset alloc] initWithURL:url2 options:nil];

    // BUG FIX: the original sent -loadValuesAsynchronouslyForKeys: to an
    // undefined variable `asset` in both places instead of asset1/asset2.
    // BUG FIX: the two loads also raced — whichever completion fired first
    // was enqueued first. Chaining the second load inside the first
    // completion guarantees playback order (url1 then url2), since
    // insertItem:afterItem:nil appends to the end of the queue.
    [asset1 loadValuesAsynchronouslyForKeys:keys completionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            AVPlayerItem *firstItem = [[AVPlayerItem alloc] initWithAsset:asset1];
            [avPlayer insertItem:firstItem afterItem:nil];

            [asset2 loadValuesAsynchronouslyForKeys:keys completionHandler:^{
                dispatch_async(dispatch_get_main_queue(), ^{
                    AVPlayerItem *secondItem = [[AVPlayerItem alloc] initWithAsset:asset2];
                    [avPlayer insertItem:secondItem afterItem:nil];
                });
            }];
        });
    }];

但是在前进到下一个项目时,这些都无法消除黑屏。
在下一个项目开始播放之前有大约 1 秒的间隔。我怎样才能消除这个差距?

更新 :我也尝试过 AVMutableComposition 。差距明显缩小,但仍然很明显。有没有办法完全消除这些差距?
AVMutableComposition 代码:
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];

// Layer instructions for every video segment, in playback order.
NSMutableArray *arrayInstruction = [[NSMutableArray alloc] init];

AVMutableVideoCompositionInstruction *MainInstruction =
    [AVMutableVideoCompositionInstruction videoCompositionInstruction];

// Single shared audio track: every clip's audio is appended end-to-end so
// the audio timeline stays contiguous with the video timeline (timestamp
// gaps between clips are what cause the visible freeze/black frame).
AVMutableCompositionTrack *audioTrack =
    [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                preferredTrackID:kCMPersistentTrackID_Invalid];

// Running insertion point; each clip starts exactly where the previous ended.
CMTime duration = kCMTimeZero;

for (int i = 0; i <= 5; i++)
{
    AVAsset *currentAsset = [self currentAsset:i]; // fetch the i-th source clip

    AVMutableCompositionTrack *currentTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];

    // BUG FIX: the original passed error:nil to both inserts, silently
    // swallowing failures — and a failed insert leaves exactly the kind of
    // gap being debugged. Check the return value and log the error.
    NSError *insertError = nil;
    if (![currentTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration)
                               ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeVideo] firstObject]
                                atTime:duration
                                 error:&insertError]) {
        NSLog(@"Video insert failed for asset %d: %@", i, insertError);
    }

    insertError = nil;
    if (![audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration)
                             ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] firstObject]
                              atTime:duration
                               error:&insertError]) {
        NSLog(@"Audio insert failed for asset %d: %@", i, insertError);
    }

    AVMutableVideoCompositionLayerInstruction *currentAssetLayerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:currentTrack];
    AVAssetTrack *currentAssetTrack =
        [[currentAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];

    // NOTE(review): the original computed an ALAssetOrientation that was
    // never read, and isCurrentAssetPortrait was initialised to YES and
    // never set to NO, so the landscape branch was dead code. Both branches
    // concatenated an identity scale (640.0/640.0 == 1.0) and a zero
    // translation, so the net effect was exactly the asset's
    // preferredTransform — apply it directly.
    [currentAssetLayerInstruction setTransform:currentAssetTrack.preferredTransform
                                        atTime:duration];

    duration = CMTimeAdd(duration, currentAsset.duration);
    [arrayInstruction addObject:currentAssetLayerInstruction];
}

MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, duration);
MainInstruction.layerInstructions = arrayInstruction;

AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = @[ MainInstruction ];
MainCompositionInst.frameDuration = CMTimeMake(1, 30); // 30 fps
MainCompositionInst.renderSize = CGSizeMake(640.0, 640.0);

// Export destination: fixed name in the temp directory. Any stale file must
// be deleted first, because AVAssetExportSession refuses to overwrite.
NSString *filename = @"mergedVideo.mp4";
pathForFile = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];
NSFileManager *fileManager = [NSFileManager defaultManager];
// Renamed from `delete`: that identifier is a C++ keyword and breaks the
// file if it is ever compiled as Objective-C++.
BOOL removed = [fileManager removeItemAtPath:pathForFile error:NULL];
NSLog(@"Stale file removed: %d", removed);

NSURL *url = [NSURL fileURLWithPath:pathForFile];
NSLog(@"output url: %@", url);

// After deletion the file must NOT be reachable: NO means the path is clear
// for export; YES means a stale file is still in the way. (The original's
// "FINE"/"ERROR" logs expressed the same check, just cryptically.)
NSError *err;
if ([url checkResourceIsReachableAndReturnError:&err] == NO)
    NSLog(@"Output path is clear");
else
    NSLog(@"Stale output file still present at path");

AVAssetExportSession *exporter =
    [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                     presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = url;
// BUG FIX: the output path ends in ".mp4" but the original requested
// AVFileTypeQuickTimeMovie (a .mov container). outputFileType must match
// the file extension or the export can fail / produce a misnamed file.
exporter.outputFileType = AVFileTypeMPEG4;
exporter.videoComposition = MainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;

[exporter exportAsynchronouslyWithCompletionHandler:^{
    switch (exporter.status)
    {
        case AVAssetExportSessionStatusCompleted:
        {
            NSURL *outputURL = exporter.outputURL;

            // BUG FIX: the original allocated a second ALAssetsLibrary that
            // shadowed this one inside the if-branch; one instance suffices.
            ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
            if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
                [library writeVideoAtPathToSavedPhotosAlbum:outputURL
                                            completionBlock:^(NSURL *assetURL, NSError *error) {
                    NSLog(@"ASSET URL %@", assetURL);
                    if (error) {
                        NSLog(@"ERROR %@ ", error); // was misspelled "EROR"
                    } else {
                        NSLog(@"VIDEO SAVED ");
                    }
                }];
                NSLog(@"Video merge successful"); // was "Video Merge SuccessFullt"
                currentFile++;
            }
        }
            break;
        case AVAssetExportSessionStatusFailed:
            NSLog(@"Failed:%@", exporter.error.description);
            break;
        case AVAssetExportSessionStatusCancelled:
            NSLog(@"Canceled:%@", exporter.error);
            break;
        case AVAssetExportSessionStatusExporting:
            NSLog(@"Exporting!");
            break;
        case AVAssetExportSessionStatusWaiting:
            NSLog(@"Waiting");
            break;
        default:
            break;
    }
}];

最佳答案

对于 Ultravisual,我们使用了 AVMutableComposition ,只要我们先建立构图,然后建立播放器来播放它,我们就能在除了循环之外的任何地方获得完美无间隙的播放。

您能否遍历 AVMutableComposition 中的所有轨道并验证没有间隙?不要忘记音轨。有时音频和视频有不同的时间戳 - 您可能需要在 AVMutableComposition 中添加另一个轨道来解决这个问题。

关于ios - 如何在 AVQueuePlayer 或 AVMutableComposition 中播放多个视频而不会出现任何间隙或卡住?,我们在Stack Overflow上找到一个类似的问题:https://stackoverflow.com/questions/29122080/

10-09 02:15