Based on this answer https://stackoverflow.com/a/16035330/1615183, I wrote the following Swift code to compress a video:

var videoWriter:AVAssetWriter!
var videoWriterInput:AVAssetWriterInput!
var processingQueue:dispatch_queue_t  = dispatch_queue_create("processingQueue1", nil)
var processingQueue2:dispatch_queue_t = dispatch_queue_create("processingQueue2", nil)
var audioWriterInput:AVAssetWriterInput!

func encode(){

    NSFileManager.defaultManager().removeItemAtURL(self.outputFile, error: nil)

    let videoCleanApertureSettings = [AVVideoCleanApertureHeightKey: 720,
        AVVideoCleanApertureWidthKey: 1280,
        AVVideoCleanApertureHorizontalOffsetKey: 2,
        AVVideoCleanApertureVerticalOffsetKey: 2
    ]
    let codecSettings  = [AVVideoAverageBitRateKey: 1024000,
        AVVideoCleanApertureKey: videoCleanApertureSettings
    ]

    let videoSettings = [AVVideoCodecKey: AVVideoCodecH264,
        AVVideoCompressionPropertiesKey: codecSettings,
        AVVideoHeightKey: 720, AVVideoWidthKey: 1280]


    //setup video writer
    var error:NSError?
    let asset = AVURLAsset(URL: self.inputFile, options: nil)

    let videoTrack:AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] as AVAssetTrack
    let videoSize:CGSize = videoTrack.naturalSize

    videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
    videoWriterInput.expectsMediaDataInRealTime = false
    videoWriterInput.transform = videoTrack.preferredTransform
    videoWriter = AVAssetWriter(URL: self.outputFile, fileType: AVFileTypeQuickTimeMovie, error: &error)

    if videoWriter.canAddInput(videoWriterInput) {
        videoWriter.addInput(videoWriterInput)
    }else{
        println("cant add video writer input")
        return
    }

    //setup video reader

    let videoReaderSettings = [ kCVPixelBufferPixelFormatTypeKey as String : kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]

    let videoReaderOutput:AVAssetReaderTrackOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings) // should it be videoReaderSettings?

    let videoReader:AVAssetReader = AVAssetReader(asset: asset, error: &error)
    if videoReader.canAddOutput(videoReaderOutput) {
        videoReader.addOutput(videoReaderOutput)
    }

    //setup audio writer
    audioWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: nil)

    audioWriterInput.expectsMediaDataInRealTime = false
    if videoWriter.canAddInput(audioWriterInput){
        videoWriter.addInput(audioWriterInput)
    }

    //setup audio reader
    let audioTrack:AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] as AVAssetTrack
    let audioReaderOutput:AVAssetReaderOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil)
    let audioReader:AVAssetReader = AVAssetReader(asset: asset, error: &error)

    if audioReader.canAddOutput(audioReaderOutput) {
        audioReader.addOutput(audioReaderOutput)
    }else {
        println("cant add audio reader")
        return
    }


    videoWriter.startWriting()
    videoReader.startReading()

    videoWriter.startSessionAtSourceTime(kCMTimeZero)




    videoWriterInput.requestMediaDataWhenReadyOnQueue(processingQueue) {
        while self.videoWriterInput.readyForMoreMediaData {
            println("First loop")
            var sampleBuffer = videoReaderOutput.copyNextSampleBuffer()
            if videoReader.status == .Reading && sampleBuffer != nil {
                println("Appending")
                self.videoWriterInput.appendSampleBuffer(sampleBuffer)
            }else {
                self.videoWriterInput.markAsFinished()
                if videoReader.status == .Completed {

                    audioReader.startReading()
                    self.videoWriter.startSessionAtSourceTime(kCMTimeZero)

                    self.audioWriterInput.requestMediaDataWhenReadyOnQueue(self.processingQueue2) {
                        while self.audioWriterInput.readyForMoreMediaData {
                            println("Second loop")
                            var sampleBuffer2:CMSampleBufferRef? = audioReaderOutput.copyNextSampleBuffer()
                            if audioReader.status == .Reading && sampleBuffer2 != nil {
                                self.audioWriterInput.appendSampleBuffer(sampleBuffer2)
                            }else {
                                self.audioWriterInput.markAsFinished()
                                println("Audio finish")
                                self.videoWriter.finishWritingWithCompletionHandler { println("Done") }
                            }
                        }

                    }


                }
                else {
                    println("Video Reader not completed")
                }
                println("Finished")
                break
            }// else videoSampleBuffer
        }
    }

 }

However, if I remove the audio part, I just get an empty file. If I run it as it is, the second loop runs fine the first time through, but on the second iteration it crashes with the following error:
*** Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '*** -[AVAssetWriterInput appendSampleBuffer:] Media type of sample buffer must match receiver's media type ("soun")'

Has anyone run into the same problem?

Best Answer

Change AVMediaTypeVideo to AVMediaTypeAudio:

let audioTrack:AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] as AVAssetTrack

should be
let audioTrack:AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeAudio)[0] as AVAssetTrack
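
For safety you might also want to check that the asset actually contains an audio track before wiring up the audio reader and writer; an asset recorded without sound has no AVMediaTypeAudio tracks, and indexing [0] on the empty array would crash. A minimal sketch in the same Swift 1.x style, reusing the asset variable from your code (the early-out and the audioTracks name are my additions, not something in the original code):

    // Only set up the audio pipeline when the asset really carries audio.
    let audioTracks = asset.tracksWithMediaType(AVMediaTypeAudio)
    if audioTracks.isEmpty {
        println("asset has no audio track, writing video only")
        // skip the audio reader/writer setup and finish the writer in the video branch instead
    } else {
        let audioTrack = audioTracks[0] as AVAssetTrack
        let audioReaderOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil)
        // ...continue with the audio reader/writer setup exactly as in the question...
    }

As a side note, finishWritingWithCompletionHandler is only ever called inside the audio block of the posted code, which would also explain the empty file you get when the audio part is removed: the writer is never finalized in that case.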
