I'm a beginner trying to build a camera app that can apply a live filter and save the video with that filter applied.

So far I can show a live preview with the filter applied, but when I save the video, it comes out completely black.

import UIKit
import AVFoundation
import AssetsLibrary
import CoreMedia
import Photos

class ViewController: UIViewController , AVCaptureVideoDataOutputSampleBufferDelegate {

    var captureSession: AVCaptureSession!

    @IBOutlet weak var previewView: UIView!
    @IBOutlet weak var recordButtton: UIButton!
    @IBOutlet weak var imageView: UIImageView!

    var assetWriter: AVAssetWriter?
    var assetWriterPixelBufferInput: AVAssetWriterInputPixelBufferAdaptor?
    var isWriting = false
    var currentSampleTime: CMTime?
    var currentVideoDimensions: CMVideoDimensions?

    override func viewDidLoad() {
        super.viewDidLoad()
        FilterVendor.register()
        setupCaptureSession()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    func setupCaptureSession() {
        let captureSession = AVCaptureSession()
        captureSession.sessionPreset = AVCaptureSessionPresetPhoto

        guard let captureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo), let input = try? AVCaptureDeviceInput(device: captureDevice) else {
            print("Can't access the camera")
            return
        }

        if captureSession.canAddInput(input) {
            captureSession.addInput(input)
        }

        let videoOutput = AVCaptureVideoDataOutput()

        videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
        if captureSession.canAddOutput(videoOutput) {
            captureSession.addOutput(videoOutput)
        }

        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        if let previewLayer = previewLayer {
            view.layer.addSublayer(previewLayer)
        }

        captureSession.startRunning()
    }

    @IBAction func record(_ sender: Any) {
        if isWriting {
            print("stop record")
            self.isWriting = false
            assetWriterPixelBufferInput = nil
            assetWriter?.finishWriting(completionHandler: {[unowned self] () -> Void in
                self.saveMovieToCameraRoll()
            })
        } else {
            print("start record")
            createWriter()
            assetWriter?.startWriting()
            assetWriter?.startSession(atSourceTime: currentSampleTime!)
            isWriting = true
        }
    }

    func saveMovieToCameraRoll() {
        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: self.movieURL() as URL)
        }) { saved, error in
            if saved {
                print("saved")
            }
        }
    }

    func movieURL() -> NSURL {
        let tempDir = NSTemporaryDirectory()
        let url = NSURL(fileURLWithPath: tempDir).appendingPathComponent("tmpMov.mov")
        return url! as NSURL
    }

    func checkForAndDeleteFile() {
        let fm = FileManager.default
        let url = movieURL()
        let exist = fm.fileExists(atPath: url.path!)

        if exist {
            do {
                try fm.removeItem(at: url as URL)
            } catch let error as NSError {
                print(error.localizedDescription)
            }
        }
    }

    func createWriter() {
        self.checkForAndDeleteFile()

        do {
            assetWriter = try AVAssetWriter(outputURL: movieURL() as URL, fileType: AVFileTypeQuickTimeMovie)
        } catch let error as NSError {
            print(error.localizedDescription)
            return
        }

        let outputSettings = [
            AVVideoCodecKey : AVVideoCodecH264,
            AVVideoWidthKey : Int(currentVideoDimensions!.width),
            AVVideoHeightKey : Int(currentVideoDimensions!.height)
        ] as [String : Any]

        let assetWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings as? [String : AnyObject])
        assetWriterVideoInput.expectsMediaDataInRealTime = true
        assetWriterVideoInput.transform = CGAffineTransform(rotationAngle: CGFloat(M_PI / 2.0))

        let sourcePixelBufferAttributesDictionary = [
            String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_32BGRA),
            String(kCVPixelBufferWidthKey) : Int(currentVideoDimensions!.width),
            String(kCVPixelBufferHeightKey) : Int(currentVideoDimensions!.height),
            String(kCVPixelFormatOpenGLESCompatibility) : kCFBooleanTrue
        ] as [String : Any]

        assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: assetWriterVideoInput,
                                                                           sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)

        if assetWriter!.canAdd(assetWriterVideoInput) {
            assetWriter!.add(assetWriterVideoInput)
        } else {
            print("no way\(assetWriterVideoInput)")
        }
    }

    func captureOutput(_ captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection) {
        autoreleasepool {

            connection.videoOrientation = AVCaptureVideoOrientation.landscapeLeft;

            guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
            let cameraImage = CIImage(cvPixelBuffer: pixelBuffer)

            let filter = CIFilter(name: "Fİlter")!
            filter.setValue(cameraImage, forKey: kCIInputImageKey)


            let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)!
            self.currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription)
            self.currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)

            if self.isWriting {
                if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true {
                    var newPixelBuffer: CVPixelBuffer? = nil

                    CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer)

                    let success = self.assetWriterPixelBufferInput?.append(newPixelBuffer!, withPresentationTime: self.currentSampleTime!)

                    if success == false {
                        print("Pixel Buffer failed")
                    }
                }
            }

            DispatchQueue.main.async {

                if let outputValue = filter.value(forKey: kCIOutputImageKey) as? CIImage {
                    let filteredImage = UIImage(ciImage: outputValue)
                    self.imageView.image = filteredImage
                }
            }
        }
    }
}

Best Answer

I've added comments to the key sections below:

func captureOutput(_ captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection) {
    autoreleasepool {

        connection.videoOrientation = AVCaptureVideoOrientation.landscapeLeft;

        // COMMENT: This line makes sense - this is your pixelbuffer from the camera.
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

        // COMMENT: OK, so you turn pixelBuffer into a CIImage...
        let cameraImage = CIImage(cvPixelBuffer: pixelBuffer)

        // COMMENT: And now you've created a CIFilter and given it that image as its input...
        let filter = CIFilter(name: "Fİlter")!
        filter.setValue(cameraImage, forKey: kCIInputImageKey)


        let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)!
        self.currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription)
        self.currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)

        if self.isWriting {
            if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true {
                // COMMENT: Here's where it gets weird. You've declared a new, empty pixelBuffer... but you already have one (pixelBuffer) that contains the image you want to write...
                var newPixelBuffer: CVPixelBuffer? = nil

                // COMMENT: And you grabbed memory from the pool.
                CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer)

                // COMMENT: And now you wrote an empty pixelBuffer back <-- this is what's causing the black frame.
                let success = self.assetWriterPixelBufferInput?.append(newPixelBuffer!, withPresentationTime: self.currentSampleTime!)

                if success == false {
                    print("Pixel Buffer failed")
                }
            }
        }

        // COMMENT: And now you're sending the filtered image back to the screen.
        DispatchQueue.main.async {

            if let outputValue = filter.value(forKey: kCIOutputImageKey) as? CIImage {
                let filteredImage = UIImage(ciImage: outputValue)
                self.imageView.image = filteredImage
            }
        }
    }
}

As I see it, you're essentially grabbing the camera image, creating a filtered copy, and then making an empty NEW pixel buffer and writing that out.

If you appended the pixelBuffer you grabbed instead of the new pixel buffer you created, you would write the image out successfully (just without the filter).
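That unfiltered version would be a one-line change inside the isWriting block, reusing the pixelBuffer from the guard at the top of captureOutput (a sketch of the unfiltered case, not the filtered fix):

// Appending the camera's own buffer writes real frames, just unfiltered:
let success = self.assetWriterPixelBufferInput?.append(pixelBuffer, withPresentationTime: self.currentSampleTime!)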

To successfully write out the filtered video, you need to create a new CVPixelBuffer from the CIImage. That solution already exists on Stack Overflow; I know, because I needed that step myself!
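One common way to do that step: render the filter's output into the buffer already pulled from the pool, so the appended buffer actually contains the filtered frame. A minimal sketch, reusing the filter and newPixelBuffer names from the code above (the CIContext is built inline for brevity; in practice create it once, e.g. as a property, since it is expensive to construct per frame):

let context = CIContext() // assumption: in real code, create once and reuse

// Inside the isWriting block, after pulling newPixelBuffer from the pool:
if let filteredImage = filter.outputImage, let buffer = newPixelBuffer {
    // Render the filtered CIImage into the pool buffer so it is no longer empty...
    context.render(filteredImage, to: buffer)

    // ...then append the now-populated buffer at the current timestamp.
    let success = self.assetWriterPixelBufferInput?.append(buffer, withPresentationTime: self.currentSampleTime!)

    if success == false {
        print("Pixel Buffer failed")
    }
}

With that change the writer receives the filtered frames instead of empty memory, and the saved movie should match the live preview.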

For more on "ios - Recording video with live filters in Swift", see this similar question on Stack Overflow: https://stackoverflow.com/questions/45172250/
