I figured out how to render a waveform with the FDWaveformView pod, but I can't modify it to produce the kind of waveform shown in the screenshot below:

This is what I want to achieve:

Screenshot from an iPhone:

This code draws a continuous, sine-like waveform, but I need a bar/pulse-style waveform:

    soundWaveView.delegate = self
    soundWaveView.alpha = 0.0
    soundWaveView.audioURL = mainTrackURL
    soundWaveView.zoomSamples = 0 ..< soundWaveView.totalSamples / 3
    soundWaveView.doesAllowScrubbing = false
    soundWaveView.doesAllowStretch = false
    soundWaveView.doesAllowScroll = false
    soundWaveView.wavesColor = UIColor(red: 46/255, green: 188/255, blue: 191/255, alpha: 1)
    soundWaveView.progressColor = UIColor(red: 251/255, green: 237/255, blue: 101/255, alpha: 1)
    do {
        try AVAudioSession.sharedInstance().setActive(true)
        playerSoundWave = try AVAudioPlayer(contentsOf: mainTrackURL!, fileTypeHint: AVFileType.mp4.rawValue)
        songTotalTime = Double((playerSoundWave?.duration)!)
    }
    catch let error {
        print(error.localizedDescription)
    }

Question: How can I create or modify the waveform?

Can someone explain how to create and modify the waveform? I've already tried modifying it myself without any result. Any help would be appreciated.

Thanks in advance.

Best answer

Try this code:

import Foundation
import UIKit
import AVFoundation
import Accelerate
import SVProgressHUD // third-party pod, used for the error HUD below


public enum WaveformStyle {
case soundcloud
}

public class WaveformZoomable : UIView {

public var zoomFactor: Float = 1.0 {
    didSet {
        if zoomFactor > 1.0 {
            zoomFactor = 1.0
        }
        else if zoomFactor < 0.01 {
            zoomFactor = 0.01
        }
    }
}

public var style: WaveformStyle = .soundcloud {
    didSet {
        self.reload(zoomFactor: zoomFactor)
    }
}

struct readFile {
    static var floatValuesLeft = [Float]()
    static var floatValuesRight = [Float]()
    static var leftPoints = [CGPoint]()
    static var rightPoints = [CGPoint]()
    static var populated = false
}

let pixelWidth: CGFloat = 2.0
let pixelSpacing: CGFloat = 2.0

public convenience init(withFile: URL, style: WaveformStyle = .soundcloud) {
    self.init()

    openFile(withFile)

    self.style = style
}

public func openFile(_ file: URL) {
    let audioFile: AVAudioFile
    do {
        audioFile = try AVAudioFile(forReading: file)
    } catch {
        SVProgressHUD.showInfo(withStatus: "Error: sound file is corrupted")
        return
    }

    // specify the format we WANT for the buffer
    let format = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: audioFile.fileFormat.sampleRate, channels: audioFile.fileFormat.channelCount, interleaved: false)

    // initialize and fill the buffer
    let buffer = AVAudioPCMBuffer(pcmFormat: format!, frameCapacity: UInt32(audioFile.length))
    try! audioFile.read(into: buffer!)

    // copy buffer to readFile struct
    readFile.floatValuesLeft = Array(UnsafeBufferPointer(start: buffer?.floatChannelData?[0], count:Int(buffer!.frameLength)))

    readFile.populated = true

    reload(zoomFactor: zoomFactor)
}

public func reload(zoomFactor: Float = 1.0) {
    self.zoomFactor = zoomFactor
    setNeedsDisplay()
}

func makePoints() {
    if !readFile.populated { return }

    let viewWidth = bounds.width

    // number of samples to consider at the current zoom level
    let sampleCount = Int(Float(readFile.floatValuesLeft.count) * zoomFactor)

    // grab every nth sample (samplesPerPixel)
    let samplesPerPixel = Int(floor(Float(sampleCount) / Float(viewWidth)))

    // avoid a division by zero when there are fewer samples than pixels
    guard samplesPerPixel > 0 else { return }

    // the expected sample count after reduction
    let reducedSampleCount = sampleCount / samplesPerPixel

    // left channel
    var processingBuffer = [Float](repeating: 0.0,
                                   count: sampleCount)

    // get absolute values
    vDSP_vabs(readFile.floatValuesLeft, 1, &processingBuffer, 1, vDSP_Length(sampleCount))

    // vDSP_vswmax is supposed to do what the loop below does - use a sliding
    // window to find maximums - but it was producing strange results:
    // vDSP_vswmax(processingBuffer, samplesPerPixel, &maxSamplesBuffer, 1, vDSP_Length(reducedSampleCount), vDSP_Length(samplesPerPixel))

    // Instead, stride through the samples in steps of samplesPerPixel and pass
    // each range to our own maximumIn() method

    var maxSamplesBuffer = [Float](repeating: 0.0,
                                   count: reducedSampleCount)

    var offset = 0

    for i in stride(from: 0, to: sampleCount-samplesPerPixel, by: samplesPerPixel) {
        maxSamplesBuffer[offset] = maximumIn(processingBuffer, from: i, to: i+samplesPerPixel)
        offset = offset + 1
    }

    // Convert the maxSamplesBuffer values to CGPoints for drawing
    // We also normalize them for display here
    readFile.leftPoints = maxSamplesBuffer.enumerated().map({ (index, value) -> CGPoint in
        let normalized = normalizeForDisplay(value)
        let point = CGPoint(x: CGFloat(index), y: CGFloat(normalized))
        return point
    })

    // Interpolate points for smoother drawing
    for (index, point) in readFile.leftPoints.enumerated() {
        if index > 0 {
            let interpolatedPoint = CGPoint.lerp(start: readFile.leftPoints[index - 1], end: point, t: 0.5)
            readFile.leftPoints[index] = interpolatedPoint
        }
    }
}

func drawDetailedWaveform(_ rect: CGRect) {
    let path = UIBezierPath()

    path.move(to: CGPoint(x: 0.0, y: rect.height/2))

    // left channel

    for point in readFile.leftPoints {
        let drawFrom = CGPoint(x: point.x, y: path.currentPoint.y)

        path.move(to: drawFrom)

        // bottom half
        let drawPointBottom = CGPoint(x: point.x, y: path.currentPoint.y + (point.y))
        path.addLine(to: drawPointBottom)

        path.close()

        // top half
        let drawPointTop = CGPoint(x: point.x, y: path.currentPoint.y - (point.y))
        path.addLine(to: drawPointTop)

        path.close()
    }

    UIColor.orange.set()
    path.stroke()
    path.fill()
}

func drawSoundcloudWaveform(_ rect: CGRect) {
    let path = UIBezierPath()

    path.move(to: CGPoint(x: 0.0, y: rect.height/2))

    // left channel

    var index = 0

    while index < readFile.leftPoints.count {
        let point = readFile.leftPoints[index]

        let drawFrom = CGPoint(x: point.x, y: path.currentPoint.y)

        // bottom half
        path.move(to: drawFrom)

        let drawPointBottom = CGPoint(x: point.x, y: path.currentPoint.y + (point.y))
        path.addLine(to: drawPointBottom)
        path.addLine(to: CGPoint(x: drawPointBottom.x + pixelWidth, y: drawPointBottom.y))
        path.addLine(to: CGPoint(x: drawFrom.x + pixelWidth, y: drawFrom.y))

        path.close()

        // top half
        path.move(to: drawFrom)

        let drawPointTop = CGPoint(x: point.x, y: path.currentPoint.y - (point.y))
        path.addLine(to: drawPointTop)
        path.addLine(to: CGPoint(x: drawPointTop.x + pixelWidth, y: drawPointTop.y))
        path.addLine(to: CGPoint(x: drawFrom.x + pixelWidth, y: drawFrom.y))

        path.close()

        // increment index
        index = index + Int(pixelWidth) + Int(pixelSpacing)
    }

    UIColor(red:0.21, green:0.77, blue:0.78, alpha:1.0).set()
    path.stroke()
    path.fill()
}

override public func draw(_ rect: CGRect) {
    makePoints()

    // this clears the rect
    backgroundColor = .black

    switch style {
    case .soundcloud:
        drawSoundcloudWaveform(rect)
    }
}
}

public extension UIView {
    // scale a normalized (0...1) sample so that full amplitude spans half the view height
    func normalizeForDisplay(_ value: Float) -> Float {
        let maxHeight = Float(bounds.height)
        let minHeight = maxHeight / 2.0
        return value * minHeight
    }
}
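
Note that the class above calls two helpers, maximumIn(_:from:to:) and CGPoint.lerp(start:end:t:), that the answer does not include. Minimal implementations inferred from the call sites might look like this (both are assumptions, not part of the original answer):

    extension WaveformZoomable {
        // Hypothetical helper: largest value in buffer[from..<to],
        // inferred from the call in makePoints()
        func maximumIn(_ buffer: [Float], from: Int, to: Int) -> Float {
            var maximum = -Float.greatestFiniteMagnitude
            for index in from..<min(to, buffer.count) {
                maximum = max(maximum, buffer[index])
            }
            return maximum
        }
    }

    extension CGPoint {
        // Hypothetical helper: linear interpolation between two points, t in 0...1
        static func lerp(start: CGPoint, end: CGPoint, t: CGFloat) -> CGPoint {
            return CGPoint(x: start.x + (end.x - start.x) * t,
                           y: start.y + (end.y - start.y) * t)
        }
    }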

Usage example:
waveform.backgroundColor = UIColor(hexString: "F4F4F4") // UIColor(hexString:) comes from an extension or pod, not UIKit
waveform.openFile(URL(string: SongUrl)!) // AVAudioFile(forReading:) expects a local file URL
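
Because reload(zoomFactor:) clamps the factor to 0.01...1.0 and triggers a redraw, you can wire it to a pinch gesture for interactive zooming. A minimal sketch, assuming the owning view controller holds a WaveformZoomable named waveform (this wiring is not part of the original answer):

    // Hypothetical view controller wiring for pinch-to-zoom
    override func viewDidLoad() {
        super.viewDidLoad()
        waveform.isUserInteractionEnabled = true
        let pinch = UIPinchGestureRecognizer(target: self, action: #selector(handlePinch(_:)))
        waveform.addGestureRecognizer(pinch)
    }

    @objc func handlePinch(_ gesture: UIPinchGestureRecognizer) {
        // reload(zoomFactor:) clamps the value and calls setNeedsDisplay()
        waveform.reload(zoomFactor: waveform.zoomFactor * Float(gesture.scale))
        gesture.scale = 1.0
    }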

Related Stack Overflow question ("ios - How to change sound wave in Swift"): https://stackoverflow.com/questions/56680551/
