I am trying to record audio, render it offline to a file with AudioKit.renderToFile, and then play the originally recorded audio file back with AKPlayer.
import UIKit
import AVFoundation
import AudioKit

class ViewController: UIViewController {

    private var recordUrl: URL!
    private var isRecording: Bool = false
    public var player: AKPlayer!
    private let format = AVAudioFormat(commonFormat: .pcmFormatFloat64, sampleRate: 44100, channels: 2, interleaved: true)!
    private var amplitudeTracker: AKAmplitudeTracker!
    private var boostedMic: AKBooster!
    private var mic: AKMicrophone!
    private var micMixer: AKMixer!
    private var silence: AKBooster!
    public var recorder: AKNodeRecorder!

    @IBOutlet weak var recordButton: UIButton!

    override func viewDidLoad() {
        super.viewDidLoad()
        //self.recordUrl = Bundle.main.url(forResource: "sound", withExtension: "caf")
        //self.startAudioPlayback(url: self.recordUrl!)
        self.recordUrl = self.urlForDocument("record.caf")
    }
    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    func requestMic(completion: @escaping () -> Void) {
        AVAudioSession.sharedInstance().requestRecordPermission({ (granted: Bool) in
            if granted { completion() }
        })
    }

    public func switchToMicrophone() {
        stopEngine()
        do {
            try AKSettings.setSession(category: .playAndRecord, with: .allowBluetoothA2DP)
        } catch {
            AKLog("Could not set session category.")
        }
        // Signal chain: mic -> mixer -> booster (gain 5) -> amplitude tracker -> silent booster -> output
        mic = AKMicrophone()
        micMixer = AKMixer(mic)
        boostedMic = AKBooster(micMixer, gain: 5)
        amplitudeTracker = AKAmplitudeTracker(boostedMic)
        silence = AKBooster(amplitudeTracker, gain: 0)
        AudioKit.output = silence
        startEngine()
    }
    @IBAction func startStopRecording(_ sender: Any) {
        self.isRecording = !self.isRecording
        if self.isRecording {
            self.startRecording()
            self.recordButton.setTitle("Stop Recording", for: .normal)
        } else {
            self.stopRecording()
            self.recordButton.setTitle("Start Recording", for: .normal)
        }
    }

    func startRecording() {
        self.requestMic() {
            self.switchToMicrophone()
            if let url = self.recordUrl {
                do {
                    // Record from the mic mixer into a float64 CAF file in Documents
                    let audioFile = try AKAudioFile(forWriting: url, settings: self.format.settings, commonFormat: .pcmFormatFloat64, interleaved: true)
                    self.recorder = try AKNodeRecorder(node: self.micMixer, file: audioFile)
                    try self.recorder.reset()
                    try self.recorder.record()
                } catch {
                    print("error setting up recording", error)
                }
            }
        }
    }

    func stopRecording() {
        recorder.stop()
        startAudioPlayback(url: self.recordUrl)
    }
    @IBAction func saveToDisk(_ sender: Any) {
        if let source = self.player, let saveUrl = self.urlForDocument("pitchAudio.caf") {
            do {
                source.stop()
                let audioFile = try AKAudioFile(forWriting: saveUrl, settings: self.format.settings, commonFormat: .pcmFormatFloat64, interleaved: true)
                // Render the player's output offline into pitchAudio.caf
                try AudioKit.renderToFile(audioFile, duration: source.duration, prerender: {
                    source.play()
                })
                print("audio file rendered")
            } catch {
                print("error rendering", error)
            }
            // PROBLEM STARTS HERE //
            self.startAudioPlayback(url: self.recordUrl)
        }
    }
    public func startAudioPlayback(url: URL) {
        print("loading playback audio", url)
        self.stopEngine()
        do {
            try AKSettings.setSession(category: .playback)
            player = AKPlayer()
            try player.load(url: url)
        } catch {
            print("error setting up audio playback", error)
            return
        }
        player.prepare()
        player.isLooping = true
        self.setPitch(pitch: self.getPitch(), saveValue: false)
        AudioKit.output = player
        startEngine()
        startPlayer()
    }

    public func startPlayer() {
        if AudioKit.engine.isRunning { self.player.play() }
        else { print("audio engine not running, can't play") }
    }

    public func startEngine() {
        if !AudioKit.engine.isRunning {
            print("starting engine")
            do { try AudioKit.start() }
            catch {
                print("error starting audio", error)
            }
        }
    }

    public func stopEngine() {
        if AudioKit.engine.isRunning {
            print("stopping engine")
            do {
                try AudioKit.stop()
            } catch {
                print("error stopping audio", error)
            }
        }
        // playback doesn't work without this?
        mic = nil
    }
    @IBAction func changePitch(_ sender: UISlider) {
        self.setPitch(pitch: Double(sender.value))
    }

    public func getPitch() -> Double {
        return UserDefaults.standard.double(forKey: "pitchFactor")
    }

    public func setPitch(pitch: Double, saveValue: Bool = true) {
        player.pitch = pitch * 1000.0
        if saveValue {
            UserDefaults.standard.set(pitch, forKey: "pitchFactor")
            UserDefaults.standard.synchronize()
        }
    }

    func urlForDocument(_ named: String) -> URL? {
        let path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as String
        let url = NSURL(fileURLWithPath: path)
        if let pathComponent = url.appendingPathComponent(named) {
            return pathComponent
        }
        return nil
    }
}
The call order is switchToMicrophone, startRecording, stopRecording, startAudioPlayback, saveToDisk, and then startAudioPlayback again.
See ViewController.swift in the GitHub repo for the complete code.
After the renderToFile call, errors occur when AudioKit is restarted for the player. Everything works correctly if I take out either the recording step or the offline-render step, but not when both are used together.
Best answer
The problem may be related to your execution order. Try swapping startAudioPlayback and saveToDisk so that saveToDisk runs first, and only then read the file back and play it with startAudioPlayback.
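In code, that suggestion would look roughly like the sketch below. It reuses the question's method names; stopRecordingAndSave is a hypothetical helper, and it assumes saveToDisk can actually run at that point (i.e. the player it renders from is already loaded).

    // Sketch of the suggested ordering only, not a confirmed fix:
    // write the rendered file to disk first, then read a file back and play it.
    func stopRecordingAndSave() {
        recorder.stop()
        saveToDisk(self)                            // render/write to disk first
        startAudioPlayback(url: self.recordUrl)     // then load the file and play it
    }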
Edit: So far I think I have identified the problem. Once the file is saved, the other temporary file that was being recorded disappears for some reason. This still needs to be narrowed down.
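One way to narrow that down (a purely diagnostic sketch, assuming recordUrl still points at the recording; it would go around the saveToDisk call) is to check whether the recorded file survives the offline render:

    // Hedged diagnostic: does record.caf still exist after AudioKit.renderToFile?
    let recordPath = self.recordUrl.path
    print("recording exists before render:", FileManager.default.fileExists(atPath: recordPath))
    self.saveToDisk(self)   // triggers the offline render
    print("recording exists after render:", FileManager.default.fileExists(atPath: recordPath))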
Alternatively, a possible workaround is to send the whole saveToDisk call to a background thread so that it does not interrupt the file that is currently playing.
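A rough sketch of that idea (an assumption rather than a verified fix; AudioKit engine start/stop may still need to happen on the main thread):

    // Hedged sketch: run the offline render off the main queue so it doesn't
    // block the file that is currently playing, then hop back to main.
    DispatchQueue.global(qos: .userInitiated).async {
        self.saveToDisk(self)                              // offline render in the background
        DispatchQueue.main.async {
            self.startAudioPlayback(url: self.recordUrl)   // resume playback on main
        }
    }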
I will tinker with it a bit more in my spare time and keep you posted.
编辑2:
Check this answer: https://stackoverflow.com/a/48133092/9497657
If you get stuck, try posting your question here:
https://github.com/audiokit/AudioKit/issues/
Also have a look at this tutorial:
https://www.raywenderlich.com/145770/audiokit-tutorial-getting-started
It may also be worth messaging Aurelius Prochazka, since he is an AudioKit developer and can help you.
About ios - AudioKit 4.3: record audio, render it offline, then play it back, a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/50788803/