Audio Player in Swift is not getting volume and pitch values
Posted: 2020-10-14 12:17:47
Question: I am trying to make an audio player in SwiftUI. The audio player should have these features:
- Play/stop the audio
- Loop the playback
- Change the volume via a slider
- Change the audio pitch via a slider
Currently I am facing two problems:
- The audio player is not picking up the volume and pitch slider values.
- When I stop, play again, and change the volume/pitch slider, the app crashes with the following message:
2020-10-14 17:34:08.957709+0530 SwiftUIAudioPlayer[1369:24886] [avae] AVAEInternal.h:109 [AVAudioFile.mm:484:-[AVAudioFile readIntoBuffer:frameCount:error:]: (ExtAudioFileRead(_imp->_extAudioFile, &ioFrames, buffer.mutableAudioBufferList)): error -50
Here is the link to the project: https://github.com/varun-naharia/SwiftUIAudioPlayer
ContentView.swift
import Foundation
import SwiftUI

struct ContentView: View {
    @State var volume: Double = 0.00
    @State var pitch: Double = 0.0
    @State var musicFiles: [SoundModel] = [SoundModel(file: "metro35", name: "Metronome", fileExtension: "wav"), SoundModel(file: "johnson_tone_down_5min", name: "Johnson", fileExtension: "wav"), SoundModel(file: "sine_140_6s_fade_ogg", name: "Sine wave", fileExtension: "wav")]
    @State var selectedMusicFile: SoundModel = SoundModel(file: "sine_140_6s_fade_ogg", name: "Sine wave", fileExtension: "wav")
    @State var showSoundPicker = false
    @State var selectedGraph = "skin_conductance"
    @State var iconSize: CGFloat = 0.124
    @State var iconSpace: CGFloat = 0.015
    @State var heart = false

    init() {
        Player.setPitch(pitch: Float(self.pitch))
        Player.setVolume(volume: Float(self.volume))
    }

    var body: some View {
        GeometryReader { geometry in
            ZStack {
                VStack(alignment: .leading) {
                    Button(action: {
                        self.heart = !self.heart
                        self.selectedGraph = "heart"
                        if(self.heart) {
                            Player.playMusic(musicfile: self.selectedMusicFile.file, fileExtension: self.selectedMusicFile.fileExtension)
                        } else {
                            Player.stopMusic()
                            self.selectedGraph = ""
                        }
                    }) {
                        Image(self.selectedGraph == "heart" ? "heart" : "heart_disabled")
                            .resizable()
                            .frame(width: geometry.size.height*self.iconSize, height: geometry.size.height*self.iconSize)
                    }
                    .frame(width: geometry.size.height*self.iconSize, height: geometry.size.height*self.iconSize)
                    .padding(.bottom, geometry.size.height*(self.iconSpace/2))

                    Button(action: {
                        self.showSoundPicker = !self.showSoundPicker
                    }) {
                        Image("tone")
                            .resizable()
                            .frame(width: geometry.size.height*self.iconSize, height: geometry.size.height*self.iconSize)
                    }
                    .frame(width: geometry.size.height*self.iconSize, height: geometry.size.height*self.iconSize)
                    .padding(.bottom, geometry.size.height*(self.iconSpace/2))

                    HStack {
                        SwiftUISlider(
                            thumbColor: .green,
                            thumbImage: "musicNote 2",
                            value: self.$volume
                        ).padding(.horizontal)

                        Button(action: {
                        }) {
                            Image("centerGraph")
                                .resizable()
                                .frame(width: geometry.size.width*0.05, height: geometry.size.width*0.05)
                        }
                        .frame(width: geometry.size.width*0.03, height: geometry.size.width*0.03)

                        SwiftUISlider(
                            thumbColor: .green,
                            thumbImage: "timerSlider 2",
                            minValue: 0,
                            maxValue: 20,
                            value: self.$pitch
                        )
                        .padding(.horizontal)
                        .frame(width: (geometry.size.width/2)-geometry.size.width*0.05, height: geometry.size.width*0.05)
                    }
                    .background(Color(UIColor.lightGray))
                    .frame(width: geometry.size.width, height: geometry.size.height*0.10)
                }

                if(self.showSoundPicker) {
                    ChooseSoundView(
                        musicFiles: self.musicFiles,
                        selectedMusicFile: self.$selectedMusicFile,
                        showSoundPicker: self.$showSoundPicker,
                        isPlaying: self.selectedGraph != ""
                    )
                    .frame(width: geometry.size.width*0.6, height: geometry.size.height*0.7, alignment: .center)
                    .background(Color.white)
                }
            }
            .frame(maxWidth: geometry.size.width, maxHeight: geometry.size.height)
            .background(Color(UIColor.lightGray))
        }
    }
}

struct ContentView_Previews: PreviewProvider {
    static var previews: some View {
        ContentView()
    }
}
struct ChooseSoundView: View {
    @State var musicFiles: [SoundModel]
    @Binding var selectedMusicFile: SoundModel
    @Binding var showSoundPicker: Bool
    @State var isPlaying: Bool

    var body: some View {
        GeometryReader { geometry in
            VStack(alignment: .leading) {
                List(self.musicFiles, id: \.name) { item in
                    Image(self.selectedMusicFile.file == item.file ? "radio-button_on" : "radio-button_off")
                        .resizable()
                        .frame(width: 15, height: 15)
                    Button(action: {
                        print(item.name)
                        self.selectedMusicFile = item
                        self.showSoundPicker = false
                        if(self.isPlaying) {
                            // Player.stopMusic()
                            // Player.playMusic(musicfile: self.selectedMusicFile.file, fileExtension: self.selectedMusicFile.fileExtension)
                        }
                    }) {
                        Text(item.name)
                            .frame(width: geometry.size.width*90,
                                   height: 50.0,
                                   alignment: .leading)
                    }
                    .frame(width: geometry.size.width*90, height: 50.0)
                }
                HStack {
                    Button(action: {
                        self.showSoundPicker = false
                    }) {
                        Text("Done")
                            .frame(width: geometry.size.width*0.45,
                                   height: 50.0,
                                   alignment: .center)
                    }
                    .frame(width: geometry.size.width*0.45, height: 50.0)

                    Button(action: {
                        self.showSoundPicker = false
                    }) {
                        Text("Cancel")
                            .frame(width: geometry.size.width*0.45,
                                   height: 50.0,
                                   alignment: .center)
                    }
                    .frame(width: geometry.size.width*0.45, height: 50.0)
                }
            }
            .background(Color.white)
        }
    }
}
Player.swift
import Foundation
import AVFoundation

class Player {
    private static var breathAudioPlayer: AVAudioPlayer?
    private static var audioPlayerEngine = AVAudioEngine()
    private static let speedControl = AVAudioUnitVarispeed()
    private static var pitchControl = AVAudioUnitTimePitch()
    private static var audioPlayerNode = AVAudioPlayerNode()
    private static var volume: Float = 1.0

    private static func playSounds(soundfile: String) {
        if let path = Bundle.main.path(forResource: soundfile, ofType: "m4a") {
            do {
                breathAudioPlayer = try AVAudioPlayer(contentsOf: URL(fileURLWithPath: path))
                breathAudioPlayer?.volume = self.volume
                breathAudioPlayer?.prepareToPlay()
                breathAudioPlayer?.play()
            } catch {
                print("Error")
            }
        }
    }

    static func playMusic(musicfile: String, fileExtension: String) {
        if let path = Bundle.main.path(forResource: musicfile, ofType: fileExtension) {
            do {
                // 1: load the file
                let audioPlayFile = try AVAudioFile(forReading: URL(fileURLWithPath: path))
                let audioFileBuffer = AVAudioPCMBuffer(pcmFormat: audioPlayFile.fileFormat, frameCapacity: AVAudioFrameCount(audioPlayFile.length))
                try? audioPlayFile.read(into: audioFileBuffer!)

                // 2: create the audio player
                audioPlayerNode = AVAudioPlayerNode()
                audioPlayerEngine = AVAudioEngine()

                // you can replace mp3 with anything else you like, just make sure you load it from our project
                // making sure to clean up the audio hardware to avoid any damage and bugs
                audioPlayerNode.stop()
                audioPlayerEngine.stop()
                audioPlayerEngine.reset()
                audioPlayerEngine.attach(audioPlayerNode)

                let pitchControl = AVAudioUnitTimePitch()

                // assign the speed and pitch
                audioPlayerEngine.attach(pitchControl)
                audioPlayerEngine.connect(audioPlayerNode, to: pitchControl, format: nil)
                audioPlayerEngine.connect(pitchControl, to: audioPlayerEngine.outputNode, format: nil)
                audioPlayerNode.scheduleFile(audioPlayFile, at: nil, completionHandler: nil)

                // try to start playing the audio
                audioPlayerNode.scheduleBuffer(audioFileBuffer!, at: nil, options: .loops, completionHandler: nil)
                do {
                    try audioPlayerEngine.start()
                } catch {
                    print(error)
                }

                // play the audio
                audioPlayerNode.play()
            } catch {
                print("Error")
            }
        }
    }

    static func breathIn() {
        // Player.playSounds(soundfile: "breathin")
    }

    static func breathOut() {
        // Player.playSounds(soundfile: "breathout")
    }

    static func play(musicFile: String, fileExtension: String) {
        Player.playMusic(musicfile: musicFile, fileExtension: fileExtension)
    }

    static func stopMusic() {
        audioPlayerNode.pause()
        audioPlayerNode.stop()
    }

    static func setPitch(pitch: Float) {
        pitchControl.pitch = pitch
    }

    static func setVolume(volume: Float) {
        audioPlayerNode.volume = volume
    }
}
SwiftUISlider.swift
import Foundation
import SwiftUI

struct SwiftUISlider: UIViewRepresentable {
    var onChangeNotification: String = ""

    final class Coordinator: NSObject {
        // The class property value is a binding: It's a reference to the SwiftUISlider
        // value, which receives a reference to a @State variable value in ContentView.
        var value: Binding<Double>

        // Create the binding when you initialize the Coordinator
        init(value: Binding<Double>) {
            self.value = value
        }

        // Create a valueChanged(_:) action
        @objc func valueChanged(_ sender: UISlider) {
            self.value.wrappedValue = Double(sender.value)
        }
    }

    var thumbColor: UIColor = .white
    var minTrackColor: UIColor?
    var maxTrackColor: UIColor?
    var thumbImage: String?
    var minValue: Float?
    var maxValue: Float?
    @Binding var value: Double

    func makeUIView(context: Context) -> UISlider {
        let slider = UISlider(frame: .zero)
        slider.thumbTintColor = thumbColor
        slider.minimumTrackTintColor = minTrackColor
        slider.maximumTrackTintColor = maxTrackColor
        slider.value = Float(value)
        if(self.minValue != nil) {
            slider.minimumValue = self.minValue!
        }
        if(self.maxValue != nil) {
            slider.maximumValue = self.maxValue!
        }
        slider.setThumbImage(UIImage(named: self.thumbImage ?? ""), for: .normal)
        slider.setThumbImage(UIImage(named: self.thumbImage ?? ""), for: .focused)
        slider.setThumbImage(UIImage(named: self.thumbImage ?? ""), for: .highlighted)
        slider.addTarget(
            context.coordinator,
            action: #selector(Coordinator.valueChanged(_:)),
            for: .valueChanged
        )
        return slider
    }

    func onValueChange(_ sender: UISlider) {
    }

    func updateUIView(_ uiView: UISlider, context: Context) {
        // Coordinating data between UIView and SwiftUI view
        uiView.value = Float(self.value)
    }

    func makeCoordinator() -> SwiftUISlider.Coordinator {
        Coordinator(value: $value)
    }
}
SoundModel.swift
import Foundation
import Combine

class SoundModel: ObservableObject, Identifiable {
    @Published var file: String
    @Published var name: String
    @Published var fileExtension: String

    init(file: String, name: String, fileExtension: String) {
        self.file = file
        self.name = name
        self.fileExtension = fileExtension
    }
}
Answer 1: Your first problem is that you are not tracking changes to the volume/pitch values. To do that, move them into a class:
class PlayerSetup: ObservableObject {
    @Published var volume: Double = 0.00 {
        didSet {
            Player.setVolume(volume: Float(self.volume))
        }
    }
    @Published var pitch: Double = 0.0 {
        didSet {
            Player.setPitch(pitch: Float(self.pitch))
        }
    }
}
Declare it in the view:
@ObservedObject var playerSetup = PlayerSetup()
and bind it to your sliders:
SwiftUISlider(
thumbColor: .green,
thumbImage: "musicNote 2",
value: $playerSetup.volume
).padding(.horizontal)
SwiftUISlider(
thumbColor: .green,
thumbImage: "timerSlider 2",
minValue: 0,
maxValue: 20,
value: $playerSetup.pitch
)
It crashes when it finishes playing the file, because try? audioPlayFile.read(into: audioFileBuffer!) fails and your buffer gets scheduled after the file has already been read to the end. It plays the file the first time only because of the scheduleFile call. If you want to loop a single file, try calling this function instead:
static func scheduleNext(audioPlayFile: AVAudioFile) {
    audioPlayerNode.scheduleFile(audioPlayFile, at: nil) {
        DispatchQueue.main.async {
            scheduleNext(audioPlayFile: audioPlayFile)
        }
    }
}
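A possible way to use it (a sketch only, assuming the function above is added to the same Player class): call it once in playMusic in place of the two existing schedule calls, so every completion schedules the same file again:

// Sketch: inside playMusic, after the nodes are attached and connected.
// audioPlayerNode.scheduleFile(audioPlayFile, at: nil, completionHandler: nil)                        <- remove
// audioPlayerNode.scheduleBuffer(audioFileBuffer!, at: nil, options: .loops, completionHandler: nil)  <- remove
scheduleNext(audioPlayFile: audioPlayFile)   // first pass; the completion keeps re-scheduling it
do {
    try audioPlayerEngine.start()
} catch {
    print(error)
}
audioPlayerNode.play()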
pitchControl has no effect because you are using a local value when you start playing; just delete the local declaration so the class-level pitchControl is the one that ends up in the chain.
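A minimal sketch of that change, assuming the static pitchControl already declared at the top of Player:

// In playMusic:
// let pitchControl = AVAudioUnitTimePitch()   <- remove this local declaration
audioPlayerEngine.attach(pitchControl)         // now attaches the class-level Player.pitchControl
audioPlayerEngine.connect(audioPlayerNode, to: pitchControl, format: nil)
audioPlayerEngine.connect(pitchControl, to: audioPlayerEngine.outputNode, format: nil)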
About the volume: as the documentation says, "This property is implemented only by the AVAudioEnvironmentNode and AVAudioMixerNode class mixers." So you can't use it on the player node; you need to create a mixer node, add it to the node chain, and change its volume.
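One possible sketch of that (the mixerNode property is an assumption for illustration, it is not in the original project): put an AVAudioMixerNode between the pitch unit and the output, and point setVolume at it:

// New static property in Player (hypothetical name):
private static var mixerNode = AVAudioMixerNode()

// In playMusic, wire the chain as player -> pitch -> mixer -> output:
audioPlayerEngine.attach(mixerNode)
audioPlayerEngine.connect(audioPlayerNode, to: pitchControl, format: nil)
audioPlayerEngine.connect(pitchControl, to: mixerNode, format: nil)
audioPlayerEngine.connect(mixerNode, to: audioPlayerEngine.outputNode, format: nil)

// And make setVolume target the mixer instead of the player node:
static func setVolume(volume: Float) {
    mixerNode.outputVolume = volume
}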
Also, to clean up the node-wiring code, I suggest the following:
let nodes = [
    audioPlayerNode,
    pitchControl,
    mixerNode,
]
nodes.forEach { node in
    audioPlayerEngine.attach(node)
}
zip(nodes, (nodes.dropFirst() + [audioPlayerEngine.outputNode]))
    .forEach { firstNode, secondNode in
        audioPlayerEngine.connect(firstNode, to: secondNode, format: nil)
    }
It attaches and connects all the nodes one by one.
https://github.com/PhilipDukhov/SwiftUIAudioPlayer/tree/fixes
Answer 2: The main problem with your code is that the view and the view model are mixed together. A lot of what currently sits in the View must live in the view model.
Your view:
struct ContentView: View {
    @State var volume: Double = 0.00
    @State var pitch: Double = 0.0
    @State var musicFiles: [SoundModel] = [SoundModel(file: "metro35", name: "Metronome", fileExtension: "wav"), SoundModel(file: "johnson_tone_down_5min", name: "Johnson", fileExtension: "wav"), SoundModel(file: "sine_140_6s_fade_ogg", name: "Sine wave", fileExtension: "wav")]
    @State var selectedMusicFile: SoundModel = SoundModel(file: "sine_140_6s_fade_ogg", name: "Sine wave", fileExtension: "wav")
    @State var showSoundPicker = false
    @State var selectedGraph = "skin_conductance"
    @State var iconSize: CGFloat = 0.124
    @State var iconSpace: CGFloat = 0.015
    @State var heart = false

    init() {
        Player.setPitch(pitch: Float(self.pitch))
        Player.setVolume(volume: Float(self.volume))
    }
The view should be:
struct PlayerView: View {
    @ObservedObject var viewModel: PlayerViewModel
    @State var iconSize: CGFloat = 0.124
    @State var iconSpace: CGFloat = 0.015

    init() {
        // what was written here must be moved to the init of PlayerViewModel
    }
Your model:
import Foundation
import AVFoundation

class Player {
    private static var breathAudioPlayer: AVAudioPlayer?
    private static var audioPlayerEngine = AVAudioEngine()
    private static let speedControl = AVAudioUnitVarispeed()
    private static var pitchControl = AVAudioUnitTimePitch()
    private static var audioPlayerNode = AVAudioPlayerNode()
    private static var volume: Float = 1.0

    private static func playSounds(soundfile: String)
        ....
The view model should be:
import Foundation
import AVFoundation

class PlayerViewModel: ObservableObject {
    @Published var volume: Double = 0.00
    @Published var pitch: Double = 0.0
    @Published var musicFiles: [SoundModel] = [SoundModel(file: "metro35", name: "Metronome", fileExtension: "wav"), SoundModel(file: "johnson_tone_down_5min", name: "Johnson", fileExtension: "wav"), SoundModel(file: "sine_140_6s_fade_ogg", name: "Sine wave", fileExtension: "wav")]
    @Published var selectedMusicFile: SoundModel = SoundModel(file: "sine_140_6s_fade_ogg", name: "Sine wave", fileExtension: "wav")
    @Published var showSoundPicker = false
    @Published var selectedGraph = "skin_conductance"
    //@Published var heart = false // SoundViewModel's property

    private static var breathAudioPlayer: AVAudioPlayer?
    private static var audioPlayerEngine = AVAudioEngine()
    private static let speedControl = AVAudioUnitVarispeed()
    private static var pitchControl = AVAudioUnitTimePitch()
    private static var audioPlayerNode = AVAudioPlayerNode()
    private static var volume: Float = 1.0

    private static func playSounds(soundfile: String)
If you move all of this into its proper place, your code will almost certainly work better, and your problem will most likely be solved.
Right now you have to track every change manually, and the structure of your code is the reason. You shouldn't need to track it by hand; that is extra, useless code. Fix the code structure instead of reaching for didSet.
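For illustration only, here is a rough sketch of what that structure could look like once things are in their proper places (all names and the exact engine wiring are assumptions, not code from the original project): the view model owns the audio engine and the published state, and the view only binds to it and calls its methods.

import SwiftUI
import AVFoundation

final class PlayerViewModel: ObservableObject {
    @Published var volume: Double = 1.0
    @Published var pitch: Double = 0.0

    private let engine = AVAudioEngine()
    private let playerNode = AVAudioPlayerNode()
    private let pitchControl = AVAudioUnitTimePitch()
    private let mixer = AVAudioMixerNode()

    init() {
        // Wire player -> pitch -> mixer -> output once, up front.
        [playerNode, pitchControl, mixer].forEach { engine.attach($0) }
        engine.connect(playerNode, to: pitchControl, format: nil)
        engine.connect(pitchControl, to: mixer, format: nil)
        engine.connect(mixer, to: engine.outputNode, format: nil)
    }

    func play(file: String, fileExtension: String) {
        guard let url = Bundle.main.url(forResource: file, withExtension: fileExtension),
              let audioFile = try? AVAudioFile(forReading: url) else { return }
        applySettings()
        playerNode.scheduleFile(audioFile, at: nil, completionHandler: nil)
        try? engine.start()
        playerNode.play()
    }

    func stop() {
        playerNode.stop()
    }

    func applySettings() {
        mixer.outputVolume = Float(volume)   // volume lives on a mixer node, not on the player node
        pitchControl.pitch = Float(pitch)
    }
}

struct PlayerView: View {
    @ObservedObject var viewModel = PlayerViewModel()

    var body: some View {
        VStack {
            // Ranges match the sliders from the question; applySettings() is called when editing ends.
            Slider(value: $viewModel.volume, in: 0...1) { _ in viewModel.applySettings() }
            Slider(value: $viewModel.pitch, in: 0...20) { _ in viewModel.applySettings() }
            Button("Play") { viewModel.play(file: "metro35", fileExtension: "wav") }
            Button("Stop") { viewModel.stop() }
        }
    }
}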