iOS: trimming audio files with Swift?
【Title】iOS: trimming audio files with Swift? 【Posted】2016-11-16 13:55:36 【Question】I have to merge an audio file with a recorded voice clip. For example, the recording is 47 seconds long, so I need to cut (trim) a 4-minute song down to 47 seconds and then merge the two audio files. This is what I have so far:
var url: NSURL?
if self.audioRecorder != nil {
    url = self.audioRecorder!.url
} else {
    url = self.soundFileURL!
}
print("playing \(url)")

do {
    self.newplayer = try AVPlayer(URL: url!)
    let avAsset = AVURLAsset(URL: url!, options: nil)
    print("\(avAsset)")
    let audioDuration = avAsset.duration
    let totalSeconds = CMTimeGetSeconds(audioDuration)
    let hours = floor(totalSeconds / 3600)
    var minutes = floor(totalSeconds % 3600 / 60)
    var seconds = floor(totalSeconds % 3600 % 60)
    print("hours = \(hours), minutes = \(minutes), seconds = \(seconds)")
}
This is the output: // hours = 0.0, minutes = 0.0, seconds = 42.0
For the trimming part I have tried the following, but how do I set the exact duration, the start and stop times, and the new output URL?
func exportAsset(asset: AVAsset, fileName: String) {
    let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
    let trimmedSoundFileURL = documentsDirectory.URLByAppendingPathComponent(fileName)
    print("saving to \(trimmedSoundFileURL!.absoluteString)")

    let filemanager = NSFileManager.defaultManager()
    if filemanager.fileExistsAtPath(trimmedSoundFileURL!.absoluteString!) {
        print("sound exists")
    }

    let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetAppleM4A)
    exporter!.outputFileType = AVFileTypeAppleM4A
    exporter!.outputURL = trimmedSoundFileURL

    let duration = CMTimeGetSeconds(asset.duration)
    if (duration < 5.0) {
        print("sound is not long enough")
        return
    }

    // e.g. the first 5 seconds
    let startTime = CMTimeMake(0, 1)
    let stopTime = CMTimeMake(5, 1)
    let exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime)
    exporter!.timeRange = exportTimeRange

    // do it
    exporter!.exportAsynchronouslyWithCompletionHandler({
        switch exporter!.status {
        case AVAssetExportSessionStatus.Failed:
            print("export failed \(exporter!.error)")
        case AVAssetExportSessionStatus.Cancelled:
            print("export cancelled \(exporter!.error)")
        default:
            print("export complete")
        }
    })
}
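For reference, the part the question is asking about (picking the start/stop times and the output URL) comes down to building a CMTimeRange and assigning it to the exporter's timeRange. A minimal sketch in current Swift syntax, trimming the song to the recording's duration; the file names and output path below are placeholders, not from the original post:

import AVFoundation

// Placeholder URLs – substitute your own recorded clip and song.
let recordingURL = URL(fileURLWithPath: "recording.m4a")
let songURL = URL(fileURLWithPath: "song.m4a")

let recordingAsset = AVURLAsset(url: recordingURL)
let songAsset = AVURLAsset(url: songURL)

// Trim range: from 0 up to the recording's length (e.g. 47 seconds).
let trimRange = CMTimeRange(start: .zero, duration: recordingAsset.duration)

let outputURL = FileManager.default
    .urls(for: .documentDirectory, in: .userDomainMask)[0]
    .appendingPathComponent("trimmed.m4a")

if let exporter = AVAssetExportSession(asset: songAsset, presetName: AVAssetExportPresetAppleM4A) {
    exporter.outputFileType = .m4a
    exporter.outputURL = outputURL
    exporter.timeRange = trimRange   // only this range is written out
    exporter.exportAsynchronously {
        switch exporter.status {
        case .completed:
            print("export complete: \(outputURL)")
        case .failed, .cancelled:
            print("export failed: \(String(describing: exporter.error))")
        default:
            break
        }
    }
}

AVAssetExportSession only writes the portion covered by timeRange, so the exported file ends up as long as the recording.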
【Comments】:
【Answer 1】: I finally found the answer to my question, and it works fine. I have attached the code below, with the audio-trimming code added. It should be useful for anyone trying to merge and trim audio (Swift 2.3):
func mixAudio() {
    let currentTime = CFAbsoluteTimeGetCurrent()
    let composition = AVMutableComposition()

    // Insert the first audio file (soundFileURL) into a composition track.
    let compositionAudioTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    compositionAudioTrack.preferredVolume = 0.8
    let avAsset = AVURLAsset.init(URL: soundFileURL, options: nil)
    print("\(avAsset)")
    var tracks = avAsset.tracksWithMediaType(AVMediaTypeAudio)
    let clipAudioTrack = tracks[0]
    do {
        try compositionAudioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, avAsset.duration), ofTrack: clipAudioTrack, atTime: kCMTimeZero)
    } catch _ {
    }

    // Insert the second audio file (soundFileURL1) into another track of the same composition.
    let compositionAudioTrack1 = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    compositionAudioTrack.preferredVolume = 0.8
    let avAsset1 = AVURLAsset.init(URL: soundFileURL1)
    print(avAsset1)
    var tracks1 = avAsset1.tracksWithMediaType(AVMediaTypeAudio)
    let clipAudioTrack1 = tracks1[0]
    do {
        try compositionAudioTrack1.insertTimeRange(CMTimeRangeMake(kCMTimeZero, avAsset1.duration), ofTrack: clipAudioTrack1, atTime: kCMTimeZero)
    } catch _ {
    }

    // Build the output URL (Library/Caches/Fav.m4a); soundFile1 is a class-level property.
    var paths = NSSearchPathForDirectoriesInDomains(.LibraryDirectory, .UserDomainMask, true)
    let CachesDirectory = paths[0]
    let strOutputFilePath = CachesDirectory.stringByAppendingString("/Fav")
    print(" strOutputFilePath is \n \(strOutputFilePath)")
    let requiredOutputPath = CachesDirectory.stringByAppendingString("/Fav.m4a")
    print(" requiredOutputPath is \n \(requiredOutputPath)")
    soundFile1 = NSURL.fileURLWithPath(requiredOutputPath)
    print(" Output path is \n \(soundFile1)")

    // Length of the first asset; `seconds` (whole seconds) is reused below as the trim length.
    var audioDuration = avAsset.duration
    var totalSeconds = CMTimeGetSeconds(audioDuration)
    var hours = floor(totalSeconds / 3600)
    var minutes = floor(totalSeconds % 3600 / 60)
    var seconds = Int64(totalSeconds % 3600 % 60)
    print("hours = \(hours), minutes = \(minutes), seconds = \(seconds)")

    // Prepare a recorder (class-level audioRecorder) pointing at the same output file.
    let recordSettings: [String : AnyObject] = [
        AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
        AVSampleRateKey: 12000,
        AVNumberOfChannelsKey: 1,
        AVEncoderAudioQualityKey: AVAudioQuality.Low.rawValue
    ]
    do {
        audioRecorder = try AVAudioRecorder(URL: soundFile1, settings: recordSettings)
        audioRecorder!.delegate = self
        audioRecorder!.meteringEnabled = true
        audioRecorder!.prepareToRecord()
    } catch let error as NSError {
        audioRecorder = nil
        print(error.localizedDescription)
    }

    // Remove any previous output file, then export the mixed composition.
    do {
        try NSFileManager.defaultManager().removeItemAtURL(soundFile1)
    } catch _ {
    }

    let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
    exporter!.outputURL = soundFile1
    exporter!.outputFileType = AVFileTypeAppleM4A

    let duration = CMTimeGetSeconds(avAsset1.duration)
    print(duration)
    if (duration < 5.0) {
        print("sound is not long enough")
        return
    }

    // Trim: export only the range from 0 to `seconds` (e.g. the first 30 seconds).
    let startTime = CMTimeMake(0, 1)
    let stopTime = CMTimeMake(seconds, 1)
    let exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime)
    print(exportTimeRange)
    exporter!.timeRange = exportTimeRange
    print(exporter!.timeRange)

    exporter!.exportAsynchronouslyWithCompletionHandler { () -> Void in
        print(" Output path is \n \(requiredOutputPath)")
        print("export complete: \(CFAbsoluteTimeGetCurrent() - currentTime)")

        var url: NSURL?
        if self.audioRecorder != nil {
            url = self.audioRecorder!.url
        } else {
            url = self.soundFile1!
        }
        print("playing \(url)")

        // Play back and upload the exported file (class-level optData, recordencryption, wasteplayer).
        do {
            print(self.soundFile1)
            print(" Output path is \n \(requiredOutputPath)")
            self.setSessionPlayback()
            self.optData = try NSData(contentsOfURL: self.soundFile1!, options: NSDataReadingOptions.DataReadingMappedIfSafe)
            print(self.optData)
            self.recordencryption = self.optData.base64EncodedStringWithOptions(NSDataBase64EncodingOptions())
            // print(self.recordencryption)
            self.myImageUploadRequest()
            self.wasteplayer = try AVAudioPlayer(contentsOfURL: self.soundFile1)
            self.wasteplayer.numberOfLoops = 0
            self.wasteplayer.play()
        } catch _ {
        }
    }
}
【Comments】:
This function also uses several class-level variables; could you add a comment on which variables are used for what purpose? @HariKarthick
【Answer 2】: A simple sound-trimming function based on the previous answer.
static func trimmSound(inUrl: URL, outUrl: URL, timeRange: CMTimeRange, callBack: @escaping () -> Void) {
    let startTime = timeRange.start
    let duration = timeRange.duration
    let audioAsset = AVAsset(url: inUrl)

    let composition = AVMutableComposition()
    let compositionAudioTrack = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))
    let sourceAudioTrack = audioAsset.tracks(withMediaType: AVMediaType.audio).first!
    do {
        try compositionAudioTrack?.insertTimeRange(CMTimeRangeMake(start: startTime, duration: duration), of: sourceAudioTrack, at: .zero)
    } catch {
        print(error.localizedDescription)
        return
    }

    let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
    exporter!.outputURL = outUrl
    exporter!.outputFileType = AVFileType.m4a
    exporter!.shouldOptimizeForNetworkUse = true
    exporter!.exportAsynchronously {
        DispatchQueue.main.async {
            callBack()
        }
    }
}
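A possible call site for this helper, assuming it lives in a hypothetical AudioTrimmer type and using placeholder URLs and a 10-second trim range:

let documents = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let inputURL = documents.appendingPathComponent("song.m4a")   // placeholder source file
let outputURL = FileManager.default.temporaryDirectory.appendingPathComponent("trimmed.m4a")

// The export fails if a file already exists at the output URL, so clear it first.
try? FileManager.default.removeItem(at: outputURL)

// Trim the first 10 seconds (600 is a common timescale for media work).
let range = CMTimeRange(start: .zero,
                        duration: CMTime(seconds: 10, preferredTimescale: 600))

AudioTrimmer.trimmSound(inUrl: inputURL, outUrl: outputURL, timeRange: range) {
    print("trim finished, file written to \(outputURL)")
}

Because the helper dispatches the callback on the main queue, it is safe to update UI from inside it.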
【Comments】: