AudioServicesPlaySystemSound sound volume dependency - iOS

I'm making an iOS keyboard extension.
To play a click sound, I use AudioServicesPlaySystemSound.
However, some users reported that the click sound sometimes follows the ringer volume (bell icon) and sometimes the media volume (speaker icon).
I tested in Apple's Memo app and found the same inconsistent dependency there.
Here is my init code:
func initTypeSound(soundIndex: Int) {
    let bundle = NSBundle.mainBundle()
    for var i = 0; i < MAX_TYPE_SOUND_COUNT; i++ {
        if let url = bundle.URLForResource(NSString(format: "type%d_%d", soundIndex, i) as String, withExtension: "wav") {
            // file exists
            var soundID: SystemSoundID = 0
            AudioServicesCreateSystemSoundID(url, &soundID)
            mTypeSoundIDs.insert(soundID, atIndex: i)
        } else {
            // no file
        }
    }
}
and here is the code to play a sound:
func play(soundType: KKSoundType) {
    if (!mHasPermission || !mIsSound) {
        return
    }
    let session = AVAudioSession.sharedInstance()
    let systemVolume = session.outputVolume
    if (systemVolume == 0) {
        return
    }
    var soundId: SystemSoundID!
    switch (soundType) {
    case .Type:
        let rand = Int(arc4random_uniform(UInt32(mTypeSoundIDs.count)))
        soundId = mTypeSoundIDs[rand]
    case .Space:
        soundId = mSpaceSoundID
    default:
        return
    }
    if mIsSound {
        AudioServicesPlaySystemSound(soundId)
    }
}
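For what it's worth, a keyboard extension can also route clicks through UIDevice.playInputClick(), which honors the user's "Keyboard Clicks" setting instead of the ringer/media volume ambiguity of AudioServicesPlaySystemSound. A minimal sketch, assuming a custom input view (the class name is illustrative, not from the project above):
import UIKit

// The input view must adopt UIInputViewAudioFeedback and opt in,
// otherwise playInputClick() is silently ignored.
class KeyboardView: UIInputView, UIInputViewAudioFeedback {
    var enableInputClicksWhenVisible: Bool { return true }

    func keyTapped() {
        // Plays the standard keyboard click sound.
        UIDevice.current.playInputClick()
    }
}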

Related

How to modify Engine's output on iOS AudioKit

I want to let the user choose different sound effects to listen to while playing music.
This is my code:
class EffectConductor: ObservableObject, ProcessesPlayerInput {
    var engine = AudioEngine()
    var player = AudioPlayer()
    // var dryWetMixer : DryWetMixer
    var isPlaying = false

    init(path: String) {
        let mediaUrl = URL(fileURLWithPath: path)
        let file = try! AVAudioFile(forReading: mediaUrl)
        try! player.load(file: file, buffered: true)
        engine.output = player
    }

    @Published var effectType: Int = 0 {
        didSet {
            var node: Node? = nil
            switch effectType {
            case 1:
                node = AutoPanner(player)
            case 2:
                node = AutoWah(player)
            case 3:
                node = Compressor(player)
            case 4:
                node = DynamicRangeCompressor(player)
            case 5:
                node = Expander(player)
            case 6:
                node = Phaser(player)
            case 7:
                node = StringResonator(player)
            default:
                node = nil
            }
            if node == nil {
                print("effect nil")
                engine.output = player
            } else {
                engine.output = DryWetMixer(player, node!)
            }
        }
    }
}
When I call this code:
engine.start()
player.start()
the music plays. But when I click a button and set
effectType = 2 (or any other value)
to change engine.output, playback stops.
I tried this when the button is clicked:
let progress = player.getCurrentTime()
if (isPlaying) {
    player.stop()
}
self.effectType = 3
if (isPlaying) {
    player.seek(time: progress)
    player.play()
}
But this introduces a pause in playback.
How can I solve this problem?
Thanks, everyone!
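One direction that might help (a sketch only, not tested against this exact setup): wire the whole chain once before starting the engine and toggle the effect by changing the DryWetMixer's balance, so engine.output is never reassigned while the player is running. For a single effect it could look like this:
import AudioKit
import AVFoundation

// Sketch: the graph is built once, so enabling or disabling the effect never
// touches engine.output during playback. Only one effect is shown here; a
// full version would pre-build one wet path per effect.
class SingleEffectConductor {
    let engine = AudioEngine()
    let player: AudioPlayer
    let phaser: Phaser
    let dryWet: DryWetMixer

    init(url: URL) throws {
        let player = AudioPlayer()
        let phaser = Phaser(player)
        self.player = player
        self.phaser = phaser
        self.dryWet = DryWetMixer(player, phaser, balance: 0) // start fully dry
        let file = try AVAudioFile(forReading: url)
        try player.load(file: file, buffered: true)
        engine.output = dryWet
        try engine.start()
    }

    // balance 0 = dry (player only), 1 = fully wet (effect only)
    func setEffectEnabled(_ enabled: Bool) {
        dryWet.balance = enabled ? 1 : 0
    }
}
Switching between several effects could follow the same idea: pre-build one wet chain per effect and fade between them, so the running graph never has to be rebuilt mid-playback.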

Troubles with MPRemoteCommandCenter

I have two problems with MPRemoteCommandCenter:
1) When I change the song, the remote controls drop the previous song's image, show the default image, and only then show the next song's image. I don't want to see the default image, and I've spent quite a bit of time looking for a solution.
2) When AVPlayer is streaming live audio, the remote controls become inactive and show circular arrows (with the number 15 inside them; what does that mean?).
Here is my code for playing sound:
public func playAVSound(trackName: String) -> String {
    let path = _findPath(trackName: trackName)
    if (path == "") {
        return "getting url"
    }
    if (self.AVPlayerVC.player == nil) {
        print("Init remote control events...")
        UIApplication.shared.beginReceivingRemoteControlEvents()
        let commandCenter = MPRemoteCommandCenter.shared()
        commandCenter.nextTrackCommand.isEnabled = true
        commandCenter.nextTrackCommand.addTarget(self, action: #selector(self.next))
        commandCenter.previousTrackCommand.isEnabled = true
        commandCenter.previousTrackCommand.addTarget(self, action: #selector(self.previous))
    }
    if (self.AVPlayerVC.player != nil && self.play_info.trackName == trackName) {
        if (self.play_info.paused!) {
            self.AVPlayerVC.player?.play()
            self.updatePlayInfo(number: Global.PlayList.find_by_trackName(trackName: trackName), trackName: trackName, path: path, paused: false)
            return "continue"
        } else {
            self.AVPlayerVC.player?.pause()
            self.play_info.paused = true
            return "pause"
        }
    }
    let nsurl = NSURL(string: path)
    if let url = nsurl {
        print("Play AV from : \(url)")
        let item = AVPlayerItem(url: url as URL)
        if (self.AVPlayerVC.player?.currentItem == nil) {
            self.AvPlayer = AVPlayer(playerItem: item)
            self.AVPlayerVC.player = self.AvPlayer
            self.AVPlayerVC.player?.automaticallyWaitsToMinimizeStalling = false
        } else {
            self.AvPlayer?.replaceCurrentItem(with: item)
        }
        NotificationCenter.default.addObserver(self, selector: #selector(self.playerDidFinishPlaying), name: .AVPlayerItemDidPlayToEndTime, object: item)
        self.AVPlayerVC.player?.play()
        self.updatePlayInfo(number: Global.PlayList.find_by_trackName(trackName: trackName), trackName: trackName, path: path, paused: false)
        return "playing"
    } else {
        print("Incorrect nsurl")
    }
    return "error"
}

private func _findPath(trackName: String) -> String {
    let n = Global.PlayList.find_by_trackName(trackName: trackName)
    var path = ""
    if (n >= 0) {
        let p_item = Global.PlayList.PlaylistItems[n]
        if (!p_item.fromData!) {
            if (p_item.playing_url == nil) {
                NetLib.makeTrackUrl(trackName: trackName, closure: self.playAVSound)
            } else {
                path = p_item.playing_url!
            }
        } else {
            path = NetLib.makePath(filename: p_item.filename!)
        }
    } else {
        print("Error find_path: \(trackName) was not found in playlist")
    }
    return path
}
And here is the code that changes the song:
@objc private func previous() -> MPRemoteCommandHandlerStatus {
    var n = self.play_info.number! - 1
    if (self.play_info.number == 0) {
        n = Global.PlayList.size() - 1
    }
    NotificationCenter.default.post(name: NSNotification.Name(rawValue: "load"), object: nil)
    _ = self.playAVSound(trackName: Global.PlayList.PlaylistItems[n].trackName!)
    print("Previous song \(Global.PlayList.PlaylistItems[n].trackName!)")
    return .success
}
Update: I resolved the problem with the controls during live streaming. I was starting playback off the main thread, and the remote controls don't work correctly when driven from a background thread.
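For the artwork flicker in 1), setting the now-playing info yourself whenever the track changes usually avoids the fallback to the default image. A rough sketch (this function and its parameters are hypothetical, not part of the code above):
import MediaPlayer
import UIKit

// Sketch: push title and artwork for the new track as soon as it starts playing,
// so the lock screen / Control Center never falls back to a default image.
func updateNowPlayingInfo(title: String, artwork: UIImage, isLiveStream: Bool) {
    var info: [String: Any] = [
        MPMediaItemPropertyTitle: title,
        MPNowPlayingInfoPropertyIsLiveStream: isLiveStream
    ]
    info[MPMediaItemPropertyArtwork] = MPMediaItemArtwork(boundsSize: artwork.size) { _ in
        artwork
    }
    MPNowPlayingInfoCenter.default().nowPlayingInfo = info
}
Calling something like this from playAVSound and from the next/previous handlers, right after play(), should keep the artwork continuous; MPNowPlayingInfoPropertyIsLiveStream also tells the system the item is a live stream.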

How to disable audio in a WebRTC mobile app (iOS) without changing the framework

I am working with WebRTC on mobile (iOS) and I can't disable audio: there is no flag to disable it. It can be done easily by changing the framework/library, but my goal is to disable audio without modifying the framework/library. Can anyone help me?
Please update your question with a code snippet showing how you are creating the media stream or the tracks (audio/video).
Generally, with the default native WebRTC framework:
RTCMediaStream *localStream = [_factory mediaStreamWithStreamId:kARDMediaStreamId];
if (audioRequired) {
    RTCAudioTrack *aTrack = [_lmStream createLocalAudioTrack];
    [localStream addAudioTrack:aTrack];
}
RTCVideoTrack *vTrack = [_lmStream createLocalVideoTrack];
[localStream addVideoTrack:vTrack];
[_peerConnection addStream:localStream];
If you want to mute the audio during the call, use the function below.
- (void)enableAudio:(NSString *)id isAudioEnabled:(BOOL)isAudioEnabled {
    NSLog(@"Audio enabled: %@ %d, streams count: %lu", id, isAudioEnabled, (unsigned long)_peerConnection.localStreams.count);
    if (_peerConnection.localStreams.count > 0) {
        RTCMediaStream *lStream = _peerConnection.localStreams[0];
        if (lStream.audioTracks.count > 0) { // Usually there is only one track. If you have more than one, traverse them all.
            // isAudioEnabled == 1 -> unmute
            // isAudioEnabled == 0 -> mute
            [lStream.audioTracks[0] setIsEnabled:isAudioEnabled];
        }
    }
}
In my case I didn't use streams and added the audio track directly to the peer connection.
private func createMediaSenders() {
    let streamId = "stream"
    // Audio
    let audioTrack = self.createAudioTrack()
    self.pc.add(audioTrack, streamIds: [streamId])
    // Video
    /*
    let videoTrack = self.createVideoTrack()
    self.localVideoTrack = videoTrack
    self.peerConnection.add(videoTrack, streamIds: [streamId])
    self.remoteVideoTrack = self.peerConnection.transceivers.first { $0.mediaType == .video }?.receiver.track as? RTCVideoTrack
    // Data
    if let dataChannel = createDataChannel() {
        dataChannel.delegate = self
        self.localDataChannel = dataChannel
    }
    */
}

private func createAudioTrack() -> RTCAudioTrack {
    let audioConstrains = RTCMediaConstraints(mandatoryConstraints: nil, optionalConstraints: nil)
    let audioSource = sessionFactory.audioSource(with: audioConstrains)
    let audioTrack = sessionFactory.audioTrack(with: audioSource, trackId: "audio0")
    return audioTrack
}
To mute and unmute the microphone I use this function:
public func muteMicrophone(_ mute: Bool) {
    for sender in pc.senders {
        if (sender.track?.kind == "audio") {
            // Disable the track when muting (note the negation).
            sender.track?.isEnabled = !mute
        }
    }
}
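Hypothetical call sites for the function above (not from the original post), just to show the intent of the flag:
muteMicrophone(true)   // mute: disables the local audio sender's track
muteMicrophone(false)  // unmute: re-enables it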

Swift AVFoundation Reading and Analyzing a file in real time

I am having trouble reading a file from disk using AVFoundation and performing rendering and analysis on it in real time.
I have a pipeline of code that I know runs in real time and does analysis on video; it works in real time when fed from the camera session. However, that is not the case when I read the file as shown below. Can anyone tell me where I might be going wrong?
protocol VideoStreamTestBenchDelegate {
    func frameBuffer(buffer: CMSampleBuffer)
}

class VideoStreamTestBench {
    let asset: AVAsset
    let assetReader: AVAssetReader
    let playAtActualSpeed: Bool
    let loop: Bool
    var videoEncodingIsFinished = false
    var previousFrameTime = kCMTimeZero
    var previousActualFrameTime = CFAbsoluteTimeGetCurrent()
    var numberOfFramesCaptured = 0
    var totalFrameTimeDuringCapture: Double = 0.0
    var delegate: VideoStreamTestBenchDelegate?

    public convenience init(url: URL, playAtActualSpeed: Bool = false, loop: Bool = false) throws {
        let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey: NSNumber(value: true)]
        let inputAsset = AVURLAsset(url: url, options: inputOptions)
        try self.init(asset: inputAsset, playAtActualSpeed: playAtActualSpeed, loop: loop)
    }

    public init(asset: AVAsset, playAtActualSpeed: Bool = false, loop: Bool = false) throws {
        self.asset = asset
        self.playAtActualSpeed = playAtActualSpeed
        self.loop = loop
        assetReader = try AVAssetReader(asset: self.asset)
        let outputSettings: [String: AnyObject] = [(kCVPixelBufferPixelFormatTypeKey as String): NSNumber(value: Int32(kCVPixelFormatType_32BGRA))]
        let readerVideoTrackOutput = AVAssetReaderTrackOutput(track: self.asset.tracks(withMediaType: AVMediaTypeVideo)[0], outputSettings: outputSettings)
        readerVideoTrackOutput.alwaysCopiesSampleData = false
        assetReader.add(readerVideoTrackOutput)
        // TODO: Audio here
    }

    public func start() {
        asset.loadValuesAsynchronously(forKeys: ["tracks"], completionHandler: {
            DispatchQueue.global(priority: DispatchQueue.GlobalQueuePriority.default).async(execute: {
                guard (self.asset.statusOfValue(forKey: "tracks", error: nil) == .loaded) else { return }
                guard self.assetReader.startReading() else {
                    print("Couldn't start reading")
                    return
                }
                var readerVideoTrackOutput: AVAssetReaderOutput? = nil
                for output in self.assetReader.outputs {
                    if (output.mediaType == AVMediaTypeVideo) {
                        readerVideoTrackOutput = output
                    }
                }
                while (self.assetReader.status == .reading) {
                    self.readNextVideoFrame(from: readerVideoTrackOutput!)
                }
                if (self.assetReader.status == .completed) {
                    self.assetReader.cancelReading()
                    if (self.loop) {
                        // TODO: Restart movie processing
                    } else {
                        self.endProcessing()
                    }
                }
            })
        })
    }

    public func cancel() {
        assetReader.cancelReading()
        self.endProcessing()
    }

    func endProcessing() {
    }

    func readNextVideoFrame(from videoTrackOutput: AVAssetReaderOutput) {
        if ((assetReader.status == .reading) && !videoEncodingIsFinished) {
            if let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() {
                if (playAtActualSpeed) {
                    // Do this outside of the video processing queue to not slow that down while waiting
                    let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)
                    let differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime)
                    let currentActualTime = CFAbsoluteTimeGetCurrent()
                    let frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame)
                    let actualTimeDifference = currentActualTime - previousActualFrameTime
                    if (frameTimeDifference > actualTimeDifference) {
                        usleep(UInt32(round(1000000.0 * (frameTimeDifference - actualTimeDifference))))
                    }
                    previousFrameTime = currentSampleTime
                    previousActualFrameTime = CFAbsoluteTimeGetCurrent()
                }
                DispatchQueue.global().sync {
                    self.delegate?.frameBuffer(buffer: sampleBuffer)
                    CMSampleBufferInvalidate(sampleBuffer)
                }
            } else {
                if (!loop) {
                    videoEncodingIsFinished = true
                    if (videoEncodingIsFinished) {
                        self.endProcessing()
                    }
                }
            }
        }
    }
}
// This is the delegate
public func bufferReader(_ reader: BufferReader!, didGetNextVideoSample bufferRef: CMSampleBuffer!) {
    // let posePoints: [Any] = self.visageBackend.posePoints(with: bufferRef)
    // var regions: [Any]? = nil
    //
    // if (posePoints.count > 0) {
    //     regions = (self.luaBackend?.regions(forPosePoints: posePoints))!
    // }
    //
    // // extract
    // if (regions != nil) {
    //     let rois: [Any] = (self.luaBackend?.extractedRegionInfos(for: bufferRef, andRegions: regions))!
    //     print(rois)
    // }
    //
    // self.dLibRenderEngine.render(with: bufferRef, andPoints: posePoints, andRegions: regions)
    self.backgroundRenderQueue.async { [weak self] in
        if self?.previewLayer?.isReadyForMoreMediaData == true {
            self?.previewLayer?.enqueue(bufferRef!)
        }
    }
}
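In case it helps with debugging, here is a minimal way to drive the class above, using only the API it already exposes (the file path is hypothetical). Note that playAtActualSpeed defaults to false, so unless it is set the reader pushes frames as fast as it can decode them rather than at the clip's own frame rate:
import AVFoundation
import CoreMedia

// Sketch: receive frames through the delegate protocol defined above and
// pace them at the clip's native frame rate.
final class FrameLogger: VideoStreamTestBenchDelegate {
    func frameBuffer(buffer: CMSampleBuffer) {
        let pts = CMSampleBufferGetOutputPresentationTimeStamp(buffer)
        print("Got frame at \(CMTimeGetSeconds(pts)) s")
    }
}

let url = URL(fileURLWithPath: "/path/to/clip.mov") // hypothetical path
do {
    let bench = try VideoStreamTestBench(url: url, playAtActualSpeed: true)
    bench.delegate = FrameLogger()
    bench.start()
} catch {
    print("Could not create the test bench: \(error)")
}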

How do I use the Swift sampler to play a tone then pause before playing the next?

I have code that takes a sequence of letters in a string, interprets them as notes, and then plays them. The problem is that they all play at the same time. How do I play each one as a quarter note, essentially playing a note, waiting for it to end, and then playing the next note?
@IBAction func playButton(sender: AnyObject) {
    fractalEngine.output = "adgadefe"
    var notes = Array(fractalEngine.output.characters)
    var counter = 0
    while counter < notes.count {
        var note = notes[counter]
        if note == "a" {
            play(58)
        } else if note == "b" {
            play(59)
        } else if note == "c" {
            play(60)
        } else if note == "d" {
            play(61)
        } else if note == "e" {
            play(62)
        } else if note == "f" {
            play(63)
        } else {
            play(64)
        }
        counter += 1
    }
    //self.sampler.startNote(60, withVelocity: 64, onChannel: 0)
}

func play(note: UInt8) {
    sampler.startNote(note, withVelocity: 64, onChannel: 0)
}

func stop(note: UInt8) {
    sampler.stopNote(note, onChannel: 0)
}
Here's the code that initializes the sampler:
func initAudio() {
    engine = AVAudioEngine()
    self.sampler = AVAudioUnitSampler()
    engine.attachNode(self.sampler)
    engine.connect(self.sampler, to: engine.outputNode, format: nil)
    guard let soundbank = NSBundle.mainBundle().URLForResource("gs_instruments", withExtension: "dls") else {
        print("Could not initialize soundbank.")
        return
    }
    let melodicBank: UInt8 = UInt8(kAUSampler_DefaultMelodicBankMSB)
    let gmHarpsichord: UInt8 = 6
    do {
        try engine.start()
        try self.sampler.loadSoundBankInstrumentAtURL(soundbank, program: gmHarpsichord, bankMSB: melodicBank, bankLSB: 0)
    } catch {
        print("An error occurred \(error)")
        return
    }
    /*
    self.musicSequence = createMusicSequence()
    createAVMIDIPlayer(self.musicSequence)
    createAVMIDIPlayerFromMIDIFIle()
    self.musicPlayer = createMusicPlayer(musicSequence)
    */
}
It looks like you need to delay playing each in turn. Here's one implementation (that avoids blocking the main thread).
//global delay helper function
func delay(delay: Double, closure: () -> ()) {
    dispatch_after(
        dispatch_time(
            DISPATCH_TIME_NOW,
            Int64(delay * Double(NSEC_PER_SEC))
        ),
        dispatch_get_main_queue(), closure)
}
//inside your playButton (note here is assumed to be the UInt8 MIDI note number)
let delayConstant = 0.05 //customize as needed
for (noteNumber, note) in notes.enumerate() {
    delay(delayConstant * Double(noteNumber)) {
        play(note)
        //handle stopping
        delay(delayConstant) { stop(note) }
    }
}
What this does is play each note after an escalating delay, then stop it after the note length, which is assumed here to be the delay constant.
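If it helps, the letter-to-MIDI mapping from the question can be folded into the same pattern with a dictionary, so the loop stays short. This is the same Swift 2-era syntax as above; the 0.5-second note length is just an arbitrary quarter-note duration:
// Sketch: map letters to the MIDI numbers used in the question and schedule
// each note with an escalating delay, stopping it one note length later.
// Uses the delay(_:closure:) helper defined above, inside playButton.
let midiNotes: [Character: UInt8] = ["a": 58, "b": 59, "c": 60, "d": 61,
                                     "e": 62, "f": 63, "g": 64]
let noteLength = 0.5 // seconds per quarter note

for (i, letter) in fractalEngine.output.characters.enumerate() {
    let note = midiNotes[letter] ?? 64 // same fallback as the question's else branch
    delay(noteLength * Double(i)) {
        self.play(note)
        delay(noteLength) { self.stop(note) }
    }
}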
