Online Radio App

This example shows you how to implement the audio pipeline of a simple online radio app. The SDK allows you to easily integrate your app with the most common audio communication standards and providers.

In this example we will combine what we learned in the Mixer and Ducking examples.

With this audio graph the host can play songs while talking over them; ducking is applied to the music whenever the host speaks. There is also an effect player, which lets the host play different sound effects in the stream. The music and the effects are routed to the speaker as a monitor output.

After the final mix, the output is sent to the communication layer's sink node and streamed to the listeners.

import SwitchboardSDK

class OnlineRadioExample {

    let audioEngine = SBAudioEngine()
    let audioGraph = SBAudioGraph()
    let micMuteNode = SBGainNode()
    let micSplitterNode = SBSplitterNode()
    let micGainNode = SBGainNode()
    let musicPlayerNode = SBAudioPlayerNode()
    let duckingNode = SBMusicDuckingNode()
    let musicGainNode = SBGainNode()
    let effectPlayerNode = SBAudioPlayerNode()
    let effectGainNode = SBGainNode()
    let radioMixerNode = SBMixerNode()
    let radioSplitterNode = SBSplitterNode()
    let mixerNode = SBMixerNode()

    // Pass in your preferred sink node here
    init(sinkNode: SBWebRTCAudioSinkNode) {
        audioGraph.addNode(micMuteNode)
        audioGraph.addNode(micSplitterNode)
        audioGraph.addNode(micGainNode)
        audioGraph.addNode(musicPlayerNode)
        audioGraph.addNode(duckingNode)
        audioGraph.addNode(musicGainNode)
        audioGraph.addNode(effectPlayerNode)
        audioGraph.addNode(effectGainNode)
        audioGraph.addNode(radioMixerNode)
        audioGraph.addNode(radioSplitterNode)
        audioGraph.addNode(mixerNode)
        audioGraph.addNode(sinkNode)

        // Microphone chain: mute control, then a splitter that also feeds the ducking node
        audioGraph.connect(audioGraph.inputNode, to: micMuteNode)
        audioGraph.connect(micMuteNode, to: micSplitterNode)
        audioGraph.connect(micSplitterNode, to: micGainNode)
        audioGraph.connect(micGainNode, to: mixerNode)

        // Music chain: the mic signal keys the ducking applied to the music
        audioGraph.connect(musicPlayerNode, to: duckingNode)
        audioGraph.connect(micSplitterNode, to: duckingNode)
        audioGraph.connect(duckingNode, to: musicGainNode)
        audioGraph.connect(musicGainNode, to: radioMixerNode)

        // Effect chain: sound effects are mixed with the music
        audioGraph.connect(effectPlayerNode, to: effectGainNode)
        audioGraph.connect(effectGainNode, to: radioMixerNode)
        audioGraph.connect(radioMixerNode, to: radioSplitterNode)

        // Monitor output: the music and effects mix is also routed to the speaker
        audioGraph.connect(radioSplitterNode, to: mixerNode)
        audioGraph.connect(radioSplitterNode, to: audioGraph.outputNode)

        // Final mix (mic + music + effects) goes to the communication layer's sink
        audioGraph.connect(mixerNode, to: sinkNode)

        audioEngine.microphoneEnabled = true
        audioEngine.voiceProcessingEnabled = true
        audioEngine.start(audioGraph)
    }

    // The url can point to a local file or to a remote stream
    func loadSong(url: String, format: SBCodec) {
        musicPlayerNode.load(url, withFormat: format)
    }

    func play() {
        musicPlayerNode.play()
    }

    func pause() {
        musicPlayerNode.pause()
    }

    func stop() {
        musicPlayerNode.stop()
    }

    func playEffect(localSoundEffect: CustomEffect) {
        effectPlayerNode.load(localSoundEffect.url, withFormat: localSoundEffect.format)
        effectPlayerNode.play()
    }

    func setMusicVolume(_ newValue: Float) {
        musicGainNode.setGain(newValue)
    }

    func setEffectVolume(_ newValue: Float) {
        effectGainNode.setGain(newValue)
    }

    func setMicVolume(_ newValue: Float) {
        micGainNode.setGain(newValue)
    }

    func muteMicrophone() {
        micMuteNode.setGain(0)
    }

    func unmuteMicrophone() {
        micMuteNode.setGain(1)
    }
}
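
Below is a minimal usage sketch. The CustomEffect type is not defined in the example above, so a plausible definition is included here; the sink node construction, the example URLs, and the SBCodec cases (.mp3, .wav) are likewise illustrative and depend on your communication layer setup and on the formats supported by your SDK version.

import SwitchboardSDK

// Illustrative definition; the example above only assumes url and format properties.
struct CustomEffect {
    let url: String
    let format: SBCodec
}

// In a real app the sink node comes from your WebRTC (or other provider) integration.
let sinkNode = SBWebRTCAudioSinkNode()
let radio = OnlineRadioExample(sinkNode: sinkNode)

// Stream a remote song; the ducking node lowers it automatically while the host talks.
radio.loadSong(url: "https://example.com/stream.mp3", format: .mp3)
radio.play()
radio.setMusicVolume(0.8)

// Drop a local jingle into the broadcast.
radio.playEffect(localSoundEffect: CustomEffect(url: "jingle.wav", format: .wav))

// Mute the microphone during a music-only segment.
radio.muteMicrophone()

Because micSplitterNode feeds both the final mix and duckingNode, the music level follows the host's voice automatically; no manual ducking control is needed from the app.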