Expose audio output selection

This commit is contained in:
QuentinArguillere
2020-10-08 17:24:47 +02:00
parent def8aefef6
commit 6f2684a0c1
2 changed files with 41 additions and 31 deletions

View File

@@ -32,7 +32,8 @@ class CallExampleContext : ObservableObject
@Published var id : String = "sip:myphone@sip.linphone.org"
@Published var passwd : String = "mypassword"
@Published var loggedIn: Bool = false
@Published var currentAudioDevice : AudioDevice!
init() {
mCallTutorialDelegate.tutorialContext = self
@@ -44,11 +45,11 @@ class CallExampleContext : ObservableObject
mCore.autoIterateEnabled = true
try? mCore.start()
currentAudioDevice = mCore.audioDevices[0]
mCore.addDelegate(delegate: mCallTutorialDelegate)
}
func registrationExample()
{
func registrationExample() {
if (!loggedIn) {
do {
proxy_cfg = try createAndInitializeProxyConfig(core : mCore, identity: id, password: passwd)
@@ -99,24 +100,23 @@ class CallExampleContext : ObservableObject
}
}
func microphoneMuteToggle()
{
/// Toggles the microphone mute state on the active call and mirrors
/// the resulting state into the published `microphoneMuted` property.
/// Does nothing when no call is running.
func microphoneMuteToggle() {
    guard callRunning else { return }
    mCall.microphoneMuted = !mCall.microphoneMuted
    microphoneMuted = mCall.microphoneMuted
}
func speaker()
{
speakerEnabled = !speakerEnabled
do {
try AVAudioSession.sharedInstance().overrideOutputAudioPort(
speakerEnabled ?
AVAudioSession.PortOverride.speaker : AVAudioSession.PortOverride.none)
} catch {
print(error)
/// Cycles the core's audio output to the next device in `mCore.audioDevices`,
/// wrapping around at the end of the list. If the current device is not
/// found, falls back to the first device.
func changeAudioOutput() {
    let devices = mCore.audioDevices
    // Guard against an empty device list — indexing devices[0] would crash.
    guard !devices.isEmpty else { return }
    var newIdx = 0
    // BUG FIX: the original loop used `0...devices.count`, which iterates one
    // past the last valid index and crashes with "index out of range" whenever
    // the current device is not matched earlier. Use the half-open range.
    for i in 0..<devices.count {
        if devices[i].deviceName == currentAudioDevice.deviceName {
            newIdx = (i + 1) % devices.count
            break
        }
    }
    mCore.outputAudioDevice = devices[newIdx]
}
func acceptCall()
@@ -127,7 +127,6 @@ class CallExampleContext : ObservableObject
print(error)
}
}
}
// Callback for actions when a change in the Registration State happens
@@ -160,4 +159,13 @@ class CallTutorialDelegate: CoreDelegate {
tutorialContext.isCallIncoming = false
}
}
// Core callback fired when the active audio device changes (e.g. a headset
// is plugged in or selected); mirrors the new device into the observable
// tutorial context so the UI label stays in sync.
func onAudioDeviceChanged(core: Core, audioDevice: AudioDevice) {
tutorialContext.currentAudioDevice = audioDevice
}
/// Core callback fired when the set of available audio devices changes
/// (device plugged/unplugged). The original body bound `outputDevice` and
/// then did nothing (dead binding / unused-variable warning); sync the
/// published current device instead, matching `onAudioDeviceChanged`.
func onAudioDevicesListUpdated(core: Core) {
    if let outputDevice = core.outputAudioDevice {
        tutorialContext.currentAudioDevice = outputDevice
    }
}
}

View File

@@ -11,7 +11,7 @@ import SwiftUI
struct ContentView: View {
@ObservedObject var tutorialContext : CallExampleContext
func getCallButtonText() -> String {
if (tutorialContext.callRunning) {
return "Update Call"
@@ -35,7 +35,6 @@ struct ContentView: View {
return "No Call"
}
}
var body: some View {
VStack(alignment: .leading) {
Group {
@@ -75,27 +74,30 @@ struct ContentView: View {
.padding(.top, 5)
VStack {
HStack {
Text("Speaker :")
Button(action: tutorialContext.speaker)
{
Text(tutorialContext.speakerEnabled ? "ON" : "OFF")
.font(.title)
.foregroundColor(Color.white)
.frame(width: 60.0, height: 30.0)
.background(Color.gray)
}
}
HStack {
Text("Microphone :")
Text("Microphone :").frame(width: 200, height: 40.0)
Button(action: tutorialContext.microphoneMuteToggle)
{
Text(tutorialContext.microphoneMuted ? "Unmute" : "Mute")
.font(.title)
.foregroundColor(Color.white)
.frame(width: 110.0, height: 30.0)
.frame(width: 100.0, height: 40.0)
.background(Color.gray)
}
}.padding(.top)
}.padding()
HStack {
VStack {
Text("Audio device :")
Text("\(tutorialContext.currentAudioDevice.deviceName)")
}.frame(width: 200, height: 40.0)
Button(action: tutorialContext.changeAudioOutput)
{
Text("Change")
.font(.title)
.foregroundColor(Color.white)
.frame(width: 115.0, height: 40.0)
.background(Color.gray)
}
}.padding()
}
Spacer()
VStack {