From 9adbe5d727aa32612a25df84b9197e9cb811eb9d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Bra=C5=BCewicz?= Date: Mon, 25 Mar 2024 17:43:05 +0100 Subject: [PATCH] fix: respecting default audio output setting (#608) --- packages/stream_video/lib/src/call/call.dart | 29 +++++++++++++++++-- .../lib/src/call/call_connect_options.dart | 4 ++- .../lib/src/call/session/call_session.dart | 9 ++++++ .../state/mixins/state_lifecycle_mixin.dart | 26 +++++++++++++++++ .../open_api/open_api_extensions.dart | 14 +++++++++ .../lib/src/models/call_settings.dart | 13 +++++++++ .../rtc_media_device/rtc_media_device.dart | 6 ++-- .../rtc_media_device_notifier.dart | 1 + .../lib/src/call_screen/call_container.dart | 8 +++-- .../lib/src/call_screen/lobby_video.dart | 16 ++++++++++ 10 files changed, 118 insertions(+), 8 deletions(-) diff --git a/packages/stream_video/lib/src/call/call.dart b/packages/stream_video/lib/src/call/call.dart index 89e97a23c..4e1cbc132 100644 --- a/packages/stream_video/lib/src/call/call.dart +++ b/packages/stream_video/lib/src/call/call.dart @@ -270,8 +270,7 @@ class Call { set connectOptions(CallConnectOptions connectOptions) { final status = _status.value; - if (status == _ConnectionStatus.connecting || - status == _ConnectionStatus.connected) { + if (status == _ConnectionStatus.connected) { _logger.w( () => '[setConnectOptions] rejected (connectOptions must be' ' set before invoking `connect`)', @@ -976,6 +975,17 @@ class Call { return [...?_session?.getTracks(trackIdPrefix)]; } + void _setDefaultConnectOptions(CallSettings settings) { + connectOptions = connectOptions.copyWith( + camera: TrackOption.fromSetting( + enabled: settings.video.cameraDefaultOn, + ), + microphone: TrackOption.fromSetting( + enabled: settings.audio.micDefaultOn, + ), + ); + } + Future _applyConnectOptions() async { _logger.d(() => '[applyConnectOptions] connectOptions: $_connectOptions'); await _applyCameraOption(_connectOptions.camera); @@ -1192,11 +1202,26 @@ class Call { custom: custom, ); + final mediaDevicesResult = + await RtcMediaDeviceNotifier.instance.enumerateDevices(); + final mediaDevices = mediaDevicesResult.fold( + success: (success) => success.data, + failure: (failure) => [], + ); + return response.fold( success: (it) { + _setDefaultConnectOptions(it.data.data.metadata.settings); + _stateManager.lifecycleCallCreated( CallCreated(it.data.data), ringing: ringing, + audioOutputs: mediaDevices + .where((d) => d.kind == RtcMediaDeviceKind.audioOutput) + .toList(), + audioInputs: mediaDevices + .where((d) => d.kind == RtcMediaDeviceKind.audioInput) + .toList(), ); _logger.v(() => '[getOrCreate] completed: ${it.data}'); return it; diff --git a/packages/stream_video/lib/src/call/call_connect_options.dart b/packages/stream_video/lib/src/call/call_connect_options.dart index 0b8a0a75d..7a33c044e 100644 --- a/packages/stream_video/lib/src/call/call_connect_options.dart +++ b/packages/stream_video/lib/src/call/call_connect_options.dart @@ -18,7 +18,6 @@ class CallConnectOptions with EquatableMixin { TrackOption? camera, TrackOption? microphone, TrackOption? screenShare, - Duration? dropTimeout, }) { return CallConnectOptions( camera: camera ?? this.camera, @@ -43,6 +42,9 @@ class CallConnectOptions with EquatableMixin { abstract class TrackOption with EquatableMixin { const TrackOption(); + factory TrackOption.fromSetting({required bool enabled}) => + enabled ? 
TrackOption.enabled() : TrackOption.disabled(); + factory TrackOption.enabled() { return TrackEnabled._instance; } diff --git a/packages/stream_video/lib/src/call/session/call_session.dart b/packages/stream_video/lib/src/call/session/call_session.dart index c17e45873..30cd1c190 100644 --- a/packages/stream_video/lib/src/call/session/call_session.dart +++ b/packages/stream_video/lib/src/call/session/call_session.dart @@ -167,6 +167,15 @@ class CallSession extends Disposable { if (CurrentPlatform.isIos) { await rtcManager?.setAppleAudioConfiguration(); } + + //FIXME: This is a temporary fix for the issue where the audio output device is not set correctly + // we should remove the delay and figure out why it's not setting the device without it + unawaited( + Future.delayed(const Duration(milliseconds: 250), () async { + await _applyCurrentAudioOutputDevice(); + }), + ); + _logger.v(() => '[start] completed'); return const Result.success(none); } catch (e, stk) { diff --git a/packages/stream_video/lib/src/call/state/mixins/state_lifecycle_mixin.dart b/packages/stream_video/lib/src/call/state/mixins/state_lifecycle_mixin.dart index f09f1b5e2..4d5d9917d 100644 --- a/packages/stream_video/lib/src/call/state/mixins/state_lifecycle_mixin.dart +++ b/packages/stream_video/lib/src/call/state/mixins/state_lifecycle_mixin.dart @@ -1,4 +1,5 @@ import 'package:state_notifier/state_notifier.dart'; +import 'package:collection/collection.dart'; import '../../../../stream_video.dart'; import '../../../action/internal/lifecycle_action.dart'; @@ -101,7 +102,26 @@ mixin StateLifecycleMixin on StateNotifier { void lifecycleCallCreated( CallCreated stage, { bool ringing = false, + List? audioOutputs, + List? audioInputs, }) { + final defaultAudioOutput = audioOutputs?.firstWhereOrNull((device) { + if (stage.data.metadata.settings.audio.defaultDevice == + AudioSettingsRequestDefaultDeviceEnum.speaker) { + return device.id.equalsIgnoreCase( + AudioSettingsRequestDefaultDeviceEnum.speaker.value, + ); + } + + return !device.id.equalsIgnoreCase( + AudioSettingsRequestDefaultDeviceEnum.speaker.value, + ); + }); + + final defaultAudioInput = audioInputs + ?.firstWhereOrNull((d) => d.label == defaultAudioOutput?.label) ?? 
+ audioInputs?.firstOrNull; + _logger.d(() => '[lifecycleCallCreated] ringing: $ringing, state: $state'); state = state.copyWith( status: stage.data.toCallStatus(state: state, ringing: ringing), @@ -118,6 +138,8 @@ mixin StateLifecycleMixin on StateNotifier { isBackstage: stage.data.metadata.details.backstage, isBroadcasting: stage.data.metadata.details.broadcasting, isRecording: stage.data.metadata.details.recording, + audioOutputDevice: defaultAudioOutput, + audioInputDevice: defaultAudioInput, ); } @@ -361,3 +383,7 @@ extension on CallRingingData { } } } + +extension on String { + bool equalsIgnoreCase(String other) => toUpperCase() == other.toUpperCase(); +} diff --git a/packages/stream_video/lib/src/coordinator/open_api/open_api_extensions.dart b/packages/stream_video/lib/src/coordinator/open_api/open_api_extensions.dart index 53a0bcbc9..59df9a532 100644 --- a/packages/stream_video/lib/src/coordinator/open_api/open_api_extensions.dart +++ b/packages/stream_video/lib/src/coordinator/open_api/open_api_extensions.dart @@ -1,6 +1,7 @@ import 'package:collection/collection.dart'; import '../../../../open_api/video/coordinator/api.dart' as open; +import '../../../stream_video.dart'; import '../../errors/video_error.dart'; import '../../logger/stream_log.dart'; import '../../models/call_cid.dart'; @@ -176,6 +177,9 @@ extension CallSettingsExt on open.CallSettingsResponse { accessRequestEnabled: audio.accessRequestEnabled, opusDtxEnabled: audio.opusDtxEnabled, redundantCodingEnabled: audio.redundantCodingEnabled, + defaultDevice: audio.defaultDevice.toDomain(), + micDefaultOn: audio.micDefaultOn, + speakerDefaultOn: audio.speakerDefaultOn, ), video: StreamVideoSettings( accessRequestEnabled: video.accessRequestEnabled, @@ -208,6 +212,16 @@ extension CallSettingsExt on open.CallSettingsResponse { } } +extension on open.AudioSettingsDefaultDeviceEnum { + AudioSettingsRequestDefaultDeviceEnum toDomain() { + if (this == open.AudioSettingsDefaultDeviceEnum.speaker) { + return AudioSettingsRequestDefaultDeviceEnum.speaker; + } else { + return AudioSettingsRequestDefaultDeviceEnum.earpiece; + } + } +} + extension on open.TranscriptionSettingsModeEnum { TranscriptionSettingsMode toDomain() { if (this == open.TranscriptionSettingsModeEnum.autoOn) { diff --git a/packages/stream_video/lib/src/models/call_settings.dart b/packages/stream_video/lib/src/models/call_settings.dart index 00a4787fc..d8a1a498d 100644 --- a/packages/stream_video/lib/src/models/call_settings.dart +++ b/packages/stream_video/lib/src/models/call_settings.dart @@ -77,17 +77,24 @@ class StreamAudioSettings extends MediaSettings { this.opusDtxEnabled = false, this.redundantCodingEnabled = false, this.defaultDevice = AudioSettingsRequestDefaultDeviceEnum.speaker, + this.micDefaultOn = true, + this.speakerDefaultOn = true, }); final bool opusDtxEnabled; final bool redundantCodingEnabled; final AudioSettingsRequestDefaultDeviceEnum defaultDevice; + final bool micDefaultOn; + final bool speakerDefaultOn; @override List get props => [ accessRequestEnabled, opusDtxEnabled, redundantCodingEnabled, + defaultDevice, + micDefaultOn, + speakerDefaultOn, ]; AudioSettingsRequest toOpenDto() { @@ -96,6 +103,8 @@ class StreamAudioSettings extends MediaSettings { accessRequestEnabled: accessRequestEnabled, opusDtxEnabled: opusDtxEnabled, redundantCodingEnabled: redundantCodingEnabled, + micDefaultOn: micDefaultOn, + speakerDefaultOn: speakerDefaultOn, ); } } @@ -104,20 +113,24 @@ class StreamVideoSettings extends MediaSettings { const 
StreamVideoSettings({ super.accessRequestEnabled = false, this.enabled = false, + this.cameraDefaultOn = true, }); final bool enabled; + final bool cameraDefaultOn; @override List get props => [ accessRequestEnabled, enabled, + cameraDefaultOn, ]; VideoSettingsRequest toOpenDto() { return VideoSettingsRequest( enabled: enabled, accessRequestEnabled: accessRequestEnabled, + cameraDefaultOn: cameraDefaultOn, ); } } diff --git a/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device.dart b/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device.dart index c7dc96560..80ac7cd3a 100644 --- a/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device.dart +++ b/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device.dart @@ -26,17 +26,19 @@ class RtcMediaDevice with EquatableMixin { required this.id, required this.label, required this.kind, + this.groupId, }); final String id; final String label; + final String? groupId; final RtcMediaDeviceKind kind; @override String toString() { - return 'RtcMediaDevice{id: $id, label: $label, kind: $kind}'; + return 'RtcMediaDevice{id: $id, label: $label, groupId: $groupId, kind: $kind}'; } @override - List get props => [id, kind, label]; + List get props => [id, kind, groupId, label]; } diff --git a/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart b/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart index b6b9389a0..c864ddf43 100644 --- a/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart +++ b/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart @@ -37,6 +37,7 @@ class RtcMediaDeviceNotifier { return RtcMediaDevice( id: it.deviceId, label: it.label, + groupId: it.groupId, kind: RtcMediaDeviceKind.fromAlias(it.kind), ); }); diff --git a/packages/stream_video_flutter/lib/src/call_screen/call_container.dart b/packages/stream_video_flutter/lib/src/call_screen/call_container.dart index 7fc5086af..82713a935 100644 --- a/packages/stream_video_flutter/lib/src/call_screen/call_container.dart +++ b/packages/stream_video_flutter/lib/src/call_screen/call_container.dart @@ -47,7 +47,7 @@ class StreamCallContainer extends StatefulWidget { const StreamCallContainer({ super.key, required this.call, - this.callConnectOptions = const CallConnectOptions(), + this.callConnectOptions, this.onBackPressed, this.onLeaveCallTap, this.onAcceptCallTap, @@ -62,7 +62,7 @@ class StreamCallContainer extends StatefulWidget { final Call call; /// Options used while connecting to the call. - final CallConnectOptions callConnectOptions; + final CallConnectOptions? callConnectOptions; /// The action to perform when the back button is pressed. final VoidCallback? 
onBackPressed; @@ -161,7 +161,9 @@ class _StreamCallContainerState extends State { Future _connect() async { try { _logger.d(() => '[connect] no args'); - call.connectOptions = widget.callConnectOptions; + if (widget.callConnectOptions != null) { + call.connectOptions = widget.callConnectOptions!; + } final result = await call.join(); _logger.v(() => '[connect] completed: $result'); } catch (e) { diff --git a/packages/stream_video_flutter/lib/src/call_screen/lobby_video.dart b/packages/stream_video_flutter/lib/src/call_screen/lobby_video.dart index 41fd0b55c..49112685a 100644 --- a/packages/stream_video_flutter/lib/src/call_screen/lobby_video.dart +++ b/packages/stream_video_flutter/lib/src/call_screen/lobby_video.dart @@ -41,6 +41,22 @@ class _StreamLobbyVideoState extends State { RtcLocalAudioTrack? _microphoneTrack; RtcLocalCameraTrack? _cameraTrack; + @override + void initState() { + super.initState(); + + Future.delayed(Duration.zero, () { + final callSettings = widget.call.state.value.settings; + if (callSettings.audio.micDefaultOn) { + toggleMicrophone(); + } + + if (callSettings.video.cameraDefaultOn) { + toggleCamera(); + } + }); + } + Future toggleCamera() async { if (_cameraTrack != null) { await _cameraTrack?.stop();
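
Reviewer note (not part of the patch): the sketch below shows how the new defaults are expected to behave from an application's point of view. It is a minimal sketch, assuming a Call instance is already available from the SDK; it only uses APIs touched by this diff (getOrCreate, connectOptions, CallConnectOptions, TrackOption, join) and should be checked against the merged code. The helper name joinWithServerDefaults is illustrative only.

import 'package:stream_video/stream_video.dart';

// Hypothetical usage sketch, assuming `call` was created elsewhere via the SDK.
Future<void> joinWithServerDefaults(Call call) async {
  // After this change, getOrCreate() seeds connectOptions from the backend
  // call settings (micDefaultOn / cameraDefaultOn) via TrackOption.fromSetting
  // and resolves a default audio output/input from the enumerated devices.
  await call.getOrCreate();

  // Leaving connectOptions untouched keeps those defaults; individual tracks
  // can still be overridden before joining.
  call.connectOptions = call.connectOptions.copyWith(
    microphone: TrackOption.disabled(),
  );

  await call.join();
}

// In the widget layer, omitting callConnectOptions on StreamCallContainer now
// preserves these defaults instead of overwriting them with an empty
// CallConnectOptions().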