From 23e28e74d41f5a9cef729a2192d0486271f1c296 Mon Sep 17 00:00:00 2001
From: Brazol
Date: Fri, 15 Mar 2024 12:51:43 +0100
Subject: [PATCH 1/3] fix for respecting the default audio output device

---
 dogfooding/lib/screens/call_screen.dart       |   4 +
 dogfooding/lib/widgets/settings_menu.dart     | 249 ++++++++++++++----
 packages/stream_video/lib/src/call/call.dart  |  13 +
 .../lib/src/call/session/call_session.dart    |   7 +
 .../state/mixins/state_lifecycle_mixin.dart   |  26 ++
 .../open_api/open_api_extensions.dart         |  12 +
 .../rtc_media_device/rtc_media_device.dart    |   6 +-
 .../rtc_media_device_notifier.dart            |   1 +
 8 files changed, 266 insertions(+), 52 deletions(-)

diff --git a/dogfooding/lib/screens/call_screen.dart b/dogfooding/lib/screens/call_screen.dart
index 3ac861018..188ebaa08 100644
--- a/dogfooding/lib/screens/call_screen.dart
+++ b/dogfooding/lib/screens/call_screen.dart
@@ -170,6 +170,10 @@ class _CallScreenState extends State<CallScreen> {
                     _moreMenuVisible = false;
                   },
                 ),
+                onAudioOutputChange: (_) =>
+                    setState(() => _moreMenuVisible = false),
+                onAudioInputChange: (_) =>
+                    setState(() => _moreMenuVisible = false),
               ),
             ),
           ],
diff --git a/dogfooding/lib/widgets/settings_menu.dart b/dogfooding/lib/widgets/settings_menu.dart
index df926f672..6c989dc36 100644
--- a/dogfooding/lib/widgets/settings_menu.dart
+++ b/dogfooding/lib/widgets/settings_menu.dart
@@ -1,5 +1,8 @@
+import 'dart:async';
+
 import 'package:flutter/material.dart';
 import 'package:flutter_dogfooding/theme/app_palette.dart';
+import 'package:stream_chat_flutter/stream_chat_flutter.dart';
 import 'package:stream_video_flutter/stream_video_flutter.dart';
 
 CallReactionData _raisedHandReaction = const CallReactionData(
@@ -8,22 +11,65 @@
   icon: '✋',
 );
 
-class SettingsMenu extends StatelessWidget {
+class SettingsMenu extends StatefulWidget {
   const SettingsMenu({
     required this.call,
     this.onReactionSend,
     this.onStatsPressed,
+    this.onAudioOutputChange,
+    this.onAudioInputChange,
     super.key,
   });
 
   final Call call;
   final void Function(CallReactionData)? onReactionSend;
   final void Function()? onStatsPressed;
+  final void Function(RtcMediaDevice)? onAudioOutputChange;
+  final void Function(RtcMediaDevice)? onAudioInputChange;
 
   @override
-  Widget build(BuildContext context) {
-    final theme = StreamVideoTheme.of(context);
+  State<SettingsMenu> createState() => _SettingsMenuState();
+}
+
+class _SettingsMenuState extends State<SettingsMenu> {
+  final _deviceNotifier = RtcMediaDeviceNotifier.instance;
+  StreamSubscription<List<RtcMediaDevice>>? _deviceChangeSubscription;
+
+  var _audioOutputs = <RtcMediaDevice>[];
+  var _audioInputs = <RtcMediaDevice>[];
+
+  bool showAudioOutputs = false;
+  bool showAudioInputs = false;
+  bool get showMainSettings => !showAudioOutputs && !showAudioInputs;
+
+  @override
+  void initState() {
+    super.initState();
+    _deviceChangeSubscription = _deviceNotifier.onDeviceChange.listen(
+      (devices) {
+        _audioOutputs = devices
+            .where(
+              (it) => it.kind == RtcMediaDeviceKind.audioOutput,
+            )
+            .toList();
+
+        _audioInputs = devices
+            .where(
+              (it) => it.kind == RtcMediaDeviceKind.audioInput,
+            )
+            .toList();
+      },
+    );
+  }
 
+  @override
+  void dispose() {
+    _deviceChangeSubscription?.cancel();
+    super.dispose();
+  }
+
+  @override
+  Widget build(BuildContext context) {
     return Container(
       decoration: const BoxDecoration(
         color: AppColorPalette.backgroundColor,
@@ -33,56 +79,155 @@
         ),
       ),
       padding: const EdgeInsets.all(16),
-      child: Column(
-        children: [
-          Row(
-            mainAxisAlignment: MainAxisAlignment.spaceEvenly,
-            children: theme.callControlsTheme.callReactions
-                .where((element) =>
-                    element.emojiCode != _raisedHandReaction.emojiCode)
-                .map((e) {
-              return InkWell(
-                  onTap: () {
-                    call.sendReaction(
-                      reactionType: e.type,
-                      emojiCode: e.emojiCode,
-                    );
-
-                    onReactionSend?.call(e);
-                  },
-                  child: IgnorePointer(
-                    child: CallControlOption(
-                      icon: Text(
-                        e.icon,
-                        textAlign: TextAlign.center,
-                      ),
-                      onPressed: () {},
-                      padding: const EdgeInsets.all(0),
-                    ),
-                  ));
-            }).toList(),
-          ),
-          const SizedBox(height: 8),
-          SettingsMenuItem(
-            child: const Center(child: Text('✋ Raise hand')),
-            onPressed: () {
-              call.sendReaction(
-                reactionType: _raisedHandReaction.type,
-                emojiCode: _raisedHandReaction.emojiCode,
-              );
+      child: Column(children: [
+        if (showMainSettings) ..._buildMenuItems(),
+        if (showAudioOutputs) ..._buildAudioOutputsMenu(),
+        if (showAudioInputs) ..._buildAudioInputsMenu(),
+      ]),
+    );
+  }
 
-              onReactionSend?.call(_raisedHandReaction);
+  List<Widget> _buildMenuItems() {
+    return [
+      Row(
+        mainAxisAlignment: MainAxisAlignment.spaceEvenly,
+        children: StreamVideoTheme.of(context)
+            .callControlsTheme
+            .callReactions
+            .where(
+                (element) => element.emojiCode != _raisedHandReaction.emojiCode)
+            .map((e) {
+          return InkWell(
+              onTap: () {
+                widget.call.sendReaction(
+                  reactionType: e.type,
+                  emojiCode: e.emojiCode,
+                );
+
+                widget.onReactionSend?.call(e);
+              },
+              child: IgnorePointer(
+                child: CallControlOption(
+                  icon: Text(
+                    e.icon,
+                    textAlign: TextAlign.center,
+                  ),
+                  onPressed: () {},
+                  padding: const EdgeInsets.all(0),
+                ),
+              ));
+        }).toList(),
+      ),
+      const SizedBox(height: 8),
+      SettingsMenuItem(
+        child: const Center(child: Text('✋ Raise hand')),
+        onPressed: () {
+          widget.call.sendReaction(
+            reactionType: _raisedHandReaction.type,
+            emojiCode: _raisedHandReaction.emojiCode,
+          );
+
+          widget.onReactionSend?.call(_raisedHandReaction);
+        },
+      ),
+      const SizedBox(height: 16),
+      StandardActionMenuItem(
+        icon: Icons.auto_graph,
+        label: 'Call stats',
+        onPressed: widget.onStatsPressed,
+      ),
+      const SizedBox(height: 16),
+      StandardActionMenuItem(
+        icon: Icons.headphones,
+        label: 'Choose audio output',
+        onPressed: () {
+          setState(() {
+            showAudioOutputs = true;
+          });
+        },
+      ),
+      const SizedBox(height: 16),
+      StandardActionMenuItem(
+        icon: Icons.mic,
+        label: 'Choose audio input',
+        onPressed: () {
+          setState(() {
+            showAudioInputs = true;
+          });
+        },
+      )
+    ];
+  }
+
+  List<Widget> _buildAudioOutputsMenu() {
+    return [
+      GestureDetector(
+        onTap: () {
+          setState(() {
+            showAudioOutputs = false;
+          });
+        },
+        child: const Align(
+          alignment: Alignment.centerLeft,
+          child: Icon(Icons.arrow_back, size: 24),
+        ),
+      ),
+      const SizedBox(height: 16),
+      ..._audioOutputs
+          .map(
+            (device) {
+              return StandardActionMenuItem(
+                icon: Icons.multitrack_audio,
+                label: device.label,
+                color:
+                    widget.call.state.value.audioOutputDevice?.id == device.id
+                        ? AppColorPalette.appGreen
+                        : null,
+                onPressed: () {
+                  widget.call.setAudioOutputDevice(device);
+                  widget.onAudioOutputChange?.call(device);
+                },
+              );
             },
-          ),
-          const SizedBox(height: 16),
-          StandardActionMenuItem(
-            icon: Icons.auto_graph,
-            label: 'Call stats',
-            onPressed: onStatsPressed,
           )
-        ],
+          .cast<Widget>()
+          .insertBetween(const SizedBox(height: 16)),
+    ];
+  }
+
+  List<Widget> _buildAudioInputsMenu() {
+    return [
+      GestureDetector(
+        onTap: () {
+          setState(() {
+            showAudioInputs = false;
+          });
+        },
+        child: const Align(
+          alignment: Alignment.centerLeft,
+          child: Icon(Icons.arrow_back, size: 24),
+        ),
       ),
-    );
+      const SizedBox(height: 16),
+      ..._audioInputs
+          .map(
+            (device) {
+              return StandardActionMenuItem(
+                icon: Icons.multitrack_audio,
+                label: device.label,
+                color: widget.call.state.value.audioInputDevice?.id == device.id
+                    ? AppColorPalette.appGreen
+                    : null,
+                onPressed: () {
+                  widget.call.setAudioInputDevice(device);
+                  widget.onAudioInputChange?.call(device);
+                },
+              );
+            },
+          )
+          .cast<Widget>()
+          .insertBetween(const SizedBox(height: 16)),
+    ];
   }
 }
@@ -118,11 +263,13 @@ class StandardActionMenuItem extends StatelessWidget {
     super.key,
     required this.icon,
     required this.label,
+    this.color,
     this.onPressed,
   });
 
   final IconData icon;
   final String label;
+  final Color? color;
   final void Function()? onPressed;
 
   @override
@@ -135,9 +282,11 @@
           Icon(
             icon,
             size: 20,
+            color: color,
           ),
           const SizedBox(width: 8),
-          Text(label, style: const TextStyle(fontWeight: FontWeight.bold)),
+          Text(label,
+              style: TextStyle(color: color, fontWeight: FontWeight.bold)),
         ],
       ),
     );
diff --git a/packages/stream_video/lib/src/call/call.dart b/packages/stream_video/lib/src/call/call.dart
index 89e97a23c..db44c48e4 100644
--- a/packages/stream_video/lib/src/call/call.dart
+++ b/packages/stream_video/lib/src/call/call.dart
@@ -1192,11 +1192,24 @@ class Call {
       custom: custom,
     );
 
+    final mediaDevicesResult =
+        await RtcMediaDeviceNotifier.instance.enumerateDevices();
+    final mediaDevices = mediaDevicesResult.fold(
+      success: (success) => success.data,
+      failure: (failure) => <RtcMediaDevice>[],
+    );
+
     return response.fold(
       success: (it) {
         _stateManager.lifecycleCallCreated(
           CallCreated(it.data.data),
           ringing: ringing,
+          audioOutputs: mediaDevices
+              .where((d) => d.kind == RtcMediaDeviceKind.audioOutput)
+              .toList(),
+          audioInputs: mediaDevices
+              .where((d) => d.kind == RtcMediaDeviceKind.audioInput)
+              .toList(),
         );
         _logger.v(() => '[getOrCreate] completed: ${it.data}');
         return it;
diff --git a/packages/stream_video/lib/src/call/session/call_session.dart b/packages/stream_video/lib/src/call/session/call_session.dart
index c17e45873..0b2e459fd 100644
--- a/packages/stream_video/lib/src/call/session/call_session.dart
+++ b/packages/stream_video/lib/src/call/session/call_session.dart
@@ -167,6 +167,13 @@ class CallSession extends Disposable {
       if (CurrentPlatform.isIos) {
         await rtcManager?.setAppleAudioConfiguration();
       }
+
+      unawaited(
+        Future.delayed(const Duration(milliseconds: 250), () async {
+          await _applyCurrentAudioOutputDevice();
+        }),
+      );
+
       _logger.v(() => '[start] completed');
       return const Result.success(none);
     } catch (e, stk) {
diff --git a/packages/stream_video/lib/src/call/state/mixins/state_lifecycle_mixin.dart b/packages/stream_video/lib/src/call/state/mixins/state_lifecycle_mixin.dart
index f09f1b5e2..4d5d9917d 100644
--- a/packages/stream_video/lib/src/call/state/mixins/state_lifecycle_mixin.dart
+++ b/packages/stream_video/lib/src/call/state/mixins/state_lifecycle_mixin.dart
@@ -1,4 +1,5 @@
 import 'package:state_notifier/state_notifier.dart';
+import 'package:collection/collection.dart';
 
 import '../../../../stream_video.dart';
 import '../../../action/internal/lifecycle_action.dart';
@@ -101,7 +102,26 @@ mixin StateLifecycleMixin on StateNotifier<CallState> {
   void lifecycleCallCreated(
     CallCreated stage, {
     bool ringing = false,
+    List<RtcMediaDevice>? audioOutputs,
+    List<RtcMediaDevice>? audioInputs,
   }) {
+    final defaultAudioOutput = audioOutputs?.firstWhereOrNull((device) {
+      if (stage.data.metadata.settings.audio.defaultDevice ==
+          AudioSettingsRequestDefaultDeviceEnum.speaker) {
+        return device.id.equalsIgnoreCase(
+          AudioSettingsRequestDefaultDeviceEnum.speaker.value,
+        );
+      }
+
+      return !device.id.equalsIgnoreCase(
+        AudioSettingsRequestDefaultDeviceEnum.speaker.value,
+      );
+    });
+
+    final defaultAudioInput = audioInputs
+            ?.firstWhereOrNull((d) => d.label == defaultAudioOutput?.label) ??
+        audioInputs?.firstOrNull;
+
     _logger.d(() => '[lifecycleCallCreated] ringing: $ringing, state: $state');
     state = state.copyWith(
       status: stage.data.toCallStatus(state: state, ringing: ringing),
@@ -118,6 +138,8 @@
       isBackstage: stage.data.metadata.details.backstage,
       isBroadcasting: stage.data.metadata.details.broadcasting,
       isRecording: stage.data.metadata.details.recording,
+      audioOutputDevice: defaultAudioOutput,
+      audioInputDevice: defaultAudioInput,
     );
   }
 
@@ -361,3 +383,7 @@ extension on CallRingingData {
     }
   }
 }
+
+extension on String {
+  bool equalsIgnoreCase(String other) => toUpperCase() == other.toUpperCase();
+}
diff --git a/packages/stream_video/lib/src/coordinator/open_api/open_api_extensions.dart b/packages/stream_video/lib/src/coordinator/open_api/open_api_extensions.dart
index 53a0bcbc9..11ea0a89d 100644
--- a/packages/stream_video/lib/src/coordinator/open_api/open_api_extensions.dart
+++ b/packages/stream_video/lib/src/coordinator/open_api/open_api_extensions.dart
@@ -1,6 +1,7 @@
 import 'package:collection/collection.dart';
 
 import '../../../../open_api/video/coordinator/api.dart' as open;
+import '../../../stream_video.dart';
 import '../../errors/video_error.dart';
 import '../../logger/stream_log.dart';
 import '../../models/call_cid.dart';
@@ -176,6 +177,7 @@ extension CallSettingsExt on open.CallSettingsResponse {
       accessRequestEnabled: audio.accessRequestEnabled,
       opusDtxEnabled: audio.opusDtxEnabled,
       redundantCodingEnabled: audio.redundantCodingEnabled,
+      defaultDevice: audio.defaultDevice.toDomain(),
     ),
     video: StreamVideoSettings(
       accessRequestEnabled: video.accessRequestEnabled,
@@ -208,6 +210,16 @@
   }
 }
 
+extension on open.AudioSettingsDefaultDeviceEnum {
+  AudioSettingsRequestDefaultDeviceEnum toDomain() {
+    if (this == open.AudioSettingsDefaultDeviceEnum.speaker) {
+      return AudioSettingsRequestDefaultDeviceEnum.speaker;
+    } else {
+      return AudioSettingsRequestDefaultDeviceEnum.earpiece;
+    }
+  }
+}
+
 extension on open.TranscriptionSettingsModeEnum {
   TranscriptionSettingsMode toDomain() {
     if (this == open.TranscriptionSettingsModeEnum.autoOn) {
diff --git a/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device.dart b/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device.dart
index c7dc96560..80ac7cd3a 100644
--- a/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device.dart
+++ b/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device.dart
@@ -26,17 +26,19 @@ class RtcMediaDevice with EquatableMixin {
     required this.id,
     required this.label,
     required this.kind,
+    this.groupId,
   });
 
   final String id;
   final String label;
+  final String? groupId;
   final RtcMediaDeviceKind kind;
 
   @override
   String toString() {
-    return 'RtcMediaDevice{id: $id, label: $label, kind: $kind}';
+    return 'RtcMediaDevice{id: $id, label: $label, groupId: $groupId, kind: $kind}';
   }
 
   @override
-  List<Object?> get props => [id, kind, label];
+  List<Object?> get props => [id, kind, groupId, label];
 }
diff --git a/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart b/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart
index b6b9389a0..c864ddf43 100644
--- a/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart
+++ b/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart
@@ -37,6 +37,7 @@ class RtcMediaDeviceNotifier {
       return RtcMediaDevice(
         id: it.deviceId,
         label: it.label,
+        groupId: it.groupId,
         kind: RtcMediaDeviceKind.fromAlias(it.kind),
       );
     });

From 75a09e02e310badeb7acd400725828c091699e4c Mon Sep 17 00:00:00 2001
From: Brazol
Date: Mon, 18 Mar 2024 11:56:45 +0100
Subject: [PATCH 2/3] more default settings handled

---
 packages/stream_video/lib/src/call/call.dart     | 16 ++++++++++++++--
 .../lib/src/call/call_connect_options.dart       |  4 +++-
 .../open_api/open_api_extensions.dart            |  2 ++
 .../lib/src/models/call_settings.dart            | 13 +++++++++++++
 .../lib/src/call_screen/call_container.dart      |  8 +++++---
 .../lib/src/call_screen/lobby_video.dart         | 16 ++++++++++++++++
 6 files changed, 53 insertions(+), 6 deletions(-)

diff --git a/packages/stream_video/lib/src/call/call.dart b/packages/stream_video/lib/src/call/call.dart
index db44c48e4..4e1cbc132 100644
--- a/packages/stream_video/lib/src/call/call.dart
+++ b/packages/stream_video/lib/src/call/call.dart
@@ -270,8 +270,7 @@ class Call {
   set connectOptions(CallConnectOptions connectOptions) {
     final status = _status.value;
-    if (status == _ConnectionStatus.connecting ||
-        status == _ConnectionStatus.connected) {
+    if (status == _ConnectionStatus.connected) {
       _logger.w(
         () => '[setConnectOptions] rejected (connectOptions must be'
             ' set before invoking `connect`)',
       );
@@ -976,6 +975,17 @@ class Call {
     return [...?_session?.getTracks(trackIdPrefix)];
   }
 
+  void _setDefaultConnectOptions(CallSettings settings) {
+    connectOptions = connectOptions.copyWith(
+      camera: TrackOption.fromSetting(
+        enabled: settings.video.cameraDefaultOn,
+      ),
+      microphone: TrackOption.fromSetting(
+        enabled: settings.audio.micDefaultOn,
+      ),
+    );
+  }
+
   Future<void> _applyConnectOptions() async {
     _logger.d(() => '[applyConnectOptions] connectOptions: $_connectOptions');
     await _applyCameraOption(_connectOptions.camera);
@@ -1201,6 +1211,8 @@
 
     return response.fold(
       success: (it) {
+        _setDefaultConnectOptions(it.data.data.metadata.settings);
+
         _stateManager.lifecycleCallCreated(
           CallCreated(it.data.data),
           ringing: ringing,
diff --git a/packages/stream_video/lib/src/call/call_connect_options.dart b/packages/stream_video/lib/src/call/call_connect_options.dart
index 0b8a0a75d..7a33c044e 100644
--- a/packages/stream_video/lib/src/call/call_connect_options.dart
+++ b/packages/stream_video/lib/src/call/call_connect_options.dart
@@ -18,7 +18,6 @@ class CallConnectOptions with EquatableMixin {
     TrackOption? camera,
     TrackOption? microphone,
     TrackOption? screenShare,
-    Duration? dropTimeout,
   }) {
     return CallConnectOptions(
       camera: camera ?? this.camera,
@@ -43,6 +42,9 @@ abstract class TrackOption with EquatableMixin {
   const TrackOption();
 
+  factory TrackOption.fromSetting({required bool enabled}) =>
+      enabled ? TrackOption.enabled() : TrackOption.disabled();
+
   factory TrackOption.enabled() {
     return TrackEnabled._instance;
   }
diff --git a/packages/stream_video/lib/src/coordinator/open_api/open_api_extensions.dart b/packages/stream_video/lib/src/coordinator/open_api/open_api_extensions.dart
index 11ea0a89d..59df9a532 100644
--- a/packages/stream_video/lib/src/coordinator/open_api/open_api_extensions.dart
+++ b/packages/stream_video/lib/src/coordinator/open_api/open_api_extensions.dart
@@ -178,6 +178,8 @@ extension CallSettingsExt on open.CallSettingsResponse {
         opusDtxEnabled: audio.opusDtxEnabled,
         redundantCodingEnabled: audio.redundantCodingEnabled,
         defaultDevice: audio.defaultDevice.toDomain(),
+        micDefaultOn: audio.micDefaultOn,
+        speakerDefaultOn: audio.speakerDefaultOn,
       ),
       video: StreamVideoSettings(
         accessRequestEnabled: video.accessRequestEnabled,
diff --git a/packages/stream_video/lib/src/models/call_settings.dart b/packages/stream_video/lib/src/models/call_settings.dart
index 00a4787fc..d8a1a498d 100644
--- a/packages/stream_video/lib/src/models/call_settings.dart
+++ b/packages/stream_video/lib/src/models/call_settings.dart
@@ -77,17 +77,24 @@ class StreamAudioSettings extends MediaSettings {
     this.opusDtxEnabled = false,
     this.redundantCodingEnabled = false,
     this.defaultDevice = AudioSettingsRequestDefaultDeviceEnum.speaker,
+    this.micDefaultOn = true,
+    this.speakerDefaultOn = true,
   });
 
   final bool opusDtxEnabled;
   final bool redundantCodingEnabled;
   final AudioSettingsRequestDefaultDeviceEnum defaultDevice;
+  final bool micDefaultOn;
+  final bool speakerDefaultOn;
 
   @override
   List<Object?> get props => [
         accessRequestEnabled,
         opusDtxEnabled,
         redundantCodingEnabled,
+        defaultDevice,
+        micDefaultOn,
+        speakerDefaultOn,
       ];
 
   AudioSettingsRequest toOpenDto() {
@@ -96,6 +103,8 @@
       accessRequestEnabled: accessRequestEnabled,
       opusDtxEnabled: opusDtxEnabled,
       redundantCodingEnabled: redundantCodingEnabled,
+      micDefaultOn: micDefaultOn,
+      speakerDefaultOn: speakerDefaultOn,
     );
   }
 }
@@ -104,20 +113,24 @@ class StreamVideoSettings extends MediaSettings {
   const StreamVideoSettings({
     super.accessRequestEnabled = false,
     this.enabled = false,
+    this.cameraDefaultOn = true,
   });
 
   final bool enabled;
+  final bool cameraDefaultOn;
 
   @override
   List<Object?> get props => [
         accessRequestEnabled,
         enabled,
+        cameraDefaultOn,
       ];
 
   VideoSettingsRequest toOpenDto() {
     return VideoSettingsRequest(
       enabled: enabled,
       accessRequestEnabled: accessRequestEnabled,
+      cameraDefaultOn: cameraDefaultOn,
     );
   }
 }
diff --git a/packages/stream_video_flutter/lib/src/call_screen/call_container.dart b/packages/stream_video_flutter/lib/src/call_screen/call_container.dart
index 7fc5086af..82713a935 100644
--- a/packages/stream_video_flutter/lib/src/call_screen/call_container.dart
+++ b/packages/stream_video_flutter/lib/src/call_screen/call_container.dart
@@ -47,7 +47,7 @@ class StreamCallContainer extends StatefulWidget {
   const StreamCallContainer({
     super.key,
     required this.call,
-    this.callConnectOptions = const CallConnectOptions(),
+    this.callConnectOptions,
     this.onBackPressed,
     this.onLeaveCallTap,
     this.onAcceptCallTap,
@@ -62,7 +62,7 @@ class StreamCallContainer extends StatefulWidget {
   final Call call;
 
   /// Options used while connecting to the call.
-  final CallConnectOptions callConnectOptions;
+  final CallConnectOptions? callConnectOptions;
 
   /// The action to perform when the back button is pressed.
   final VoidCallback? onBackPressed;
@@ -161,7 +161,9 @@ class _StreamCallContainerState extends State<StreamCallContainer> {
   Future<void> _connect() async {
     try {
       _logger.d(() => '[connect] no args');
-      call.connectOptions = widget.callConnectOptions;
+      if (widget.callConnectOptions != null) {
+        call.connectOptions = widget.callConnectOptions!;
+      }
       final result = await call.join();
       _logger.v(() => '[connect] completed: $result');
     } catch (e) {
diff --git a/packages/stream_video_flutter/lib/src/call_screen/lobby_video.dart b/packages/stream_video_flutter/lib/src/call_screen/lobby_video.dart
index 41fd0b55c..49112685a 100644
--- a/packages/stream_video_flutter/lib/src/call_screen/lobby_video.dart
+++ b/packages/stream_video_flutter/lib/src/call_screen/lobby_video.dart
@@ -41,6 +41,22 @@ class _StreamLobbyVideoState extends State<StreamLobbyVideo> {
   RtcLocalAudioTrack? _microphoneTrack;
   RtcLocalCameraTrack? _cameraTrack;
 
+  @override
+  void initState() {
+    super.initState();
+
+    Future.delayed(Duration.zero, () {
+      final callSettings = widget.call.state.value.settings;
+      if (callSettings.audio.micDefaultOn) {
+        toggleMicrophone();
+      }
+
+      if (callSettings.video.cameraDefaultOn) {
+        toggleCamera();
+      }
+    });
+  }
+
   Future<void> toggleCamera() async {
     if (_cameraTrack != null) {
       await _cameraTrack?.stop();

From aad655ace6baee5b6f44df291097ea11b07ca4e1 Mon Sep 17 00:00:00 2001
From: Brazol
Date: Mon, 25 Mar 2024 09:59:19 +0100
Subject: [PATCH 3/3] tweak

---
 packages/stream_video/lib/src/call/session/call_session.dart | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/packages/stream_video/lib/src/call/session/call_session.dart b/packages/stream_video/lib/src/call/session/call_session.dart
index 0b2e459fd..30cd1c190 100644
--- a/packages/stream_video/lib/src/call/session/call_session.dart
+++ b/packages/stream_video/lib/src/call/session/call_session.dart
@@ -168,6 +168,8 @@
         await rtcManager?.setAppleAudioConfiguration();
       }
 
+      //FIXME: This is a temporary fix for the issue where the audio output device is not set correctly
+      // we should remove the delay and figure out why it's not setting the device without it
       unawaited(
         Future.delayed(const Duration(milliseconds: 250), () async {
           await _applyCurrentAudioOutputDevice();