From 079ad24807e478a17b858c2dd02e1c835bd10c96 Mon Sep 17 00:00:00 2001 From: Brazol Date: Mon, 2 Feb 2026 12:12:38 +0100 Subject: [PATCH 1/9] audio configuration policy --- packages/stream_video/CHANGELOG.md | 11 + packages/stream_video/lib/src/call/call.dart | 9 +- .../lib/src/call/session/call_session.dart | 8 +- .../models/audio_configuration_policy.dart | 143 ++++++++++++ .../stream_video/lib/src/stream_video.dart | 220 +++++++++--------- .../lib/src/webrtc/rtc_manager.dart | 54 ++--- .../lib/src/webrtc/rtc_manager_factory.dart | 7 +- packages/stream_video/lib/stream_video.dart | 1 + ...call_allow_multiple_active_calls_test.dart | 4 +- .../src/call/call_apply_settings_test.dart | 2 +- .../src/call/fixtures/call_test_helpers.dart | 2 +- .../test/src/core/client_state_test.dart | 4 +- .../example/lib/stream_video_options.dart | 10 +- .../example/lib/stream_video_sdk.dart | 2 +- 14 files changed, 317 insertions(+), 160 deletions(-) create mode 100644 packages/stream_video/lib/src/models/audio_configuration_policy.dart diff --git a/packages/stream_video/CHANGELOG.md b/packages/stream_video/CHANGELOG.md index e27ee709f..162cd8d15 100644 --- a/packages/stream_video/CHANGELOG.md +++ b/packages/stream_video/CHANGELOG.md @@ -1,3 +1,14 @@ +## Upcoming + +### ✅ Added +* Added `audioConfigurationPolicy` to `StreamVideoOptions` - a unified audio configuration for both iOS and Android platforms. Includes predefined policies: + * `AudioConfigurationPolicy.call()` - Optimized for voice/video calls (default) + * `AudioConfigurationPolicy.livestream()` - Optimized for livestream playback + * `AudioConfigurationPolicy.custom()` - Full control over platform-specific settings with optional `basePolicy` fallback + +### ⚠️ Deprecated +* Deprecated `androidAudioConfiguration` in `StreamVideoOptions`. Use `audioConfigurationPolicy` instead. 
+ ## 1.2.3 ### ⚡ Performance diff --git a/packages/stream_video/lib/src/call/call.dart b/packages/stream_video/lib/src/call/call.dart index 15074c5b1..002d33359 100644 --- a/packages/stream_video/lib/src/call/call.dart +++ b/packages/stream_video/lib/src/call/call.dart @@ -2022,6 +2022,7 @@ class Call { if (CurrentPlatform.isIos) { await _session?.rtcManager?.setAppleAudioConfiguration( speakerOn: _connectOptions.speakerDefaultOn, + policy: _streamVideo.options.audioConfigurationPolicy, ); } } @@ -2994,11 +2995,9 @@ class Call { if (enabled && CurrentPlatform.isAndroid) { try { - if (_streamVideo.options.androidAudioConfiguration != null) { - await rtc.Helper.setAndroidAudioConfiguration( - _streamVideo.options.androidAudioConfiguration!, - ); - } + await rtc.Helper.setAndroidAudioConfiguration( + _streamVideo.options.audioConfigurationPolicy.getAndroidConfiguration(), + ); } catch (e) { _logger.w( () => diff --git a/packages/stream_video/lib/src/call/session/call_session.dart b/packages/stream_video/lib/src/call/session/call_session.dart index 68f3075c2..d813af4ff 100644 --- a/packages/stream_video/lib/src/call/session/call_session.dart +++ b/packages/stream_video/lib/src/call/session/call_session.dart @@ -150,11 +150,11 @@ class CallSession extends Disposable { } Future _ensureAndroidAudioConfiguration() async { - if (CurrentPlatform.isAndroid && - _streamVideo.options.androidAudioConfiguration != null) { + if (CurrentPlatform.isAndroid) { try { await rtc.Helper.setAndroidAudioConfiguration( - _streamVideo.options.androidAudioConfiguration!, + _streamVideo.options.audioConfigurationPolicy + .getAndroidConfiguration(), ); _logger.v( () => '[_ensureAndroidAudioConfiguration] Configuration applied', @@ -322,6 +322,7 @@ class CallSession extends Disposable { rtcManager = await rtcManagerFactory.makeRtcManager( sfuClient: sfuClient, + streamVideo: _streamVideo, clientDetails: clientDetails, sessionSequence: sessionSeq, statsOptions: statsOptions, @@ -343,6 +344,7 @@ 
class CallSession extends Disposable { rtcManager = await rtcManagerFactory.makeRtcManager( sfuClient: sfuClient, + streamVideo: _streamVideo, publisherId: localTrackId, publishOptions: joinResponseEvent.publishOptions, clientDetails: clientDetails, diff --git a/packages/stream_video/lib/src/models/audio_configuration_policy.dart b/packages/stream_video/lib/src/models/audio_configuration_policy.dart new file mode 100644 index 000000000..442c540b8 --- /dev/null +++ b/packages/stream_video/lib/src/models/audio_configuration_policy.dart @@ -0,0 +1,143 @@ +import 'package:stream_webrtc_flutter/stream_webrtc_flutter.dart' as rtc; + +/// A policy that defines how audio should be configured on both iOS and Android. +/// +/// Use one of the predefined policies: +/// - [AudioConfigurationPolicy.call] - Optimized for voice/video calls +/// - [AudioConfigurationPolicy.livestream] - Optimized for livestream playback +/// +/// Or create a custom configuration: +/// - [AudioConfigurationPolicy.custom] - Full control over platform settings +sealed class AudioConfigurationPolicy { + const AudioConfigurationPolicy(); + + const factory AudioConfigurationPolicy.call() = CallAudioPolicy; + + const factory AudioConfigurationPolicy.livestream() = LivestreamAudioPolicy; + + /// Custom policy allowing full control over platform-specific settings. + /// + /// Use this when you need specific audio configurations that differ from + /// the predefined policies. + /// + /// You can provide only the configuration for the platform you want to customize, + /// and use [basePolicy] to define defaults for the other platform. + const factory AudioConfigurationPolicy.custom({ + AudioConfigurationPolicy basePolicy, + rtc.AppleAudioConfiguration? appleConfiguration, + rtc.AndroidAudioConfiguration? 
androidConfiguration, + }) = CustomAudioPolicy; + + rtc.AppleAudioConfiguration getAppleConfiguration({ + bool defaultToSpeaker = false, + }); + + rtc.AndroidAudioConfiguration getAndroidConfiguration(); +} + +/// Audio policy optimized for normal video/audio calls. +class CallAudioPolicy extends AudioConfigurationPolicy { + const CallAudioPolicy(); + + @override + rtc.AppleAudioConfiguration getAppleConfiguration({ + bool defaultToSpeaker = false, + }) { + return rtc.AppleAudioConfiguration( + appleAudioMode: defaultToSpeaker + ? rtc.AppleAudioMode.videoChat + : rtc.AppleAudioMode.voiceChat, + appleAudioCategory: rtc.AppleAudioCategory.playAndRecord, + appleAudioCategoryOptions: { + if (defaultToSpeaker) rtc.AppleAudioCategoryOption.defaultToSpeaker, + rtc.AppleAudioCategoryOption.mixWithOthers, + rtc.AppleAudioCategoryOption.allowBluetooth, + rtc.AppleAudioCategoryOption.allowBluetoothA2DP, + rtc.AppleAudioCategoryOption.allowAirPlay, + }, + ); + } + + @override + rtc.AndroidAudioConfiguration getAndroidConfiguration() { + return rtc.AndroidAudioConfiguration( + androidAudioMode: rtc.AndroidAudioMode.inCommunication, + androidAudioStreamType: rtc.AndroidAudioStreamType.voiceCall, + androidAudioAttributesUsageType: + rtc.AndroidAudioAttributesUsageType.voiceCommunication, + androidAudioAttributesContentType: + rtc.AndroidAudioAttributesContentType.speech, + androidAudioFocusMode: rtc.AndroidAudioFocusMode.gain, + forceHandleAudioRouting: true, + ); + } +} + +/// Audio policy optimized for livestream/broadcast scenarios. 
+class LivestreamAudioPolicy extends AudioConfigurationPolicy { + const LivestreamAudioPolicy(); + + @override + rtc.AppleAudioConfiguration getAppleConfiguration({ + bool defaultToSpeaker = false, + }) { + return rtc.AppleAudioConfiguration( + appleAudioMode: rtc.AppleAudioMode.default_, + appleAudioCategory: rtc.AppleAudioCategory.playAndRecord, + appleAudioCategoryOptions: const { + rtc.AppleAudioCategoryOption.defaultToSpeaker, + rtc.AppleAudioCategoryOption.mixWithOthers, + rtc.AppleAudioCategoryOption.allowBluetooth, + rtc.AppleAudioCategoryOption.allowBluetoothA2DP, + rtc.AppleAudioCategoryOption.allowAirPlay, + }, + ); + } + + @override + rtc.AndroidAudioConfiguration getAndroidConfiguration() { + return rtc.AndroidAudioConfiguration( + androidAudioMode: rtc.AndroidAudioMode.normal, + androidAudioStreamType: rtc.AndroidAudioStreamType.music, + androidAudioAttributesUsageType: + rtc.AndroidAudioAttributesUsageType.media, + androidAudioAttributesContentType: + rtc.AndroidAudioAttributesContentType.music, + androidAudioFocusMode: rtc.AndroidAudioFocusMode.gain, + forceHandleAudioRouting: false, + ); + } +} + +/// Custom audio policy with full control over platform settings. +class CustomAudioPolicy extends AudioConfigurationPolicy { + /// At least one of [appleConfiguration] or [androidConfiguration] should be + /// provided. If a configuration is not provided, [basePolicy] will be used + /// for that platform. + const CustomAudioPolicy({ + this.basePolicy = const CallAudioPolicy(), + this.appleConfiguration, + this.androidConfiguration, + }); + + /// The base policy used for platforms where a custom configuration + /// is not provided. Defaults to [CallAudioPolicy]. + final AudioConfigurationPolicy basePolicy; + + final rtc.AppleAudioConfiguration? appleConfiguration; + + final rtc.AndroidAudioConfiguration? 
androidConfiguration; + + @override + rtc.AppleAudioConfiguration getAppleConfiguration({ + bool defaultToSpeaker = false, + }) { + return appleConfiguration ?? + basePolicy.getAppleConfiguration(defaultToSpeaker: defaultToSpeaker); + } + + @override + rtc.AndroidAudioConfiguration getAndroidConfiguration() { + return androidConfiguration ?? basePolicy.getAndroidConfiguration(); + } +} diff --git a/packages/stream_video/lib/src/stream_video.dart b/packages/stream_video/lib/src/stream_video.dart index e81b19898..8398404fb 100644 --- a/packages/stream_video/lib/src/stream_video.dart +++ b/packages/stream_video/lib/src/stream_video.dart @@ -40,6 +40,7 @@ import 'logger/impl/external_logger.dart'; import 'logger/impl/tagged_logger.dart'; import 'logger/stream_log.dart'; import 'logger/stream_logger.dart'; +import 'models/audio_configuration_policy.dart'; import 'models/call_cid.dart'; import 'models/call_preferences.dart'; import 'models/call_received_data.dart'; @@ -92,7 +93,7 @@ class StreamVideo extends Disposable { /// If [failIfSingletonExists] is set to false, the new instance will override and disconnect the existing singleton instance. factory StreamVideo( String apiKey, { - StreamVideoOptions options = const StreamVideoOptions(), + StreamVideoOptions options = const StreamVideoOptions.constant(), required User user, String? userToken, TokenLoader? tokenLoader, @@ -123,7 +124,7 @@ class StreamVideo extends Disposable { factory StreamVideo.create( String apiKey, { required User user, - StreamVideoOptions options = const StreamVideoOptions(), + StreamVideoOptions options = const StreamVideoOptions.constant(), String? userToken, TokenLoader? tokenLoader, OnTokenUpdated? 
onTokenUpdated, @@ -190,9 +191,9 @@ class StreamVideo extends Disposable { if (CurrentPlatform.isAndroid || CurrentPlatform.isIos) { rtc.WebRTC.initialize( options: { - if (CurrentPlatform.isAndroid && - options.androidAudioConfiguration != null) - 'androidAudioConfiguration': options.androidAudioConfiguration! + if (CurrentPlatform.isAndroid) + 'androidAudioConfiguration': options.audioConfigurationPolicy + .getAndroidConfiguration() .toMap(), }, ).then((_) { @@ -210,27 +211,21 @@ class StreamVideo extends Disposable { UserToken.anonymous(), onTokenUpdated: onTokenUpdated, ), - UserType.guest => TokenProvider.dynamic( - (userId) async { - final result = await _client.loadGuest(id: userId); - if (result is! Success) { - throw (result as Failure).error; - } - final updatedUser = result.data.user; - _state.user.value = User( - type: user.type, - info: updatedUser.toUserInfo(), - ); - return result.data.accessToken; - }, - onTokenUpdated: onTokenUpdated, - ), + UserType.guest => TokenProvider.dynamic((userId) async { + final result = await _client.loadGuest(id: userId); + if (result is! Success) { + throw (result as Failure).error; + } + final updatedUser = result.data.user; + _state.user.value = User( + type: user.type, + info: updatedUser.toUserInfo(), + ); + return result.data.accessToken; + }, onTokenUpdated: onTokenUpdated), }; - _tokenManager.setTokenProvider( - user.id, - tokenProvider: tokenProvider, - ); + _tokenManager.setTokenProvider(user.id, tokenProvider: tokenProvider); _setupLogger(options.logPriority, options.logHandlerFunction); @@ -259,9 +254,7 @@ class StreamVideo extends Disposable { '[StreamVideo] failed to auto connect: $error with stackTrace: $stackTrace', ); - return Result.error( - 'Failed to auto connect: $error', - ); + return Result.error('Failed to auto connect: $error'); }), ); } @@ -372,12 +365,10 @@ class StreamVideo extends Disposable { return _connectOperation! 
.valueOrDefault(Result.error('connect was cancelled')) - .whenComplete( - () { - _logger.i(() => '[connect] clear shared operation'); - _connectOperation = null; - }, - ); + .whenComplete(() { + _logger.i(() => '[connect] clear shared operation'); + _connectOperation = null; + }); } /// Disconnects the user from the Stream Video service. @@ -406,9 +397,7 @@ class StreamVideo extends Disposable { return Result.success(token); } - _connectionState = ConnectionState.connecting( - _state.currentUser.id, - ); + _connectionState = ConnectionState.connecting(_state.currentUser.id); // guest user will be updated when token gets fetched final tokenResult = await _tokenManager.getToken(); @@ -437,9 +426,7 @@ class StreamVideo extends Disposable { ); return result; } - _connectionState = ConnectionState.connected( - _state.currentUser.id, - ); + _connectionState = ConnectionState.connected(_state.currentUser.id); _subscriptions.add(_idEvents, _client.events.listen(_onEvent)); _subscriptions.add(_idAppState, lifecycle.appState.listen(_onAppState)); @@ -521,14 +508,10 @@ class StreamVideo extends Disposable { _state.incomingCall.value = call; } else if (event is CoordinatorConnectedEvent) { _logger.i(() => '[onCoordinatorEvent] connected ${event.userId}'); - _connectionState = ConnectionState.connected( - _state.currentUser.id, - ); + _connectionState = ConnectionState.connected(_state.currentUser.id); } else if (event is CoordinatorDisconnectedEvent) { _logger.i(() => '[onCoordinatorEvent] disconnected ${event.userId}'); - _connectionState = ConnectionState.disconnected( - _state.currentUser.id, - ); + _connectionState = ConnectionState.disconnected(_state.currentUser.id); } else if (event is CoordinatorReconnectedEvent) { _logger.i(() => '[onCoordinatorEvent] reconnected ${event.userId}'); if (state.watchedCalls.value.isNotEmpty) { @@ -543,14 +526,12 @@ class StreamVideo extends Disposable { .toList(), }, }, - ).onError( - (error, stackTrace) { - _logger.e( - () => 
'[onCoordinatorEvent] re-watching calls failed: $error', - ); - return Result.failure(VideoErrors.compose(error, stackTrace)); - }, - ), + ).onError((error, stackTrace) { + _logger.e( + () => '[onCoordinatorEvent] re-watching calls failed: $error', + ); + return Result.failure(VideoErrors.compose(error, stackTrace)); + }), ); } } @@ -648,10 +629,7 @@ class StreamVideo extends Disposable { CallPreferences? preferences, }) { return Call( - callCid: StreamCallCid.from( - type: callType, - id: id, - ), + callCid: StreamCallCid.from(type: callType, id: id), coordinatorClient: _client, streamVideo: this, networkMonitor: _networkMonitor, @@ -722,9 +700,7 @@ class StreamVideo extends Disposable { } /// Removes a device used to receive push notifications. - Future> removeDevice({ - required String pushToken, - }) { + Future> removeDevice({required String pushToken}) { _logger.d(() => '[removeDevice] pushToken: $pushToken'); return _client.deleteDevice(id: pushToken, userId: currentUser.id); } @@ -757,20 +733,18 @@ class StreamVideo extends Disposable { StreamSubscription? disposeAfterResolvingRinging({ void Function()? disposingCallback, }) { - return onRingingEvent( - (event) { - if (event is ActionCallAccept || - event is ActionCallDecline || - event is ActionCallTimeout || - event is ActionCallEnded) { - // Delay the callback to ensure the call is fully resolved. - Future.delayed(const Duration(seconds: 1), () { - disposingCallback?.call(); - dispose(); - }); - } - }, - ); + return onRingingEvent((event) { + if (event is ActionCallAccept || + event is ActionCallDecline || + event is ActionCallTimeout || + event is ActionCallEnded) { + // Delay the callback to ensure the call is fully resolved. + Future.delayed(const Duration(seconds: 1), () { + disposingCallback?.call(); + dispose(); + }); + } + }); } Future consumeAndAcceptActiveCall({ @@ -865,20 +839,18 @@ class StreamVideo extends Disposable { void Function(Call)? onCallAccepted, CallPreferences? 
acceptCallPreferences, }) { - return onRingingEvent( - (event) { - // Ignore call accept event when app is in detached state on Android. - // The call flow should be handled by consuming the call like in the terminated state. - if (!CurrentPlatform.isAndroid || - _state.appLifecycleState.value != LifecycleState.detached) { - _onCallAccept( - event, - onCallAccepted: onCallAccepted, - callPreferences: acceptCallPreferences, - ); - } - }, - ); + return onRingingEvent((event) { + // Ignore call accept event when app is in detached state on Android. + // The call flow should be handled by consuming the call like in the terminated state. + if (!CurrentPlatform.isAndroid || + _state.appLifecycleState.value != LifecycleState.detached) { + _onCallAccept( + event, + onCallAccepted: onCallAccepted, + callPreferences: acceptCallPreferences, + ); + } + }); } StreamSubscription? observeCallIncomingRingingEvent() { @@ -923,9 +895,7 @@ class StreamVideo extends Disposable { ); if (consumeResult.isFailure) { - _logger.w( - () => '[onCallAccept] error consuming incoming call}', - ); + _logger.w(() => '[onCallAccept] error consuming incoming call}'); return; } @@ -950,10 +920,7 @@ class StreamVideo extends Disposable { final cid = event.data.callCid; if (uuid == null || cid == null) return; - final consumeResult = await consumeIncomingCall( - uuid: uuid, - cid: cid, - ); + final consumeResult = await consumeIncomingCall(uuid: uuid, cid: cid); final incomingCall = consumeResult.getDataOrNull(); if (incomingCall == null) return; @@ -1152,10 +1119,7 @@ class StreamVideo extends Disposable { required StreamCallType callType, required String id, }) async { - final call = makeCall( - callType: callType, - id: id, - ); + final call = makeCall(callType: callType, id: id); final callResult = await call.get(watch: false); return callResult.fold( @@ -1315,9 +1279,7 @@ Future _setClientDetails() async { sfu_models.Device? device; sfu_models.Browser? 
browser; - var os = sfu_models.OS( - name: CurrentPlatform.name, - ); + var os = sfu_models.OS(name: CurrentPlatform.name); if (CurrentPlatform.isAndroid) { final deviceInfo = await DeviceInfoPlugin().androidInfo; @@ -1335,9 +1297,7 @@ Future _setClientDetails() async { name: CurrentPlatform.name, version: deviceInfo.systemVersion, ); - device = sfu_models.Device( - name: deviceInfo.utsname.machine, - ); + device = sfu_models.Device(name: deviceInfo.utsname.machine); } else if (CurrentPlatform.isMacOS) { final deviceInfo = await DeviceInfoPlugin().macOsInfo; os = sfu_models.OS( @@ -1409,7 +1369,36 @@ void _defaultLogHandler( } class StreamVideoOptions { - const StreamVideoOptions({ + StreamVideoOptions({ + this.coordinatorRpcUrl = _defaultCoordinatorRpcUrl, + this.coordinatorWsUrl = _defaultCoordinatorWsUrl, + this.latencySettings = const LatencySettings(), + this.retryPolicy = const RetryPolicy(), + this.defaultCallPreferences, + this.sdpPolicy = const SdpPolicy(spdEditingEnabled: false), + this.audioProcessor, + this.logPriority = Priority.none, + this.logHandlerFunction = _defaultLogHandler, + this.muteVideoWhenInBackground = false, + this.muteAudioWhenInBackground = false, + this.autoConnect = true, + this.includeUserDetailsForAutoConnect = true, + this.keepConnectionsAliveWhenInBackground = false, + this.networkMonitorSettings = const NetworkMonitorSettings(), + this.allowMultipleActiveCalls = false, + @Deprecated( + 'Use audioConfigurationPolicy instead. This parameter will be removed in the next major release.', + ) + this.androidAudioConfiguration, + AudioConfigurationPolicy audioConfigurationPolicy = const CallAudioPolicy(), + }) : audioConfigurationPolicy = androidAudioConfiguration == null + ? audioConfigurationPolicy + : CustomAudioPolicy(androidConfiguration: androidAudioConfiguration); + + /// Use this constructor when you need a compile-time constant. 
Note that [androidAudioConfiguration] + /// will be ignored in this constructor - use [audioConfigurationPolicy] instead. + //TODO: Remove this constructor in the next major release while removing androidAudioConfiguration. + const StreamVideoOptions.constant({ this.coordinatorRpcUrl = _defaultCoordinatorRpcUrl, this.coordinatorWsUrl = _defaultCoordinatorWsUrl, this.latencySettings = const LatencySettings(), @@ -1426,7 +1415,11 @@ class StreamVideoOptions { this.keepConnectionsAliveWhenInBackground = false, this.networkMonitorSettings = const NetworkMonitorSettings(), this.allowMultipleActiveCalls = false, + @Deprecated( + 'Use audioConfigurationPolicy instead. Usage of this parameter will be ignored in this constructor.', + ) this.androidAudioConfiguration, + this.audioConfigurationPolicy = const CallAudioPolicy(), }); final String coordinatorRpcUrl; @@ -1455,5 +1448,20 @@ class StreamVideoOptions { /// Returns the current [NetworkMonitorSettings]. final NetworkMonitorSettings networkMonitorSettings; + @Deprecated( + 'Use audioConfigurationPolicy instead. This parameter will be removed in the next major release.', + ) final rtc.AndroidAudioConfiguration? androidAudioConfiguration; + + /// The audio configuration policy for the SDK. + /// + /// Use predefined policies: + /// - [AudioConfigurationPolicy.call] - Optimized for voice/video calls (default) + /// - [AudioConfigurationPolicy.livestream] - Optimized for livestream playback + /// + /// Or create a custom configuration: + /// - [AudioConfigurationPolicy.custom] - Full control over platform settings + /// + /// Defaults to [CallAudioPolicy]. 
+ final AudioConfigurationPolicy audioConfigurationPolicy; } diff --git a/packages/stream_video/lib/src/webrtc/rtc_manager.dart b/packages/stream_video/lib/src/webrtc/rtc_manager.dart index 579c86d62..36cc50228 100644 --- a/packages/stream_video/lib/src/webrtc/rtc_manager.dart +++ b/packages/stream_video/lib/src/webrtc/rtc_manager.dart @@ -6,36 +6,22 @@ import 'package:rxdart/transformers.dart'; import 'package:sdp_transform/sdp_transform.dart'; import 'package:stream_webrtc_flutter/stream_webrtc_flutter.dart' as rtc; -import '../../open_api/video/coordinator/api.dart'; +import '../../stream_video.dart'; import '../disposable.dart'; import '../errors/video_error_composer.dart'; -import '../logger/impl/tagged_logger.dart'; -import '../logger/stream_log.dart'; -import '../models/models.dart'; -import '../platform_detector/platform_detector.dart'; import '../sfu/data/models/sfu_model_parser.dart'; import '../sfu/data/models/sfu_publish_options.dart'; -import '../sfu/data/models/sfu_track_type.dart'; import '../sfu/data/models/sfu_video_sender.dart'; import '../utils/extensions.dart'; -import '../utils/none.dart'; -import '../utils/result.dart'; import 'codecs_helper.dart' as codecs; import 'codecs_helper.dart'; -import 'media/media_constraints.dart'; import 'model/rtc_audio_bitrate_preset.dart'; import 'model/rtc_tracks_info.dart'; -import 'model/rtc_video_dimension.dart'; import 'model/rtc_video_encoding.dart'; -import 'model/rtc_video_parameters.dart'; import 'peer_connection.dart'; -import 'peer_type.dart'; import 'rtc_audio_api/rtc_audio_api.dart' show checkIfAudioOutputChangeSupported; -import 'rtc_media_device/rtc_media_device.dart'; -import 'rtc_media_device/rtc_media_device_notifier.dart'; import 'rtc_parser.dart'; -import 'rtc_track/rtc_track.dart'; import 'traced_peer_connection.dart'; import 'transceiver_cache.dart'; @@ -68,7 +54,8 @@ class RtcManager extends Disposable { required this.publisher, required this.subscriber, required this.publishOptions, - }) 
{ + required StreamVideo streamVideo, + }) : _streamVideo = streamVideo { subscriber.onTrack = _onRemoteTrack; } @@ -79,6 +66,7 @@ class RtcManager extends Disposable { final String? publisherId; final TracedStreamPeerConnection? publisher; final TracedStreamPeerConnection subscriber; + final StreamVideo _streamVideo; final transceiversManager = TransceiverManager(); List publishOptions; @@ -1236,9 +1224,12 @@ extension RtcManagerTrackHelper on RtcManager { )) { await setAppleAudioConfiguration( speakerOn: true, + policy: _streamVideo.options.audioConfigurationPolicy, ); } else { - await setAppleAudioConfiguration(); + await setAppleAudioConfiguration( + policy: _streamVideo.options.audioConfigurationPolicy, + ); } // Change the audio output device for all remote audio tracks. @@ -1477,23 +1468,26 @@ extension RtcManagerTrackHelper on RtcManager { } Future> setAppleAudioConfiguration({ + required AudioConfigurationPolicy policy, bool speakerOn = false, }) async { try { await rtc.Helper.setAppleAudioConfiguration( - rtc.AppleAudioConfiguration( - appleAudioMode: speakerOn - ? rtc.AppleAudioMode.videoChat - : rtc.AppleAudioMode.voiceChat, - appleAudioCategory: rtc.AppleAudioCategory.playAndRecord, - appleAudioCategoryOptions: { - if (speakerOn) rtc.AppleAudioCategoryOption.defaultToSpeaker, - rtc.AppleAudioCategoryOption.mixWithOthers, - rtc.AppleAudioCategoryOption.allowBluetooth, - rtc.AppleAudioCategoryOption.allowBluetoothA2DP, - rtc.AppleAudioCategoryOption.allowAirPlay, - }, - ), + policy.getAppleConfiguration(defaultToSpeaker: speakerOn), + ); + return const Result.success(none); + } catch (e, stk) { + return Result.failure(VideoErrors.compose(e, stk)); + } + } + + /// Applies the Android audio configuration from the policy. 
+ Future> setAndroidAudioConfiguration({ + required AudioConfigurationPolicy policy, + }) async { + try { + await rtc.Helper.setAndroidAudioConfiguration( + policy.getAndroidConfiguration(), ); return const Result.success(none); } catch (e, stk) { diff --git a/packages/stream_video/lib/src/webrtc/rtc_manager_factory.dart b/packages/stream_video/lib/src/webrtc/rtc_manager_factory.dart index 38132555d..22979293a 100644 --- a/packages/stream_video/lib/src/webrtc/rtc_manager_factory.dart +++ b/packages/stream_video/lib/src/webrtc/rtc_manager_factory.dart @@ -1,11 +1,8 @@ -import '../../open_api/video/coordinator/api.dart'; import '../../protobuf/video/sfu/models/models.pb.dart'; +import '../../stream_video.dart'; import '../call/session/call_session_config.dart'; -import '../logger/impl/tagged_logger.dart'; -import '../models/call_cid.dart'; import '../sfu/data/models/sfu_publish_options.dart'; import '../sfu/sfu_client.dart'; -import '../types/other.dart'; import 'peer_connection_factory.dart'; import 'rtc_manager.dart'; import 'sdp/editor/sdp_editor.dart'; @@ -33,6 +30,7 @@ class RtcManagerFactory { Future makeRtcManager({ required SfuClient sfuClient, + required StreamVideo streamVideo, ClientDetails? clientDetails, String? publisherId, int? 
sessionSequence, @@ -71,6 +69,7 @@ class RtcManagerFactory { publisher: publisher, subscriber: subscriber, publishOptions: publishOptions, + streamVideo: streamVideo, ); } } diff --git a/packages/stream_video/lib/stream_video.dart b/packages/stream_video/lib/stream_video.dart index cd0613c38..73b5d8c85 100644 --- a/packages/stream_video/lib/stream_video.dart +++ b/packages/stream_video/lib/stream_video.dart @@ -24,6 +24,7 @@ export 'src/logger/impl/file_logger.dart'; export 'src/logger/impl/tagged_logger.dart'; export 'src/logger/stream_log.dart'; export 'src/logger/stream_logger.dart'; +export 'src/models/audio_configuration_policy.dart'; export 'src/models/models.dart'; export 'src/network_monitor_settings.dart'; export 'src/platform_detector/platform_detector.dart'; diff --git a/packages/stream_video/test/src/call/call_allow_multiple_active_calls_test.dart b/packages/stream_video/test/src/call/call_allow_multiple_active_calls_test.dart index 57bf04a6e..6eb808342 100644 --- a/packages/stream_video/test/src/call/call_allow_multiple_active_calls_test.dart +++ b/packages/stream_video/test/src/call/call_allow_multiple_active_calls_test.dart @@ -74,7 +74,7 @@ void main() { 'test-api-key', user: user, userToken: userToken, - options: const StreamVideoOptions( + options: const StreamVideoOptions.constant( allowMultipleActiveCalls: false, autoConnect: false, ), @@ -202,7 +202,7 @@ void main() { 'test-api-key', user: user, userToken: userToken, - options: const StreamVideoOptions( + options: const StreamVideoOptions.constant( allowMultipleActiveCalls: true, autoConnect: false, ), diff --git a/packages/stream_video/test/src/call/call_apply_settings_test.dart b/packages/stream_video/test/src/call/call_apply_settings_test.dart index bfb0b8900..90c3ebe23 100644 --- a/packages/stream_video/test/src/call/call_apply_settings_test.dart +++ b/packages/stream_video/test/src/call/call_apply_settings_test.dart @@ -53,7 +53,7 @@ void main() { 'test-api-key', user: user, userToken: 
userToken, - options: const StreamVideoOptions( + options: const StreamVideoOptions.constant( autoConnect: false, ), precacheGenericSdps: false, diff --git a/packages/stream_video/test/src/call/fixtures/call_test_helpers.dart b/packages/stream_video/test/src/call/fixtures/call_test_helpers.dart index a8dd24187..a83587bd6 100644 --- a/packages/stream_video/test/src/call/fixtures/call_test_helpers.dart +++ b/packages/stream_video/test/src/call/fixtures/call_test_helpers.dart @@ -166,7 +166,7 @@ MockStreamVideo setupMockStreamVideo({ClientState? clientState}) { final effectiveClientState = clientState ?? setupMockClientState(); when(() => streamVideo.state).thenReturn(effectiveClientState); - when(() => streamVideo.options).thenReturn(const StreamVideoOptions()); + when(() => streamVideo.options).thenReturn(StreamVideoOptions()); when( () => streamVideo.currentUser, ).thenReturn(SampleCallData.defaultUserInfo); diff --git a/packages/stream_video/test/src/core/client_state_test.dart b/packages/stream_video/test/src/core/client_state_test.dart index 1d7dbcea2..23b70f820 100644 --- a/packages/stream_video/test/src/core/client_state_test.dart +++ b/packages/stream_video/test/src/core/client_state_test.dart @@ -59,7 +59,7 @@ void main() { 'test-api-key', user: user, userToken: userToken, - options: const StreamVideoOptions( + options: StreamVideoOptions( allowMultipleActiveCalls: false, autoConnect: false, ), @@ -180,7 +180,7 @@ void main() { 'test-api-key', user: user, userToken: userToken, - options: const StreamVideoOptions( + options: StreamVideoOptions( allowMultipleActiveCalls: true, autoConnect: false, ), diff --git a/packages/stream_video_flutter/example/lib/stream_video_options.dart b/packages/stream_video_flutter/example/lib/stream_video_options.dart index 1719a2cd6..b7fbb2e1b 100644 --- a/packages/stream_video_flutter/example/lib/stream_video_options.dart +++ b/packages/stream_video_flutter/example/lib/stream_video_options.dart @@ -4,11 +4,11 @@ import 
'package:stream_video/stream_video.dart'; mixin DefaultVideoOptions { static StreamVideoOptions get remote { - return const StreamVideoOptions(); + return StreamVideoOptions(); } static StreamVideoOptions get local { - return const StreamVideoOptions( + return StreamVideoOptions( coordinatorRpcUrl: 'http://10.0.0.53:3030/video', coordinatorWsUrl: 'ws://10.0.0.53:8800/video/connect', ); @@ -45,17 +45,17 @@ mixin DefaultVideoOptions { } } - static const StreamVideoOptions _web = StreamVideoOptions( + static final StreamVideoOptions _web = StreamVideoOptions( coordinatorRpcUrl: 'http://localhost:3030/video', coordinatorWsUrl: 'ws://localhost:8800/video/connect', ); - static const StreamVideoOptions _android = StreamVideoOptions( + static final StreamVideoOptions _android = StreamVideoOptions( coordinatorRpcUrl: 'http://10.0.2.2:3030/video', coordinatorWsUrl: 'ws://10.0.2.2:8800/video/connect', ); - static const StreamVideoOptions _ios = StreamVideoOptions( + static final StreamVideoOptions _ios = StreamVideoOptions( coordinatorRpcUrl: 'http://localhost:3030/video', coordinatorWsUrl: 'ws://localhost:8800/video/connect', ); diff --git a/packages/stream_video_flutter/example/lib/stream_video_sdk.dart b/packages/stream_video_flutter/example/lib/stream_video_sdk.dart index 0e3cf6246..cec60d9b1 100644 --- a/packages/stream_video_flutter/example/lib/stream_video_sdk.dart +++ b/packages/stream_video_flutter/example/lib/stream_video_sdk.dart @@ -10,7 +10,7 @@ class StreamVideoSdk { required String apiKey, required UserInfo user, required UserToken userToken, - StreamVideoOptions options = const StreamVideoOptions(), + StreamVideoOptions options = const StreamVideoOptions.constant(), }) async { if (StreamVideo.isInitialized()) { streamLog.d(_tag, () => '[initialize] reset instance'); From 47f819f738914896c1d3239ca1d5bfbf858dd36b Mon Sep 17 00:00:00 2001 From: Brazol Date: Fri, 6 Feb 2026 15:03:33 +0100 Subject: [PATCH 2/9] hifi and stereo implementation for Android --- 
melos.yaml | 5 +- packages/stream_video/lib/src/call/call.dart | 43 +++- .../lib/src/call/session/call_session.dart | 43 +--- .../mixins/state_call_actions_mixin.dart | 9 + packages/stream_video/lib/src/call_state.dart | 7 + .../open_api/open_api_extensions.dart | 1 + .../models/audio_configuration_policy.dart | 104 ++++++-- .../lib/src/models/call_settings.dart | 4 + .../events/sfu_event_mapper_extensions.dart | 34 +++ .../sfu/data/models/sfu_audio_bitrate.dart | 39 +++ .../models/sfu_model_mapper_extensions.dart | 33 +++ .../sfu/data/models/sfu_publish_options.dart | 6 +- .../lib/src/sfu/sfu_extensions.dart | 14 ++ .../stream_video/lib/src/stream_video.dart | 58 +++-- .../lib/src/webrtc/codecs_helper.dart | 24 ++ .../src/webrtc/media/audio_constraints.dart | 7 + .../model/rtc_audio_bitrate_preset.dart | 13 +- .../lib/src/webrtc/model/rtc_tracks_info.dart | 10 +- .../lib/src/webrtc/rtc_manager.dart | 230 ++++++++++++++---- .../lib/src/webrtc/rtc_manager_factory.dart | 3 + .../rtc_media_device_notifier.dart | 22 +- .../rtc_track/rtc_track_publish_options.dart | 9 + .../action_set_opus_stereo_enabled.dart | 68 ++++++ .../action/sdp_edit_action_factory.dart | 8 + .../rule/rule_set_opus_stereo_enabled.dart | 22 ++ .../sdp/editor/rule/sdp_munging_rule.dart | 7 + .../lib/src/webrtc/sdp/editor/sdp_editor.dart | 2 + .../webrtc/sdp/editor/sdp_editor_impl.dart | 27 +- .../lib/src/webrtc/transceiver_cache.dart | 9 +- packages/stream_video/lib/stream_video.dart | 1 + packages/stream_video/pubspec.yaml | 5 +- packages/stream_video_filters/pubspec.yaml | 5 +- .../stream_video_flutter/example/pubspec.yaml | 5 +- packages/stream_video_flutter/pubspec.yaml | 5 +- .../pubspec.yaml | 5 +- .../pubspec.yaml | 5 +- 36 files changed, 731 insertions(+), 161 deletions(-) create mode 100644 packages/stream_video/lib/src/sfu/data/models/sfu_audio_bitrate.dart create mode 100644 packages/stream_video/lib/src/webrtc/rtc_track/rtc_track_publish_options.dart create mode 100644 
packages/stream_video/lib/src/webrtc/sdp/editor/action/action_set_opus_stereo_enabled.dart create mode 100644 packages/stream_video/lib/src/webrtc/sdp/editor/rule/rule_set_opus_stereo_enabled.dart diff --git a/melos.yaml b/melos.yaml index 59a4fb85c..a2a14fcae 100644 --- a/melos.yaml +++ b/melos.yaml @@ -22,7 +22,10 @@ command: device_info_plus: ^12.1.0 share_plus: ^11.0.0 stream_chat_flutter: ^9.17.0 - stream_webrtc_flutter: ^2.2.4 + stream_webrtc_flutter: + git: + url: https://github.com/GetStream/webrtc-flutter.git + ref: feat/hifi-stereo-playout stream_video: ^1.2.4 stream_video_flutter: ^1.2.4 stream_video_noise_cancellation: ^1.2.4 diff --git a/packages/stream_video/lib/src/call/call.dart b/packages/stream_video/lib/src/call/call.dart index 002d33359..99cf7a9e9 100644 --- a/packages/stream_video/lib/src/call/call.dart +++ b/packages/stream_video/lib/src/call/call.dart @@ -29,6 +29,7 @@ import '../models/models.dart'; import '../platform_detector/platform_detector.dart'; import '../retry/retry_policy.dart'; import '../sfu/data/events/sfu_events.dart'; +import '../sfu/data/models/sfu_audio_bitrate.dart'; import '../sfu/data/models/sfu_client_capability.dart'; import '../sfu/data/models/sfu_error.dart'; import '../sfu/data/models/sfu_track_type.dart'; @@ -2993,19 +2994,6 @@ class Call { return Result.error('Missing permission to send audio'); } - if (enabled && CurrentPlatform.isAndroid) { - try { - await rtc.Helper.setAndroidAudioConfiguration( - _streamVideo.options.audioConfigurationPolicy.getAndroidConfiguration(), - ); - } catch (e) { - _logger.w( - () => - '[setMicrophoneEnabled] Failed to set Android audio configuration: $e', - ); - } - } - final result = await _session?.setMicrophoneEnabled( enabled, @@ -3152,6 +3140,35 @@ class Call { return result; } + void setAudioBitrateProfile(SfuAudioBitrateProfile profile) { + if (!state.value.settings.audio.hifiAudioEnabled) { + throw ArgumentError('High Fidelity audio is not enabled for this call'); + } + + if 
(_streamVideo.isAudioProcessorConfigured()) { + final disableAudioProcessing = + profile == SfuAudioBitrateProfile.musicHighQuality; + + if (disableAudioProcessing) { + unawaited(stopAudioProcessing()); + } else { + unawaited(startAudioProcessing()); + } + } + + _stateManager.setAudioBitrateProfile(profile); + + final stereo = profile == SfuAudioBitrateProfile.musicHighQuality; + _session?.rtcManager?.changeDefaultAudioConstraints( + AudioConstraints( + noiseSuppression: !stereo, + echoCancellation: !stereo, + autoGainControl: !stereo, + channelCount: stereo ? 2 : 1, + ), + ); + } + bool checkIfAudioOutputChangeSupported() { return rtc_audio.checkIfAudioOutputChangeSupported(); } diff --git a/packages/stream_video/lib/src/call/session/call_session.dart b/packages/stream_video/lib/src/call/session/call_session.dart index d813af4ff..d6a2ee43e 100644 --- a/packages/stream_video/lib/src/call/session/call_session.dart +++ b/packages/stream_video/lib/src/call/session/call_session.dart @@ -149,25 +149,6 @@ class CallSession extends Disposable { }); } - Future _ensureAndroidAudioConfiguration() async { - if (CurrentPlatform.isAndroid) { - try { - await rtc.Helper.setAndroidAudioConfiguration( - _streamVideo.options.audioConfigurationPolicy - .getAndroidConfiguration(), - ); - _logger.v( - () => '[_ensureAndroidAudioConfiguration] Configuration applied', - ); - } catch (e) { - _logger.w( - () => - '[_ensureAndroidAudioConfiguration] Failed to apply Android audio configuration: $e', - ); - } - } - } - Future getReconnectDetails( SfuReconnectionStrategy strategy, { String? 
migratingFromSfuId, @@ -316,16 +297,18 @@ class CallSession extends Disposable { // Ensure WebRTC initialization completes before creating rtcManager await _streamVideo.webrtcInitializationCompleter.future; - await _ensureAndroidAudioConfiguration(); if (isAnonymousUser) { rtcManager = await rtcManagerFactory.makeRtcManager( sfuClient: sfuClient, streamVideo: _streamVideo, + stateManager: stateManager, clientDetails: clientDetails, sessionSequence: sessionSeq, statsOptions: statsOptions, + callSessionConfig: config, + publishOptions: joinResponseEvent.publishOptions, ) ..onSubscriberIceCandidate = _onLocalIceCandidate ..onRenegotiationNeeded = _onRenegotiationNeeded @@ -345,6 +328,7 @@ class CallSession extends Disposable { await rtcManagerFactory.makeRtcManager( sfuClient: sfuClient, streamVideo: _streamVideo, + stateManager: stateManager, publisherId: localTrackId, publishOptions: joinResponseEvent.publishOptions, clientDetails: clientDetails, @@ -475,8 +459,6 @@ class CallSession extends Disposable { stateManager.sfuPinsUpdated(event.callState.pins); - await _ensureAndroidAudioConfiguration(); - result = Result.success( ( callState: event.callState, @@ -708,11 +690,6 @@ class CallSession extends Disposable { // Only start remote tracks. Local tracks are started by the user. if (track is! RtcRemoteTrack) return; - - if (track.isAudioTrack) { - await _ensureAndroidAudioConfiguration(); - } - await track.start(); } @@ -917,10 +894,6 @@ class CallSession extends Disposable { ) async { _logger.d(() => '[onRemoteTrackReceived] remoteTrack: $remoteTrack'); - if (remoteTrack.isAudioTrack) { - await _ensureAndroidAudioConfiguration(); - } - // Start the track. 
await remoteTrack.start(); @@ -975,11 +948,6 @@ class CallSession extends Disposable { } final result = await rtcManager.setAudioOutputDevice(device: device); - - if (result.isSuccess && CurrentPlatform.isAndroid) { - await _ensureAndroidAudioConfiguration(); - } - return result; } @@ -1120,6 +1088,9 @@ extension RtcTracksInfoMapper on List { mid: info.mid, publishOptionId: info.publishOptionId, codec: info.codec?.toDTO(), + dtx: info.dtx, + stereo: info.stereo, + red: info.red, layers: info.layers?.map((layer) { return sfu_models.VideoLayer( rid: layer.rid, diff --git a/packages/stream_video/lib/src/call/state/mixins/state_call_actions_mixin.dart b/packages/stream_video/lib/src/call/state/mixins/state_call_actions_mixin.dart index def723c80..a39071377 100644 --- a/packages/stream_video/lib/src/call/state/mixins/state_call_actions_mixin.dart +++ b/packages/stream_video/lib/src/call/state/mixins/state_call_actions_mixin.dart @@ -1,6 +1,7 @@ import 'package:state_notifier/state_notifier.dart'; import '../../../call_state.dart'; import '../../../logger/impl/tagged_logger.dart'; +import '../../../sfu/data/models/sfu_audio_bitrate.dart'; final _logger = taggedLogger(tag: 'SV:CallState:CallActions'); @@ -61,4 +62,12 @@ mixin StateCallActionsMixin on StateNotifier { egress: newEgress, ); } + + void setAudioBitrateProfile(SfuAudioBitrateProfile profile) { + _logger.v(() => '[setAudioBitrateProfile] profile:$profile'); + + state = state.copyWith( + audioBitrateProfile: profile, + ); + } } diff --git a/packages/stream_video/lib/src/call_state.dart b/packages/stream_video/lib/src/call_state.dart index e608e54d8..3e0b20162 100644 --- a/packages/stream_video/lib/src/call_state.dart +++ b/packages/stream_video/lib/src/call_state.dart @@ -5,6 +5,7 @@ import 'package:meta/meta.dart'; import 'call/call_type.dart'; import 'models/call_member_state.dart'; import 'models/models.dart'; +import 'sfu/data/models/sfu_audio_bitrate.dart'; import 
'webrtc/rtc_media_device/rtc_media_device.dart'; /// Represents the call's state. @@ -29,6 +30,7 @@ class CallState extends Equatable { isCaptioning: false, isBackstage: false, isAudioProcessing: false, + audioBitrateProfile: SfuAudioBitrateProfile.voiceStandard, settings: const CallSettings(), egress: const CallEgress(), rtmpIngress: '', @@ -69,6 +71,7 @@ class CallState extends Equatable { required this.isCaptioning, required this.isBackstage, required this.isAudioProcessing, + required this.audioBitrateProfile, required this.settings, required this.egress, required this.rtmpIngress, @@ -110,6 +113,7 @@ class CallState extends Equatable { final bool isCaptioning; final bool isBackstage; final bool isAudioProcessing; + final SfuAudioBitrateProfile audioBitrateProfile; final RtcMediaDevice? videoInputDevice; final RtcMediaDevice? audioInputDevice; final RtcMediaDevice? audioOutputDevice; @@ -178,6 +182,7 @@ class CallState extends Equatable { bool? isCaptioning, bool? isBackstage, bool? isAudioProcessing, + SfuAudioBitrateProfile? audioBitrateProfile, CallSettings? settings, CallEgress? egress, String? rtmpIngress, @@ -216,6 +221,7 @@ class CallState extends Equatable { isCaptioning: isCaptioning ?? this.isCaptioning, isBackstage: isBackstage ?? this.isBackstage, isAudioProcessing: isAudioProcessing ?? this.isAudioProcessing, + audioBitrateProfile: audioBitrateProfile ?? this.audioBitrateProfile, settings: settings ?? this.settings, egress: egress ?? this.egress, rtmpIngress: rtmpIngress ?? 
this.rtmpIngress, @@ -291,6 +297,7 @@ class CallState extends Equatable { isBroadcasting, isBackstage, isAudioProcessing, + audioBitrateProfile, settings, egress, rtmpIngress, diff --git a/packages/stream_video/lib/src/coordinator/open_api/open_api_extensions.dart b/packages/stream_video/lib/src/coordinator/open_api/open_api_extensions.dart index 4fef5efa4..b49ffd000 100644 --- a/packages/stream_video/lib/src/coordinator/open_api/open_api_extensions.dart +++ b/packages/stream_video/lib/src/coordinator/open_api/open_api_extensions.dart @@ -171,6 +171,7 @@ extension CallSettingsExt on open.CallSettingsResponse { micDefaultOn: audio.micDefaultOn, speakerDefaultOn: audio.speakerDefaultOn, noiseCancellation: audio.noiseCancellation?.toSettingsDomain(), + hifiAudioEnabled: audio.hifiAudioEnabled, ), video: StreamVideoSettings( accessRequestEnabled: video.accessRequestEnabled, diff --git a/packages/stream_video/lib/src/models/audio_configuration_policy.dart b/packages/stream_video/lib/src/models/audio_configuration_policy.dart index 442c540b8..029f13d1f 100644 --- a/packages/stream_video/lib/src/models/audio_configuration_policy.dart +++ b/packages/stream_video/lib/src/models/audio_configuration_policy.dart @@ -2,18 +2,62 @@ import 'package:stream_webrtc_flutter/stream_webrtc_flutter.dart' as rtc; /// A policy that defines how audio should be configured on both iOS and Android. 
/// -/// Use one of the predefined policies: -/// - [AudioConfigurationPolicy.call] - Optimized for voice/video calls -/// - [AudioConfigurationPolicy.livestream] - Optimized for livestream playback +/// ## Predefined Policies /// -/// Or create a custom configuration: +/// ### Broadcaster Policy +/// [AudioConfigurationPolicy.broadcaster] is designed for **active participation** +/// in calls, such as: +/// - Hosts or co-hosts in a livestream +/// - Participants in meetings or video calls +/// - Users who actively speak and interact +/// +/// This policy treats the call as a **voice/video communication session**. +/// +/// ### Viewer Policy +/// [AudioConfigurationPolicy.viewer] is designed for **passive consumption** +/// of audio/video content, such as: +/// - Viewers watching a livestream +/// - Audience members who only listen +/// - Users in watch-only mode +/// +/// This policy treats the call as **media playback**. +/// +/// ## Platform-Specific Differences +/// +/// ### Android +/// +/// **Broadcaster Policy:** +/// - Hardware volume buttons control **call volume** (not media volume) +/// - Echo cancellation and noise suppression are **enabled** +/// - Can automatically route between earpiece and speaker +/// +/// **Viewer Policy:** +/// - Hardware volume buttons control **media volume** +/// - No echo cancellation or audio processing (higher fidelity) +/// - Enables stereo playout +/// - Always uses speaker/media output path +/// - Treated as regular media playback +/// +/// ### iOS +/// +/// **Broadcaster Policy:** +/// - Voice processing **enabled** (echo cancellation, noise reduction) +/// - Compatible with CallKit for system call integration +/// - Optimized for voice clarity over audio quality +/// +/// **Viewer Policy:** +/// - Voice processing **bypassed** for pure, high-fidelity audio +/// - Always uses speaker by default +/// - Optimized for music/media quality +/// +/// ## Custom Configuration /// - [AudioConfigurationPolicy.custom] - Full 
control over platform settings sealed class AudioConfigurationPolicy { const AudioConfigurationPolicy(); - const factory AudioConfigurationPolicy.call() = CallAudioPolicy; + const factory AudioConfigurationPolicy.broadcaster() = BroadcasterAudioPolicy; - const factory AudioConfigurationPolicy.livestream() = LivestreamAudioPolicy; + const factory AudioConfigurationPolicy.viewer() = ViewerAudioPolicy; /// Custom policy allowing full control over platform-specific settings. /// @@ -33,11 +77,23 @@ sealed class AudioConfigurationPolicy { }); rtc.AndroidAudioConfiguration getAndroidConfiguration(); + + bool get bypassVoiceProcessing; } -/// Audio policy optimized for normal video/audio calls. -class CallAudioPolicy extends AudioConfigurationPolicy { - const CallAudioPolicy(); +/// Audio policy optimized for active participation in calls. +/// +/// Use this policy when users have an active role such as: +/// - Meeting participants who speak +/// - Livestream hosts or co-hosts +/// - Interactive voice/video communication +/// +/// This enables voice processing (echo cancellation, noise suppression) +/// and treats the session as a call on both platforms. +/// +/// See [AudioConfigurationPolicy] for detailed platform-specific behavior. +class BroadcasterAudioPolicy extends AudioConfigurationPolicy { + const BroadcasterAudioPolicy(); @override rtc.AppleAudioConfiguration getAppleConfiguration({ @@ -71,11 +127,24 @@ class CallAudioPolicy extends AudioConfigurationPolicy { forceHandleAudioRouting: true, ); } + + @override + bool get bypassVoiceProcessing => false; } -/// Audio policy optimized for livestream/broadcast scenarios. -class LivestreamAudioPolicy extends AudioConfigurationPolicy { - const LivestreamAudioPolicy(); +/// Audio policy optimized for passive consumption of audio/video content. 
+/// +/// Use this policy when users have a passive role such as: +/// - Livestream viewers (watch-only) +/// - Audience members who only listen +/// - Playback of recorded content +/// +/// This disables voice processing and treats the session as media playback +/// on both platforms, providing higher audio fidelity. +/// +/// See [AudioConfigurationPolicy] for detailed platform-specific behavior. +class ViewerAudioPolicy extends AudioConfigurationPolicy { + const ViewerAudioPolicy(); @override rtc.AppleAudioConfiguration getAppleConfiguration({ @@ -107,6 +176,9 @@ class LivestreamAudioPolicy extends AudioConfigurationPolicy { forceHandleAudioRouting: false, ); } + + @override + bool get bypassVoiceProcessing => true; } /// Custom audio policy with full control over platform settings. @@ -115,19 +187,23 @@ class CustomAudioPolicy extends AudioConfigurationPolicy { /// provided. If a configuration is not provided, [basePolicy] will be used /// for that platform. const CustomAudioPolicy({ - this.basePolicy = const CallAudioPolicy(), + this.basePolicy = const BroadcasterAudioPolicy(), this.appleConfiguration, this.androidConfiguration, + this.bypassVoiceProcessing = false, }); /// The base policy used for platforms where a custom configuration - /// is not provided. Defaults to [CallAudioPolicy]. + /// is not provided. Defaults to [BroadcasterAudioPolicy]. final AudioConfigurationPolicy basePolicy; final rtc.AppleAudioConfiguration? appleConfiguration; final rtc.AndroidAudioConfiguration? 
androidConfiguration; + @override + final bool bypassVoiceProcessing; + @override rtc.AppleAudioConfiguration getAppleConfiguration({ bool defaultToSpeaker = false, diff --git a/packages/stream_video/lib/src/models/call_settings.dart b/packages/stream_video/lib/src/models/call_settings.dart index 931bcb0d4..8307df02d 100644 --- a/packages/stream_video/lib/src/models/call_settings.dart +++ b/packages/stream_video/lib/src/models/call_settings.dart @@ -99,6 +99,7 @@ class StreamAudioSettings extends MediaSettings { this.micDefaultOn = true, this.speakerDefaultOn = true, this.noiseCancellation, + this.hifiAudioEnabled = false, }); final bool opusDtxEnabled; @@ -107,6 +108,7 @@ class StreamAudioSettings extends MediaSettings { final bool micDefaultOn; final bool speakerDefaultOn; final StreamNoiceCancellingSettings? noiseCancellation; + final bool hifiAudioEnabled; @override List get props => [ @@ -117,6 +119,7 @@ class StreamAudioSettings extends MediaSettings { micDefaultOn, speakerDefaultOn, noiseCancellation, + hifiAudioEnabled, ]; AudioSettingsRequest toOpenDto() { @@ -128,6 +131,7 @@ class StreamAudioSettings extends MediaSettings { micDefaultOn: micDefaultOn, speakerDefaultOn: speakerDefaultOn, noiseCancellation: noiseCancellation?.toOpenDto(), + hifiAudioEnabled: hifiAudioEnabled, ); } } diff --git a/packages/stream_video/lib/src/sfu/data/events/sfu_event_mapper_extensions.dart b/packages/stream_video/lib/src/sfu/data/events/sfu_event_mapper_extensions.dart index 198e2dd72..02d9e626e 100644 --- a/packages/stream_video/lib/src/sfu/data/events/sfu_event_mapper_extensions.dart +++ b/packages/stream_video/lib/src/sfu/data/events/sfu_event_mapper_extensions.dart @@ -1,6 +1,7 @@ import '../../../../protobuf/video/sfu/event/events.pb.dart' as sfu_events; import '../../../../protobuf/video/sfu/models/models.pb.dart' as sfu_models; import '../../../webrtc/model/rtc_video_dimension.dart'; +import '../models/sfu_audio_bitrate.dart'; import '../models/sfu_audio_level.dart'; 
import '../models/sfu_audio_sender.dart'; import '../models/sfu_call_ended_reason.dart'; @@ -338,6 +339,36 @@ extension SfuCallEndedReasonExtension on sfu_models.CallEndedReason { } } +extension SfuAudioBitrateExtension on sfu_models.AudioBitrateProfile { + SfuAudioBitrateProfile toDomain() { + switch (this) { + case sfu_models + .AudioBitrateProfile + .AUDIO_BITRATE_PROFILE_VOICE_STANDARD_UNSPECIFIED: + return SfuAudioBitrateProfile.voiceStandard; + case sfu_models + .AudioBitrateProfile + .AUDIO_BITRATE_PROFILE_VOICE_HIGH_QUALITY: + return SfuAudioBitrateProfile.voiceHighQuality; + case sfu_models + .AudioBitrateProfile + .AUDIO_BITRATE_PROFILE_MUSIC_HIGH_QUALITY: + return SfuAudioBitrateProfile.musicHighQuality; + default: + return SfuAudioBitrateProfile.voiceStandard; + } + } +} + +extension SfuAudioBitrateExtension2 on sfu_models.AudioBitrate { + SfuAudioBitrate toDomain() { + return SfuAudioBitrate( + profile: profile.toDomain(), + bitrate: bitrate, + ); + } +} + extension SfuTrackTypeExtension on sfu_models.TrackType { SfuTrackType toDomain() { switch (this) { @@ -498,6 +529,9 @@ extension on sfu_models.PublishOption { bitrate: bitrate, fps: fps, useSingleLayer: useSingleLayer, + audioBitrateProfiles: audioBitrateProfiles + .map((it) => it.toDomain()) + .toList(), ); } } diff --git a/packages/stream_video/lib/src/sfu/data/models/sfu_audio_bitrate.dart b/packages/stream_video/lib/src/sfu/data/models/sfu_audio_bitrate.dart new file mode 100644 index 000000000..6e724757e --- /dev/null +++ b/packages/stream_video/lib/src/sfu/data/models/sfu_audio_bitrate.dart @@ -0,0 +1,39 @@ +import 'package:meta/meta.dart'; + +@immutable +class SfuAudioBitrate { + const SfuAudioBitrate({ + required this.profile, + this.bitrate, + }); + + final SfuAudioBitrateProfile profile; + final int? 
bitrate; + + @override + String toString() { + return 'SfuAudioBitrate{profile: $profile, bitrate: $bitrate}'; + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is SfuAudioBitrate && + runtimeType == other.runtimeType && + profile == other.profile && + bitrate == other.bitrate; + + @override + int get hashCode => profile.hashCode ^ bitrate.hashCode; +} + +enum SfuAudioBitrateProfile { + voiceStandard, + voiceHighQuality, + musicHighQuality; + + @override + String toString() { + return name; + } +} diff --git a/packages/stream_video/lib/src/sfu/data/models/sfu_model_mapper_extensions.dart b/packages/stream_video/lib/src/sfu/data/models/sfu_model_mapper_extensions.dart index 6b0434c1a..2202d0903 100644 --- a/packages/stream_video/lib/src/sfu/data/models/sfu_model_mapper_extensions.dart +++ b/packages/stream_video/lib/src/sfu/data/models/sfu_model_mapper_extensions.dart @@ -2,6 +2,7 @@ import '../../../../protobuf/video/sfu/models/models.pb.dart' as sfu_models; import '../../../../protobuf/video/sfu/signal_rpc/signal.pb.dart' as sfu; import '../../../webrtc/model/rtc_video_encoding.dart'; import '../../../webrtc/peer_type.dart'; +import 'sfu_audio_bitrate.dart'; import 'sfu_client_capability.dart'; import 'sfu_codec.dart'; import 'sfu_publish_options.dart'; @@ -89,6 +90,34 @@ extension SfuCodecMapper on SfuCodec { } } +extension SfuAudioBitrateProfileMapper on SfuAudioBitrateProfile { + sfu_models.AudioBitrateProfile toDTO() { + switch (this) { + case SfuAudioBitrateProfile.voiceStandard: + return sfu_models + .AudioBitrateProfile + .AUDIO_BITRATE_PROFILE_VOICE_STANDARD_UNSPECIFIED; + case SfuAudioBitrateProfile.voiceHighQuality: + return sfu_models + .AudioBitrateProfile + .AUDIO_BITRATE_PROFILE_VOICE_HIGH_QUALITY; + case SfuAudioBitrateProfile.musicHighQuality: + return sfu_models + .AudioBitrateProfile + .AUDIO_BITRATE_PROFILE_MUSIC_HIGH_QUALITY; + } + } +} + +extension SfuAudioBitrateMapper on SfuAudioBitrate { + 
sfu_models.AudioBitrate toDTO() { + return sfu_models.AudioBitrate( + profile: profile.toDTO(), + bitrate: bitrate, + ); + } +} + extension SfuPublishOptionsMapper on SfuPublishOptions { sfu_models.PublishOption toDTO() { return sfu_models.PublishOption( @@ -103,6 +132,10 @@ extension SfuPublishOptionsMapper on SfuPublishOptions { width: videoDimension?.width, height: videoDimension?.height, ), + useSingleLayer: useSingleLayer, + audioBitrateProfiles: audioBitrateProfiles + ?.map((it) => it.toDTO()) + .toList(), ); } } diff --git a/packages/stream_video/lib/src/sfu/data/models/sfu_publish_options.dart b/packages/stream_video/lib/src/sfu/data/models/sfu_publish_options.dart index 4fce157cc..8d6d11d3c 100644 --- a/packages/stream_video/lib/src/sfu/data/models/sfu_publish_options.dart +++ b/packages/stream_video/lib/src/sfu/data/models/sfu_publish_options.dart @@ -1,4 +1,5 @@ import '../../../webrtc/model/rtc_video_dimension.dart'; +import 'sfu_audio_bitrate.dart'; import 'sfu_codec.dart'; import 'sfu_track_type.dart'; @@ -13,6 +14,7 @@ class SfuPublishOptions { this.bitrate, this.fps, this.useSingleLayer = false, + this.audioBitrateProfiles, }); /// The unique identifier for the publish request. @@ -67,8 +69,10 @@ class SfuPublishOptions { /// For SVC codecs, prefer using the L1T3 (single spatial, 3 temporal layers) mode instead. final bool useSingleLayer; + final List? 
audioBitrateProfiles; + @override String toString() { - return 'SfuPublishOptions{id: $id, codec: $codec, trackType: $trackType, videoDimension: $videoDimension, maxSpatialLayers: $maxSpatialLayers, maxTemporalLayers: $maxTemporalLayers, bitrate: $bitrate, fps: $fps, useSingleLayer: $useSingleLayer}'; + return 'SfuPublishOptions{id: $id, codec: $codec, trackType: $trackType, videoDimension: $videoDimension, maxSpatialLayers: $maxSpatialLayers, maxTemporalLayers: $maxTemporalLayers, bitrate: $bitrate, fps: $fps, useSingleLayer: $useSingleLayer, audioBitrateProfiles: $audioBitrateProfiles}'; } } diff --git a/packages/stream_video/lib/src/sfu/sfu_extensions.dart b/packages/stream_video/lib/src/sfu/sfu_extensions.dart index f0223abc1..954733b5c 100644 --- a/packages/stream_video/lib/src/sfu/sfu_extensions.dart +++ b/packages/stream_video/lib/src/sfu/sfu_extensions.dart @@ -14,6 +14,7 @@ import '../sfu/data/models/sfu_video_sender.dart'; import '../utils/string.dart'; import '../webrtc/model/rtc_video_dimension.dart'; import 'data/events/sfu_events.dart'; +import 'data/models/sfu_audio_bitrate.dart'; import 'data/models/sfu_connection_info.dart'; import 'data/models/sfu_model_mapper_extensions.dart'; import 'data/models/sfu_participant.dart'; @@ -317,6 +318,10 @@ extension SfuPublishOptionsJsonX on SfuPublishOptions { 'useSingleLayer': useSingleLayer, 'bitrate': bitrate, 'fps': fps, + 'use_single_layer': useSingleLayer, + 'audio_bitrate_profiles': audioBitrateProfiles + ?.map((it) => it.toJson()) + .toList(), }; } } @@ -333,6 +338,15 @@ extension SfuCodecJsonX on SfuCodec { } } +extension SfuAudioBitrateX on SfuAudioBitrate { + Map toJson() { + return { + 'profile': profile.toString(), + 'bitrate': bitrate, + }; + } +} + extension SfuVideoLayerSettingJsonX on SfuVideoLayerSetting { Map toJson() { return { diff --git a/packages/stream_video/lib/src/stream_video.dart b/packages/stream_video/lib/src/stream_video.dart index 0c787f76a..24213c3e4 100644 --- 
a/packages/stream_video/lib/src/stream_video.dart +++ b/packages/stream_video/lib/src/stream_video.dart @@ -65,6 +65,7 @@ import 'utils/result.dart'; import 'utils/standard.dart'; import 'utils/subscriptions.dart'; import 'webrtc/rtc_manager.dart'; +import 'webrtc/rtc_media_device/rtc_media_device_notifier.dart'; import 'webrtc/sdp/policy/sdp_policy.dart'; const _tag = 'SV:Client'; @@ -189,16 +190,15 @@ class StreamVideo extends Disposable { _state.user.value = user; if (CurrentPlatform.isAndroid || CurrentPlatform.isIos) { - rtc.WebRTC.initialize( - options: { - if (CurrentPlatform.isAndroid) - 'androidAudioConfiguration': options.audioConfigurationPolicy - .getAndroidConfiguration() - .toMap(), - }, - ).then((_) { - webrtcInitializationCompleter.complete(); - }); + RtcMediaDeviceNotifier.instance + .reinitializeAudioConfiguration(options.audioConfigurationPolicy) + .then((_) { + if (precacheGenericSdps) { + unawaited(RtcManager.cacheGenericSdp()); + } + + webrtcInitializationCompleter.complete(); + }); } else { webrtcInitializationCompleter.complete(); } @@ -242,10 +242,6 @@ class StreamVideo extends Disposable { }), ); - if (precacheGenericSdps) { - unawaited(RtcManager.cacheGenericSdp()); - } - if (options.autoConnect) { unawaited( connect( @@ -1377,7 +1373,8 @@ class StreamVideoOptions { this.latencySettings = const LatencySettings(), this.retryPolicy = const RetryPolicy(), this.defaultCallPreferences, - this.sdpPolicy = const SdpPolicy(spdEditingEnabled: false), + //TODO: Allow sdp munging for development purposees, remove it before merging + this.sdpPolicy = const SdpPolicy(), this.audioProcessor, this.logPriority = Priority.none, this.logHandlerFunction = _defaultLogHandler, @@ -1392,7 +1389,8 @@ class StreamVideoOptions { 'Use audioConfigurationPolicy instead. 
This parameter will be removed in the next major release.', ) this.androidAudioConfiguration, - AudioConfigurationPolicy audioConfigurationPolicy = const CallAudioPolicy(), + AudioConfigurationPolicy audioConfigurationPolicy = + const BroadcasterAudioPolicy(), }) : audioConfigurationPolicy = androidAudioConfiguration == null ? audioConfigurationPolicy : CustomAudioPolicy(androidConfiguration: androidAudioConfiguration); @@ -1406,7 +1404,8 @@ class StreamVideoOptions { this.latencySettings = const LatencySettings(), this.retryPolicy = const RetryPolicy(), this.defaultCallPreferences, - this.sdpPolicy = const SdpPolicy(spdEditingEnabled: false), + //TODO: Allow sdp munging for development purposees, remove it before merging + this.sdpPolicy = const SdpPolicy(), this.audioProcessor, this.logPriority = Priority.none, this.logHandlerFunction = _defaultLogHandler, @@ -1421,7 +1420,7 @@ class StreamVideoOptions { 'Use audioConfigurationPolicy instead. Usage of this parameter will be ignored in this constructor.', ) this.androidAudioConfiguration, - this.audioConfigurationPolicy = const CallAudioPolicy(), + this.audioConfigurationPolicy = const BroadcasterAudioPolicy(), }); final String coordinatorRpcUrl; @@ -1457,13 +1456,26 @@ class StreamVideoOptions { /// The audio configuration policy for the SDK. 
/// - /// Use predefined policies: - /// - [AudioConfigurationPolicy.call] - Optimized for voice/video calls (default) - /// - [AudioConfigurationPolicy.livestream] - Optimized for livestream playback + /// **Broadcaster Policy** (default) - For active participation: + /// - Use for: meeting participants, livestream hosts, active speakers + /// - Enables echo cancellation and noise suppression + /// - Volume buttons control call volume (Android) + /// - Optimized for voice clarity + /// + /// **Viewer Policy** - For passive consumption: + /// - Use for: livestream viewers, watch-only audience + /// - Disables audio processing for higher fidelity + /// - Volume buttons control media volume (Android) + /// - Optimized for audio quality + /// - Enables stereo playout /// - /// Or create a custom configuration: + /// Use predefined policies: + /// - [AudioConfigurationPolicy.broadcaster] - Voice/video calls (default) + /// - [AudioConfigurationPolicy.viewer] - Livestream playback /// - [AudioConfigurationPolicy.custom] - Full control over platform settings /// - /// Defaults to [CallAudioPolicy]. + /// Defaults to [BroadcasterAudioPolicy]. + /// Once set it will be applied for all calls. + /// To change the audio configuration policy after initial setup, use [RtcMediaDeviceNotifier.reinitializeAudioConfiguration]. 
final AudioConfigurationPolicy audioConfigurationPolicy; } diff --git a/packages/stream_video/lib/src/webrtc/codecs_helper.dart b/packages/stream_video/lib/src/webrtc/codecs_helper.dart index 7cd66646b..650e7bf48 100644 --- a/packages/stream_video/lib/src/webrtc/codecs_helper.dart +++ b/packages/stream_video/lib/src/webrtc/codecs_helper.dart @@ -5,9 +5,12 @@ import 'dart:math'; import 'package:collection/collection.dart'; import 'package:stream_webrtc_flutter/stream_webrtc_flutter.dart' as rtc; +import '../sfu/data/models/sfu_audio_bitrate.dart'; import '../sfu/data/models/sfu_publish_options.dart'; +import 'model/rtc_audio_bitrate_preset.dart'; import 'model/rtc_video_dimension.dart'; import 'model/rtc_video_parameters.dart'; +import 'rtc_track/rtc_track_publish_options.dart'; class RTCRtpEncodingWithDimensions extends rtc.RTCRtpEncoding { RTCRtpEncodingWithDimensions({ @@ -28,6 +31,7 @@ class RTCRtpEncodingWithDimensions extends rtc.RTCRtpEncoding { final double height; } +/// Determines the most optimal video layers for the given track. List findOptimalVideoLayers({ required RtcVideoDimension dimensions, required SfuPublishOptions publishOptions, @@ -180,3 +184,23 @@ bool isSvcCodec(String? codecOrMimeType) { String toScalabilityMode(int spatialLayers, int temporalLayers) => 'L${spatialLayers}T$temporalLayers${spatialLayers > 1 ? '_KEY' : ''}'; + +/// Prepares the audio layer for the given track. +/// Based on the provided audio bitrate profile, we apply the appropriate bitrate. +List findOptimalAudioLayers({ + required SfuPublishOptions publishOptions, + required RtcTrackPublishOptions trackPublishOptions, +}) { + final profileConfig = publishOptions.audioBitrateProfiles?.firstWhereOrNull( + (config) => config.profile == trackPublishOptions.audioBitrateProfile, + ); + final maxBitrate = + profileConfig?.bitrate ?? 
+ { + SfuAudioBitrateProfile.voiceStandard: AudioBitrate.voiceStandard, + SfuAudioBitrateProfile.voiceHighQuality: AudioBitrate.voiceHighQuality, + SfuAudioBitrateProfile.musicHighQuality: AudioBitrate.musicHighQuality, + }[trackPublishOptions.audioBitrateProfile]; + + return [rtc.RTCRtpEncoding(maxBitrate: maxBitrate)]; +} diff --git a/packages/stream_video/lib/src/webrtc/media/audio_constraints.dart b/packages/stream_video/lib/src/webrtc/media/audio_constraints.dart index 227bd8f51..e8a1eaeff 100644 --- a/packages/stream_video/lib/src/webrtc/media/audio_constraints.dart +++ b/packages/stream_video/lib/src/webrtc/media/audio_constraints.dart @@ -10,6 +10,7 @@ class AudioConstraints extends MediaConstraints { this.autoGainControl = true, this.highPassFilter = false, this.typingNoiseDetection = true, + this.channelCount = 1, }); /// Attempt to use noiseSuppression option (if supported by the platform) @@ -35,6 +36,9 @@ class AudioConstraints extends MediaConstraints { /// Defaults to true. final bool typingNoiseDetection; + /// The number of audio channels to use. + final int channelCount; + @override Map toMap() { final constraints = {}; @@ -54,6 +58,7 @@ class AudioConstraints extends MediaConstraints { {'googHighpassFilter': highPassFilter}, {'googTypingNoiseDetection': typingNoiseDetection}, {'DtlsSrtpKeyAgreement': true}, + {'channelCount': channelCount}, ]; } @@ -80,6 +85,7 @@ class AudioConstraints extends MediaConstraints { bool? autoGainControl, bool? highPassFilter, bool? typingNoiseDetection, + int? channelCount, }) => AudioConstraints( deviceId: deviceId ?? this.deviceId, noiseSuppression: noiseSuppression ?? this.noiseSuppression, @@ -87,5 +93,6 @@ class AudioConstraints extends MediaConstraints { autoGainControl: autoGainControl ?? this.autoGainControl, highPassFilter: highPassFilter ?? this.highPassFilter, typingNoiseDetection: typingNoiseDetection ?? this.typingNoiseDetection, + channelCount: channelCount ?? 
this.channelCount, ); } diff --git a/packages/stream_video/lib/src/webrtc/model/rtc_audio_bitrate_preset.dart b/packages/stream_video/lib/src/webrtc/model/rtc_audio_bitrate_preset.dart index d1788b919..8f0427bc4 100644 --- a/packages/stream_video/lib/src/webrtc/model/rtc_audio_bitrate_preset.dart +++ b/packages/stream_video/lib/src/webrtc/model/rtc_audio_bitrate_preset.dart @@ -1,8 +1,9 @@ class AudioBitrate { - static const telephone = 12000; - static const speech = 20000; - static const music = 32000; - static const musicStereo = 48000; - static const musicHighQuality = 64000; - static const musicHighQualityStereo = 96000; + // Voice profiles + static const voiceStandard = 64000; // clear speech, VoIP+ + static const voiceHighQuality = 128000; // podcast / studio-level speech + + // Music profiles + static const musicStandard = 64000; // lightweight mono music + static const musicHighQuality = 128000; // transparent stereo, good default } diff --git a/packages/stream_video/lib/src/webrtc/model/rtc_tracks_info.dart b/packages/stream_video/lib/src/webrtc/model/rtc_tracks_info.dart index 1a3fbe7b4..8d0932be2 100644 --- a/packages/stream_video/lib/src/webrtc/model/rtc_tracks_info.dart +++ b/packages/stream_video/lib/src/webrtc/model/rtc_tracks_info.dart @@ -11,6 +11,9 @@ class RtcTrackInfo { required this.codec, required this.muted, required this.publishOptionId, + required this.dtx, + required this.stereo, + required this.red, }); final String? 
trackId; @@ -21,10 +24,15 @@ class RtcTrackInfo { final bool muted; final int publishOptionId; + final bool dtx; + final bool stereo; + final bool red; + @override String toString() { return 'RtcTrackInfo{trackId: $trackId, trackType: $trackType, ' - 'mid: $mid, layers: $layers, codec: $codec, muted: $muted, publishOptionId: $publishOptionId}'; + 'mid: $mid, layers: $layers, codec: $codec, muted: $muted, publishOptionId: $publishOptionId, ' + 'dtx: $dtx, stereo: $stereo, red: $red}'; } } diff --git a/packages/stream_video/lib/src/webrtc/rtc_manager.dart b/packages/stream_video/lib/src/webrtc/rtc_manager.dart index 36cc50228..729eef2d9 100644 --- a/packages/stream_video/lib/src/webrtc/rtc_manager.dart +++ b/packages/stream_video/lib/src/webrtc/rtc_manager.dart @@ -5,8 +5,10 @@ import 'package:flutter/widgets.dart'; import 'package:rxdart/transformers.dart'; import 'package:sdp_transform/sdp_transform.dart'; import 'package:stream_webrtc_flutter/stream_webrtc_flutter.dart' as rtc; +import 'package:webrtc_interface/webrtc_interface.dart'; import '../../stream_video.dart'; +import '../call/state/call_state_notifier.dart'; import '../disposable.dart'; import '../errors/video_error_composer.dart'; import '../sfu/data/models/sfu_model_parser.dart'; @@ -15,13 +17,13 @@ import '../sfu/data/models/sfu_video_sender.dart'; import '../utils/extensions.dart'; import 'codecs_helper.dart' as codecs; import 'codecs_helper.dart'; -import 'model/rtc_audio_bitrate_preset.dart'; import 'model/rtc_tracks_info.dart'; import 'model/rtc_video_encoding.dart'; import 'peer_connection.dart'; import 'rtc_audio_api/rtc_audio_api.dart' show checkIfAudioOutputChangeSupported; import 'rtc_parser.dart'; +import 'rtc_track/rtc_track_publish_options.dart'; import 'traced_peer_connection.dart'; import 'transceiver_cache.dart'; @@ -54,6 +56,7 @@ class RtcManager extends Disposable { required this.publisher, required this.subscriber, required this.publishOptions, + required this.stateManager, required 
StreamVideo streamVideo, }) : _streamVideo = streamVideo { subscriber.onTrack = _onRemoteTrack; @@ -64,12 +67,14 @@ class RtcManager extends Disposable { final String sessionId; final StreamCallCid callCid; final String? publisherId; + final CallStateNotifier stateManager; final TracedStreamPeerConnection? publisher; final TracedStreamPeerConnection subscriber; final StreamVideo _streamVideo; final transceiversManager = TransceiverManager(); List publishOptions; + AudioConstraints _defaultAudioConstraints = const AudioConstraints(); final tracks = {}; @@ -203,6 +208,21 @@ class RtcManager extends Disposable { _logger.v(() => '[onRemoteTrack] published: ${remoteTrack.trackId}'); } + Future changeDefaultAudioConstraints( + AudioConstraints constraints, + ) async { + _defaultAudioConstraints = constraints; + + final localAudioTracks = tracks.values.whereType(); + for (final track in localAudioTracks) { + await muteTrack(trackId: track.trackId, stopTrackOnMute: true); + } + + for (final track in localAudioTracks) { + await unmuteTrack(trackId: track.trackId); + } + } + Future unpublishTrack({required String trackId}) async { final publishedTrack = tracks.remove(trackId); @@ -227,7 +247,7 @@ class RtcManager extends Disposable { for (final publishOption in publishOptions) { if (publishOption.trackType != publishedTrack.trackType) continue; - final transceiver = transceiversManager.get(publishOption); + final transceiver = transceiversManager.get(publishOption)?.transceiver; try { if (transceiver != null) { @@ -303,7 +323,11 @@ class RtcManager extends Disposable { // take the track from the existing transceiver for the same track type, // and publish it with the new publish options - final result = await _addTransceiver(item.track, publishOption); + final result = await _addTransceiver( + item.track, + publishOption, + item.trackPublishOptions, + ); if (result is Success) { final localTrack = tracks[item.track.trackId] as RtcLocalTrack?; @@ -335,7 +359,7 @@ class 
RtcManager extends Disposable { // it is safe to stop the track here, it is a clone await item.transceiver.sender.track?.stop(); - await item.transceiver.sender.replaceTrack(null); + await _updateTransceiver(item.transceiver, null, publishOption.trackType); } } @@ -602,6 +626,11 @@ extension PublisherRtcManager on RtcManager { ); if (track is RtcLocalAudioTrack) { + final audioSettings = stateManager.callState.settings.audio; + final stereo = + track.trackType == SfuTrackType.screenShareAudio || + audioSettings.hifiAudioEnabled; + return RtcTrackInfo( trackId: track.mediaTrack.id, trackType: track.trackType, @@ -614,6 +643,9 @@ extension PublisherRtcManager on RtcManager { layers: [], codec: transceiverCache.publishOption.codec, muted: transceiverCache.transceiver.sender.track?.enabled ?? true, + stereo: stereo, + dtx: audioSettings.opusDtxEnabled, + red: audioSettings.redundantCodingEnabled, ); } else if (track is RtcLocalVideoTrack) { final encodings = codecs.findOptimalVideoLayers( @@ -630,6 +662,9 @@ extension PublisherRtcManager on RtcManager { transceiverInitialIndex, sdp, ), + dtx: false, + red: false, + stereo: false, codec: transceiverCache.publishOption.codec, muted: transceiverCache.transceiver.sender.track?.enabled ?? 
true, layers: encodings.map((it) { @@ -695,22 +730,43 @@ extension PublisherRtcManager on RtcManager { tracks[track.trackId] = track; var updatedTrack = track.copyWith(stopTrackOnMute: stopTrackOnMute); - final transceivers = []; for (final option in publishOptions) { if (option.trackType != track.trackType) continue; - final transceiverResult = await _addTransceiver(track, option); - if (transceiverResult is Failure) return transceiverResult; - transceivers.add(transceiverResult.getDataOrNull()!.transceiver); + final cachedTransceiver = transceiversManager.get(option)?.transceiver; + if (cachedTransceiver == null) { + final transceiverResult = await _addTransceiver( + track, + option, + RtcTrackPublishOptions( + audioBitrateProfile: stateManager.callState.audioBitrateProfile, + ), + ); - _logger.v(() => '[publishAudioTrack] transceiver: $transceiverResult'); + if (transceiverResult is Failure) return transceiverResult; - updatedTrack = updatedTrack.copyWith( - clonedTracks: [ - ...updatedTrack.clonedTracks, - transceiverResult.getDataOrNull()!.mediaTrack, - ], - ); + _logger.v(() => '[publishAudioTrack] transceiver: $transceiverResult'); + + updatedTrack = updatedTrack.copyWith( + clonedTracks: [ + ...updatedTrack.clonedTracks, + transceiverResult.getDataOrNull()!.mediaTrack, + ], + ); + } else { + await _updateTransceiver( + cachedTransceiver, + track, + track.trackType, + trackPublishOptions: RtcTrackPublishOptions( + audioBitrateProfile: stateManager.callState.audioBitrateProfile, + ), + ); + + _logger.v( + () => '[publishAudioTrack] cached transceiver: $cachedTransceiver', + ); + } } // Notify listeners. 
@@ -751,9 +807,14 @@ extension PublisherRtcManager on RtcManager { for (final option in publishOptions) { if (option.trackType != track.trackType) continue; - final cashedTransceiver = transceiversManager.get(option); + final cashedTransceiver = transceiversManager.get(option)?.transceiver; if (cashedTransceiver == null) { - final transceiverResult = await _addTransceiver(track, option); + final transceiverResult = await _addTransceiver( + track, + option, + const RtcTrackPublishOptions(), + ); + if (transceiverResult is Failure) return transceiverResult; updatedTrack = updatedTrack.copyWith( @@ -767,14 +828,11 @@ extension PublisherRtcManager on RtcManager { () => '[publishVideoTrack] new transceiver: $transceiverResult', ); } else { - final previousTrack = cashedTransceiver.sender.track; - - // don't stop the track if we are re-publishing the same track - if (previousTrack != null && previousTrack != track.mediaTrack) { - await previousTrack.stop(); - } - - await cashedTransceiver.sender.replaceTrack(track.mediaTrack); + await _updateTransceiver( + cashedTransceiver, + track, + track.trackType, + ); _logger.v( () => '[publishVideoTrack] cached transceiver: $cashedTransceiver', @@ -846,6 +904,7 @@ extension PublisherRtcManager on RtcManager { _addTransceiver( RtcLocalTrack track, SfuPublishOptions publishOptions, + RtcTrackPublishOptions trackPublishOptions, ) async { if (publisher == null) { return Result.error('Publisher is not created, cannot add transceiver'); @@ -863,11 +922,13 @@ extension PublisherRtcManager on RtcManager { ); if (track is RtcLocalAudioTrack) { + final audioEncodings = codecs.findOptimalAudioLayers( + publishOptions: publishOptions, + trackPublishOptions: trackPublishOptions, + ); transceiverResult = await publisher!.addAudioTransceiver( track: mediaTrackClone, - encodings: [ - rtc.RTCRtpEncoding(rid: 'a', maxBitrate: AudioBitrate.music), - ], + encodings: audioEncodings, ); } else if (track is RtcLocalVideoTrack) { final videoEncodings = 
codecs.findOptimalVideoLayers( @@ -899,6 +960,7 @@ extension PublisherRtcManager on RtcManager { track.copyWith(mediaTrack: mediaTrackClone), publishOptions, transceiver, + trackPublishOptions, ); return Result.success( @@ -909,18 +971,83 @@ extension PublisherRtcManager on RtcManager { ); } - Future> muteTrack({required String trackId}) async { - final track = tracks[trackId]; - if (track == null) { + Future _updateTransceiver( + RTCRtpTransceiver transceiver, + RtcLocalTrack? track, + SfuTrackType trackType, { + RtcTrackPublishOptions? trackPublishOptions, + }) async { + final previousTrack = transceiver.sender.track; + + // don't stop the track if we are re-publishing the same track + if (previousTrack != null && previousTrack != track?.mediaTrack) { + await previousTrack.stop(); + } + + await transceiver.sender.replaceTrack(track?.mediaTrack); + + if (track is RtcLocalAudioTrack) { + await _updateAudioPublishOptions( + track.trackType, + trackPublishOptions ?? const RtcTrackPublishOptions(), + ); + } + } + + Future _updateAudioPublishOptions( + SfuTrackType trackType, + RtcTrackPublishOptions options, + ) async { + for (final publishOption in publishOptions) { + if (publishOption.trackType != trackType) continue; + final transceiverBundle = transceiversManager.get(publishOption); + if (transceiverBundle == null) continue; + + final transceiver = transceiverBundle.transceiver; + final current = transceiverBundle.trackPublishOptions; + if (current.audioBitrateProfile != options.audioBitrateProfile) { + final encodings = codecs.findOptimalAudioLayers( + publishOptions: publishOption, + trackPublishOptions: options, + ); + + if (encodings.isNotEmpty) { + final params = transceiver.sender.parameters; + if (params.encodings != null && params.encodings!.isNotEmpty) { + final currentEncoding = params.encodings!.first; + final targetEncoding = encodings.first; + if (currentEncoding.maxBitrate != targetEncoding.maxBitrate) { + currentEncoding.maxBitrate = 
targetEncoding.maxBitrate; + } + + await transceiver.sender.setParameters(params); + } + } + } + + transceiverBundle.trackPublishOptions = options; + } + } + + Future> muteTrack({ + required String trackId, + bool? stopTrackOnMute, + }) async { + final originalTrack = tracks[trackId]; + + if (originalTrack == null) { _logger.w(() => 'muteTrack: track not found'); return Result.error('Track not found'); } - if (track is! RtcLocalTrack) { + if (originalTrack is! RtcLocalTrack) { _logger.w(() => 'muteTrack: track is not local'); return Result.error('Track is not local'); } + final track = originalTrack.copyWith(stopTrackOnMute: stopTrackOnMute); + tracks[trackId] = track; + track.disable(); if (track.stopTrackOnMute) { // Releases the track and stops the permission indicator. @@ -949,7 +1076,22 @@ extension PublisherRtcManager on RtcManager { .getTransceiversForTrack(track.trackId) .toList(); - final updatedTrack = await track.recreate(transceivers); + final updatedTrack = await track.recreate( + transceivers, + mediaConstraints: track.trackType == SfuTrackType.audio + ? _defaultAudioConstraints + : null, + ); + + if (track is RtcLocalAudioTrack) { + await _updateAudioPublishOptions( + track.trackType, + RtcTrackPublishOptions( + audioBitrateProfile: stateManager.callState.audioBitrateProfile, + ), + ); + } + tracks[trackId] = updatedTrack; onLocalTrackMuted?.call(updatedTrack, false); @@ -964,9 +1106,9 @@ extension PublisherRtcManager on RtcManager { } Future> createAudioTrack({ - AudioConstraints constraints = const AudioConstraints(), + AudioConstraints? 
constraints, }) async { - _logger.d(() => '[createAudioTrack] constraints: ${constraints.toMap()}'); + _logger.d(() => '[createAudioTrack] constraints: ${constraints?.toMap()}'); if (publisher == null || publisherId == null) { return Result.error( @@ -977,7 +1119,7 @@ extension PublisherRtcManager on RtcManager { try { final audioTrack = await RtcLocalTrack.audio( trackIdPrefix: publisherId!, - constraints: constraints, + constraints: constraints ?? _defaultAudioConstraints, ); return Result.success(audioTrack); @@ -1413,7 +1555,7 @@ extension RtcManagerTrackHelper on RtcManager { final audioTrackResult = await createAudioTrack( constraints: - (constraints ?? const AudioConstraints()) as AudioConstraints, + (constraints ?? _defaultAudioConstraints) as AudioConstraints, ); return audioTrackResult.fold( success: (it) => publishAudioTrack(track: it.data), @@ -1480,20 +1622,6 @@ extension RtcManagerTrackHelper on RtcManager { return Result.failure(VideoErrors.compose(e, stk)); } } - - /// Applies the Android audio configuration from the policy. 
- Future> setAndroidAudioConfiguration({ - required AudioConfigurationPolicy policy, - }) async { - try { - await rtc.Helper.setAndroidAudioConfiguration( - policy.getAndroidConfiguration(), - ); - return const Result.success(none); - } catch (e, stk) { - return Result.failure(VideoErrors.compose(e, stk)); - } - } } extension on RtcLocalTrack { diff --git a/packages/stream_video/lib/src/webrtc/rtc_manager_factory.dart b/packages/stream_video/lib/src/webrtc/rtc_manager_factory.dart index 22979293a..42899db0e 100644 --- a/packages/stream_video/lib/src/webrtc/rtc_manager_factory.dart +++ b/packages/stream_video/lib/src/webrtc/rtc_manager_factory.dart @@ -1,6 +1,7 @@ import '../../protobuf/video/sfu/models/models.pb.dart'; import '../../stream_video.dart'; import '../call/session/call_session_config.dart'; +import '../call/state/call_state_notifier.dart'; import '../sfu/data/models/sfu_publish_options.dart'; import '../sfu/sfu_client.dart'; import 'peer_connection_factory.dart'; @@ -30,6 +31,7 @@ class RtcManagerFactory { Future makeRtcManager({ required SfuClient sfuClient, + required CallStateNotifier stateManager, required StreamVideo streamVideo, ClientDetails? clientDetails, String? 
publisherId, @@ -69,6 +71,7 @@ class RtcManagerFactory { publisher: publisher, subscriber: subscriber, publishOptions: publishOptions, + stateManager: stateManager, streamVideo: streamVideo, ); } diff --git a/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart b/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart index 086ed5838..ceba34b30 100644 --- a/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart +++ b/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart @@ -4,13 +4,10 @@ import 'package:meta/meta.dart'; import 'package:rxdart/rxdart.dart'; import 'package:stream_webrtc_flutter/stream_webrtc_flutter.dart' as rtc; -import '../../../open_api/video/coordinator/api.dart'; +import '../../../stream_video.dart'; import '../../call/stats/tracer.dart'; import '../../errors/video_error_composer.dart'; -import '../../platform_detector/platform_detector.dart'; import '../../utils/extensions.dart'; -import '../../utils/result.dart'; -import 'rtc_media_device.dart'; abstract class InterruptionEvent {} @@ -215,4 +212,21 @@ class RtcMediaDeviceNotifier { _tracer.trace('navigator.mediaDevices.regainAndroidAudioFocus', null); return rtc.Helper.regainAndroidAudioFocus(); } + + /// Reinitializes the audio configuration for the WebRTC instance. + /// + /// This is used to reinitialize the audio configuration when the audio configuration policy changes. + /// When called after initial setup, it will automatically + /// dispose all existing peer connections, tracks, and streams, then recreate + /// the audio device module and peer connection factory with the new parameters. 
+ Future reinitializeAudioConfiguration(AudioConfigurationPolicy policy) { + return rtc.WebRTC.initialize( + options: { + 'reinitialize': true, + 'bypassVoiceProcessing': policy.bypassVoiceProcessing, + if (CurrentPlatform.isAndroid) + 'androidAudioConfiguration': policy.getAndroidConfiguration().toMap(), + }, + ); + } } diff --git a/packages/stream_video/lib/src/webrtc/rtc_track/rtc_track_publish_options.dart b/packages/stream_video/lib/src/webrtc/rtc_track/rtc_track_publish_options.dart new file mode 100644 index 000000000..d8cc4b730 --- /dev/null +++ b/packages/stream_video/lib/src/webrtc/rtc_track/rtc_track_publish_options.dart @@ -0,0 +1,9 @@ +import '../../sfu/data/models/sfu_audio_bitrate.dart'; + +class RtcTrackPublishOptions { + const RtcTrackPublishOptions({ + this.audioBitrateProfile = SfuAudioBitrateProfile.voiceStandard, + }); + + final SfuAudioBitrateProfile audioBitrateProfile; +} diff --git a/packages/stream_video/lib/src/webrtc/sdp/editor/action/action_set_opus_stereo_enabled.dart b/packages/stream_video/lib/src/webrtc/sdp/editor/action/action_set_opus_stereo_enabled.dart new file mode 100644 index 000000000..16fc17e99 --- /dev/null +++ b/packages/stream_video/lib/src/webrtc/sdp/editor/action/action_set_opus_stereo_enabled.dart @@ -0,0 +1,68 @@ +import '../../../../logger/impl/tagged_logger.dart'; +import '../../attributes/fmtp.dart'; +import '../../attributes/rtpmap.dart'; +import '../../codec/sdp_codec.dart'; +import '../../sdp.dart'; +import 'sdp_edit_action.dart'; + +final _logger = taggedLogger(tag: 'SV:SetOpusStereoEnabled'); + +// Temp SDP munging to add stereo=1 parameter before SFU switch is set for Flutter SDK +class SetOpusStereoEnabledAction implements SdpEditAction { + const SetOpusStereoEnabledAction({ + required this.enabled, + required this.rtpmapParser, + required this.fmtpParser, + }); + + final bool enabled; + final RtpmapParser rtpmapParser; + final FmtpParser fmtpParser; + + @override + void execute(List sdpLines) { + String? 
opusPayloadType; + for (var index = 0; index < sdpLines.length; index++) { + final sdpLine = sdpLines[index]; + if (sdpLine.isRtpmap) { + final rtpmap = rtpmapParser.parse(sdpLine); + if (rtpmap == null) continue; + if (rtpmap.encodingName.toUpperCase() == + AudioCodec.opus.alias.toUpperCase()) { + opusPayloadType = rtpmap.payloadType; + } + } else if (opusPayloadType != null && sdpLine.isFmtp) { + final original = fmtpParser.parse(sdpLine); + if (original == null || original.payloadType != opusPayloadType) { + _logger.w( + () => '[setOpusStereoEnabled] rejected (not opus): "$original"', + ); + continue; + } + final modified = original.setStereoEnabled(enabled); + _logger.v(() => '[setOpusStereoEnabled] original: "$original"'); + _logger.v(() => '[setOpusStereoEnabled] modified: "$modified"'); + sdpLines[index] = modified.toSdpLine(); + } + } + } +} + +extension on Fmtp { + Fmtp setStereoEnabled(bool enabled) { + if (enabled) { + return copyWith( + parameters: { + ...parameters, + 'stereo': '1', + }, + ); + } else { + return copyWith( + parameters: { + ...parameters, + }..remove('stereo'), + ); + } + } +} diff --git a/packages/stream_video/lib/src/webrtc/sdp/editor/action/sdp_edit_action_factory.dart b/packages/stream_video/lib/src/webrtc/sdp/editor/action/sdp_edit_action_factory.dart index 06972af58..fb067ea78 100644 --- a/packages/stream_video/lib/src/webrtc/sdp/editor/action/sdp_edit_action_factory.dart +++ b/packages/stream_video/lib/src/webrtc/sdp/editor/action/sdp_edit_action_factory.dart @@ -6,10 +6,12 @@ import '../../specification/media_description.dart'; import '../rule/rule_prioritize_codec.dart'; import '../rule/rule_set_opus_dtx_enabled.dart'; import '../rule/rule_set_opus_red_enabled.dart'; +import '../rule/rule_set_opus_stereo_enabled.dart'; import '../rule/sdp_munging_rule.dart'; import 'action_prioritize_codec.dart'; import 'action_set_opus_dtx_enabled.dart'; import 'action_set_opus_red_enabled.dart'; +import 'action_set_opus_stereo_enabled.dart'; 
import 'sdp_edit_action.dart'; @internal @@ -38,6 +40,12 @@ class SdpEditActionFactory { rtpmapParser: _rtpmapParser, fmtpParser: _fmtpParser, ); + } else if (rule is SetOpusStereoEnabledRule) { + return SetOpusStereoEnabledAction( + enabled: rule.enabled, + rtpmapParser: _rtpmapParser, + fmtpParser: _fmtpParser, + ); } throw UnsupportedError('Not supported: $rule'); } diff --git a/packages/stream_video/lib/src/webrtc/sdp/editor/rule/rule_set_opus_stereo_enabled.dart b/packages/stream_video/lib/src/webrtc/sdp/editor/rule/rule_set_opus_stereo_enabled.dart new file mode 100644 index 000000000..1d359e42a --- /dev/null +++ b/packages/stream_video/lib/src/webrtc/sdp/editor/rule/rule_set_opus_stereo_enabled.dart @@ -0,0 +1,22 @@ +import '../../sdp.dart'; +import 'sdp_munging_rule.dart'; + +// Temp SDP munging to add stereo=1 parameter before SFU switch is set for Flutter SDK +class SetOpusStereoEnabledRule extends SdpMungingRule { + const SetOpusStereoEnabledRule({ + required this.enabled, + super.platforms, + super.types = const [SdpType.remoteOffer], + }); + + final bool enabled; + + @override + String get key => 'set-opus-stereo-enabled'; + + @override + String toString() { + return 'SetOpusStereoEnabledRule{enabled: $enabled, ' + 'types: $types, platforms: $platforms}'; + } +} diff --git a/packages/stream_video/lib/src/webrtc/sdp/editor/rule/sdp_munging_rule.dart b/packages/stream_video/lib/src/webrtc/sdp/editor/rule/sdp_munging_rule.dart index 0944befc5..cdaefcd79 100644 --- a/packages/stream_video/lib/src/webrtc/sdp/editor/rule/sdp_munging_rule.dart +++ b/packages/stream_video/lib/src/webrtc/sdp/editor/rule/sdp_munging_rule.dart @@ -6,6 +6,7 @@ import '../../editor/rule/rule_set_opus_red_enabled.dart'; import '../../sdp.dart'; import 'rule_prioritize_codec.dart'; import 'rule_set_opus_dtx_enabled.dart'; +import 'rule_set_opus_stereo_enabled.dart'; abstract class SdpMungingRule with EquatableMixin { const SdpMungingRule({ @@ -31,6 +32,12 @@ abstract class 
SdpMungingRule with EquatableMixin { List types, }) = SetOpusRedEnabledRule; + const factory SdpMungingRule.setOpusStereoEnabled({ + required bool enabled, + List platforms, + List types, + }) = SetOpusStereoEnabledRule; + String get key; final List types; diff --git a/packages/stream_video/lib/src/webrtc/sdp/editor/sdp_editor.dart b/packages/stream_video/lib/src/webrtc/sdp/editor/sdp_editor.dart index d121696bd..d7b8d8f06 100644 --- a/packages/stream_video/lib/src/webrtc/sdp/editor/sdp_editor.dart +++ b/packages/stream_video/lib/src/webrtc/sdp/editor/sdp_editor.dart @@ -8,5 +8,7 @@ abstract class SdpEditor { set opusRedEnabled(bool value); + set opusStereoEnabled(bool value); + String? edit(Sdp? sdp); } diff --git a/packages/stream_video/lib/src/webrtc/sdp/editor/sdp_editor_impl.dart b/packages/stream_video/lib/src/webrtc/sdp/editor/sdp_editor_impl.dart index befc46cba..fac0d79fa 100644 --- a/packages/stream_video/lib/src/webrtc/sdp/editor/sdp_editor_impl.dart +++ b/packages/stream_video/lib/src/webrtc/sdp/editor/sdp_editor_impl.dart @@ -10,6 +10,7 @@ import '../sdp.dart'; import 'action/sdp_edit_action_factory.dart'; import 'rule/rule_set_opus_dtx_enabled.dart'; import 'rule/rule_set_opus_red_enabled.dart'; +import 'rule/rule_set_opus_stereo_enabled.dart'; import 'rule/rule_toggle.dart'; import 'rule/sdp_munging_rule.dart'; import 'sdp_editor.dart'; @@ -22,6 +23,9 @@ class NoOpSdpEditor implements SdpEditor { @override set opusRedEnabled(bool value) {} + @override + set opusStereoEnabled(bool value) {} + @override String? edit(Sdp? sdp) { return sdp?.value; @@ -62,6 +66,16 @@ class SdpEditorImpl implements SdpEditor { } } + @override + set opusStereoEnabled(bool value) { + for (final toggle in internalRules) { + if (toggle.rule is SetOpusStereoEnabledRule) { + toggle.enabled = value; + break; + } + } + } + @override String? edit(Sdp? 
sdp) { if (sdp == null) { @@ -126,7 +140,7 @@ extension on StringBuffer { List _createRules() { return [ SdpRuleToggle( - enabled: true, + enabled: false, rule: const SdpMungingRule.prioritizeCodec( platforms: [PlatformType.android], types: [SdpType.localOffer], @@ -135,15 +149,22 @@ List _createRules() { ), SdpRuleToggle( rule: const SdpMungingRule.setOpusDtxEnabled( - enabled: true, + enabled: false, types: [SdpType.localOffer], ), ), SdpRuleToggle( rule: const SdpMungingRule.setOpusRedEnabled( - enabled: true, + enabled: false, types: [SdpType.localOffer], ), ), + SdpRuleToggle( + enabled: true, + rule: const SdpMungingRule.setOpusStereoEnabled( + enabled: true, + types: [SdpType.remoteOffer, SdpType.localAnswer], + ), + ), ]; } diff --git a/packages/stream_video/lib/src/webrtc/transceiver_cache.dart b/packages/stream_video/lib/src/webrtc/transceiver_cache.dart index fa02ace64..9db810cbf 100644 --- a/packages/stream_video/lib/src/webrtc/transceiver_cache.dart +++ b/packages/stream_video/lib/src/webrtc/transceiver_cache.dart @@ -4,17 +4,20 @@ import 'package:stream_webrtc_flutter/stream_webrtc_flutter.dart'; import '../sfu/data/models/sfu_publish_options.dart'; import '../sfu/data/models/sfu_track_type.dart'; import 'rtc_track/rtc_track.dart'; +import 'rtc_track/rtc_track_publish_options.dart'; class TransceiverCache { TransceiverCache({ required this.track, required this.publishOption, required this.transceiver, + required this.trackPublishOptions, }); RtcLocalTrack track; SfuPublishOptions publishOption; RTCRtpTransceiver transceiver; + RtcTrackPublishOptions trackPublishOptions; @override String toString() { @@ -41,12 +44,14 @@ class TransceiverManager { RtcLocalTrack track, SfuPublishOptions publishOption, RTCRtpTransceiver transceiver, + RtcTrackPublishOptions trackPublishOptions, ) { _transceivers.add( TransceiverCache( track: track, publishOption: publishOption, transceiver: transceiver, + trackPublishOptions: trackPublishOptions, ), ); @@ -54,11 +59,11 @@ 
class TransceiverManager { } /// Gets the transceiver for the given publish option. - RTCRtpTransceiver? get(SfuPublishOptions publishOption) { + TransceiverCache? get(SfuPublishOptions publishOption) { return _findTransceiver( publishOption.trackType, publishOption.id, - )?.transceiver; + ); } /// Gets the last transceiver for the given track type and publish option id. diff --git a/packages/stream_video/lib/stream_video.dart b/packages/stream_video/lib/stream_video.dart index 73b5d8c85..fcf5e6985 100644 --- a/packages/stream_video/lib/stream_video.dart +++ b/packages/stream_video/lib/stream_video.dart @@ -30,6 +30,7 @@ export 'src/network_monitor_settings.dart'; export 'src/platform_detector/platform_detector.dart'; export 'src/push_notification/push_notification_manager.dart'; export 'src/retry/retry_policy.dart'; +export 'src/sfu/data/models/sfu_audio_bitrate.dart'; export 'src/sfu/data/models/sfu_client_capability.dart'; export 'src/sfu/data/models/sfu_connection_quality.dart'; export 'src/sfu/data/models/sfu_error.dart'; diff --git a/packages/stream_video/pubspec.yaml b/packages/stream_video/pubspec.yaml index 431a9e7e9..be253b9c4 100644 --- a/packages/stream_video/pubspec.yaml +++ b/packages/stream_video/pubspec.yaml @@ -31,7 +31,10 @@ dependencies: rxdart: ^0.28.0 sdp_transform: ^0.3.2 state_notifier: ^1.0.0 - stream_webrtc_flutter: ^2.2.4 + stream_webrtc_flutter: + git: + url: https://github.com/GetStream/webrtc-flutter.git + ref: feat/hifi-stereo-playout synchronized: ^3.1.0 system_info2: ^4.0.0 tart: ^0.6.0 diff --git a/packages/stream_video_filters/pubspec.yaml b/packages/stream_video_filters/pubspec.yaml index da3fe6365..6468b267d 100644 --- a/packages/stream_video_filters/pubspec.yaml +++ b/packages/stream_video_filters/pubspec.yaml @@ -15,7 +15,10 @@ dependencies: sdk: flutter plugin_platform_interface: ^2.0.2 stream_video: ^1.2.4 - stream_webrtc_flutter: ^2.2.4 + stream_webrtc_flutter: + git: + url: https://github.com/GetStream/webrtc-flutter.git + 
ref: feat/hifi-stereo-playout dev_dependencies: flutter_lints: ^6.0.0 diff --git a/packages/stream_video_flutter/example/pubspec.yaml b/packages/stream_video_flutter/example/pubspec.yaml index 47bf41aa5..2b1645abd 100644 --- a/packages/stream_video_flutter/example/pubspec.yaml +++ b/packages/stream_video_flutter/example/pubspec.yaml @@ -31,7 +31,10 @@ dependencies: stream_video: ^1.2.4 stream_video_flutter: ^1.2.4 stream_video_push_notification: ^1.2.4 - stream_webrtc_flutter: ^2.2.4 + stream_webrtc_flutter: + git: + url: https://github.com/GetStream/webrtc-flutter.git + ref: feat/hifi-stereo-playout dependency_overrides: stream_video: diff --git a/packages/stream_video_flutter/pubspec.yaml b/packages/stream_video_flutter/pubspec.yaml index bb574dd97..40ce916b7 100644 --- a/packages/stream_video_flutter/pubspec.yaml +++ b/packages/stream_video_flutter/pubspec.yaml @@ -25,7 +25,10 @@ dependencies: rate_limiter: ^1.0.0 rxdart: ^0.28.0 stream_video: ^1.2.4 - stream_webrtc_flutter: ^2.2.4 + stream_webrtc_flutter: + git: + url: https://github.com/GetStream/webrtc-flutter.git + ref: feat/hifi-stereo-playout visibility_detector: ^0.4.0+2 dev_dependencies: diff --git a/packages/stream_video_noise_cancellation/pubspec.yaml b/packages/stream_video_noise_cancellation/pubspec.yaml index f903279c9..061e7ed99 100644 --- a/packages/stream_video_noise_cancellation/pubspec.yaml +++ b/packages/stream_video_noise_cancellation/pubspec.yaml @@ -15,7 +15,10 @@ dependencies: sdk: flutter plugin_platform_interface: ^2.0.2 stream_video: ^1.2.4 - stream_webrtc_flutter: ^2.2.4 + stream_webrtc_flutter: + git: + url: https://github.com/GetStream/webrtc-flutter.git + ref: feat/hifi-stereo-playout dev_dependencies: flutter_lints: ^6.0.0 diff --git a/packages/stream_video_push_notification/pubspec.yaml b/packages/stream_video_push_notification/pubspec.yaml index 83865f95b..1e7573d60 100644 --- a/packages/stream_video_push_notification/pubspec.yaml +++ 
b/packages/stream_video_push_notification/pubspec.yaml @@ -23,7 +23,10 @@ dependencies: shared_preferences: ^2.5.3 stream_video: ^1.2.4 stream_video_flutter: ^1.2.4 - stream_webrtc_flutter: ^2.2.4 + stream_webrtc_flutter: + git: + url: https://github.com/GetStream/webrtc-flutter.git + ref: feat/hifi-stereo-playout uuid: ^4.5.1 dev_dependencies: From cb54277fc371038ef3495a7f4318138ee805eabe Mon Sep 17 00:00:00 2001 From: Brazol Date: Tue, 10 Feb 2026 11:54:43 +0100 Subject: [PATCH 3/9] ios stereo support --- packages/stream_video/lib/src/call/call.dart | 6 +++ .../models/audio_configuration_policy.dart | 1 - .../rtc_media_device_notifier.dart | 48 ++++++++++++++++++- 3 files changed, 52 insertions(+), 3 deletions(-) diff --git a/packages/stream_video/lib/src/call/call.dart b/packages/stream_video/lib/src/call/call.dart index 99cf7a9e9..04ebabb95 100644 --- a/packages/stream_video/lib/src/call/call.dart +++ b/packages/stream_video/lib/src/call/call.dart @@ -3159,6 +3159,12 @@ class Call { _stateManager.setAudioBitrateProfile(profile); final stereo = profile == SfuAudioBitrateProfile.musicHighQuality; + + // On iOS, toggle stereo playout preference when switching HiFi audio modes. 
+ if (CurrentPlatform.isIos) { + unawaited(rtc.Helper.setStereoPlayoutPreferred(stereo)); + } + _session?.rtcManager?.changeDefaultAudioConstraints( AudioConstraints( noiseSuppression: !stereo, diff --git a/packages/stream_video/lib/src/models/audio_configuration_policy.dart b/packages/stream_video/lib/src/models/audio_configuration_policy.dart index 029f13d1f..bee8f9c68 100644 --- a/packages/stream_video/lib/src/models/audio_configuration_policy.dart +++ b/packages/stream_video/lib/src/models/audio_configuration_policy.dart @@ -156,7 +156,6 @@ class ViewerAudioPolicy extends AudioConfigurationPolicy { appleAudioCategoryOptions: const { rtc.AppleAudioCategoryOption.defaultToSpeaker, rtc.AppleAudioCategoryOption.mixWithOthers, - rtc.AppleAudioCategoryOption.allowBluetooth, rtc.AppleAudioCategoryOption.allowBluetoothA2DP, rtc.AppleAudioCategoryOption.allowAirPlay, }, diff --git a/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart b/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart index ceba34b30..b8647ab31 100644 --- a/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart +++ b/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart @@ -33,6 +33,8 @@ class RtcMediaDeviceNotifier { rtc.navigator.mediaDevices.ondevicechange = _onDeviceChange; // Triggers the initial device change event to get the devices list. _onDeviceChange(null); + + _listenForAudioProcessingStateChanges(); } static final instance = RtcMediaDeviceNotifier._internal(); @@ -115,6 +117,37 @@ class RtcMediaDeviceNotifier { .asBroadcastStream(); } + void _listenForAudioProcessingStateChanges() { + rtc.eventStream.listen((data) { + if (data.isEmpty) return; + + final event = data.keys.first; + if (event != 'onAudioProcessingStateChanged') return; + + final values = data.values.first; + if (values is! 
Map) return; + + final stereoPlayoutEnabled = + values['stereoPlayoutEnabled'] as bool? ?? false; + final voiceProcessingEnabled = + values['voiceProcessingEnabled'] as bool? ?? false; + final voiceProcessingBypassed = + values['voiceProcessingBypassed'] as bool? ?? false; + final voiceProcessingAGCEnabled = + values['voiceProcessingAGCEnabled'] as bool? ?? false; + + _tracer.trace( + 'audioProcessingStateChanged', + { + 'stereoPlayoutEnabled': stereoPlayoutEnabled, + 'voiceProcessingEnabled': voiceProcessingEnabled, + 'voiceProcessingBypassed': voiceProcessingBypassed, + 'voiceProcessingAGCEnabled': voiceProcessingAGCEnabled, + }, + ); + }); + } + Future _onDeviceChange(_) async { await enumerateDevices(); } @@ -219,8 +252,10 @@ class RtcMediaDeviceNotifier { /// When called after initial setup, it will automatically /// dispose all existing peer connections, tracks, and streams, then recreate /// the audio device module and peer connection factory with the new parameters. - Future reinitializeAudioConfiguration(AudioConfigurationPolicy policy) { - return rtc.WebRTC.initialize( + Future reinitializeAudioConfiguration( + AudioConfigurationPolicy policy, + ) async { + await rtc.WebRTC.initialize( options: { 'reinitialize': true, 'bypassVoiceProcessing': policy.bypassVoiceProcessing, @@ -228,5 +263,14 @@ class RtcMediaDeviceNotifier { 'androidAudioConfiguration': policy.getAndroidConfiguration().toMap(), }, ); + + // On iOS, configure stereo playout preference based on the policy. + // When voice processing is bypassed (e.g. ViewerAudioPolicy), stereo + // playout is preferred for high-fidelity audio. 
+ if (CurrentPlatform.isIos) { + await rtc.Helper.setStereoPlayoutPreferred( + policy.bypassVoiceProcessing, + ); + } } } From da561dfd253ab9803772e88d645b5096edeb517e Mon Sep 17 00:00:00 2001 From: Brazol Date: Tue, 10 Feb 2026 14:51:11 +0100 Subject: [PATCH 4/9] fixes --- packages/stream_video/lib/src/call/call.dart | 6 ++++-- packages/stream_video/lib/src/webrtc/codecs_helper.dart | 4 ++-- packages/stream_video/lib/src/webrtc/rtc_manager.dart | 4 ++-- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/packages/stream_video/lib/src/call/call.dart b/packages/stream_video/lib/src/call/call.dart index 04ebabb95..6fd9dedc5 100644 --- a/packages/stream_video/lib/src/call/call.dart +++ b/packages/stream_video/lib/src/call/call.dart @@ -3140,9 +3140,9 @@ class Call { return result; } - void setAudioBitrateProfile(SfuAudioBitrateProfile profile) { + Result setAudioBitrateProfile(SfuAudioBitrateProfile profile) { if (!state.value.settings.audio.hifiAudioEnabled) { - throw ArgumentError('High Fidelity audio is not enabled for this call'); + return Result.error('High Fidelity audio is not enabled for this call'); } if (_streamVideo.isAudioProcessorConfigured()) { @@ -3173,6 +3173,8 @@ class Call { channelCount: stereo ? 
2 : 1, ), ); + + return const Result.success(none); } bool checkIfAudioOutputChangeSupported() { diff --git a/packages/stream_video/lib/src/webrtc/codecs_helper.dart b/packages/stream_video/lib/src/webrtc/codecs_helper.dart index 650e7bf48..de04d2c55 100644 --- a/packages/stream_video/lib/src/webrtc/codecs_helper.dart +++ b/packages/stream_video/lib/src/webrtc/codecs_helper.dart @@ -125,8 +125,8 @@ List withSimulcastConstraints({ // provide only one layer 320x240 (q), the one with the highest quality layers = optimalVideoLayers.where((layer) => layer.rid == 'f').toList(); } else if (size <= 640) { - // provide two layers, 160x120 (q) and 640x480 (h) - layers = optimalVideoLayers.where((layer) => layer.rid != 'h').toList(); + // provide two layers, 320x240 (h) and 640x480 (f) + layers = optimalVideoLayers.where((layer) => layer.rid != 'q').toList(); } else { // provide three layers for sizes > 640x480 layers = optimalVideoLayers; diff --git a/packages/stream_video/lib/src/webrtc/rtc_manager.dart b/packages/stream_video/lib/src/webrtc/rtc_manager.dart index 729eef2d9..6192b9dcc 100644 --- a/packages/stream_video/lib/src/webrtc/rtc_manager.dart +++ b/packages/stream_video/lib/src/webrtc/rtc_manager.dart @@ -642,7 +642,7 @@ extension PublisherRtcManager on RtcManager { ), layers: [], codec: transceiverCache.publishOption.codec, - muted: transceiverCache.transceiver.sender.track?.enabled ?? true, + muted: !(transceiverCache.transceiver.sender.track?.enabled ?? false), stereo: stereo, dtx: audioSettings.opusDtxEnabled, red: audioSettings.redundantCodingEnabled, @@ -666,7 +666,7 @@ extension PublisherRtcManager on RtcManager { red: false, stereo: false, codec: transceiverCache.publishOption.codec, - muted: transceiverCache.transceiver.sender.track?.enabled ?? true, + muted: !(transceiverCache.transceiver.sender.track?.enabled ?? false), layers: encodings.map((it) { return RtcVideoLayer( rid: it.rid ?? 
'', From c87e4940fe909812541c1970abf44526b1d33e5b Mon Sep 17 00:00:00 2001 From: Brazol Date: Fri, 13 Feb 2026 11:44:35 +0100 Subject: [PATCH 5/9] updated signatures --- packages/stream_video/lib/src/call/call.dart | 2 +- .../src/webrtc/rtc_media_device/rtc_media_device_notifier.dart | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/stream_video/lib/src/call/call.dart b/packages/stream_video/lib/src/call/call.dart index 6fd9dedc5..f472d3062 100644 --- a/packages/stream_video/lib/src/call/call.dart +++ b/packages/stream_video/lib/src/call/call.dart @@ -3162,7 +3162,7 @@ class Call { // On iOS, toggle stereo playout preference when switching HiFi audio modes. if (CurrentPlatform.isIos) { - unawaited(rtc.Helper.setStereoPlayoutPreferred(stereo)); + unawaited(rtc.Helper.setiOSStereoPlayoutPreferred(stereo)); } _session?.rtcManager?.changeDefaultAudioConstraints( diff --git a/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart b/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart index b8647ab31..c3fcf8bc5 100644 --- a/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart +++ b/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart @@ -268,7 +268,7 @@ class RtcMediaDeviceNotifier { // When voice processing is bypassed (e.g. ViewerAudioPolicy), stereo // playout is preferred for high-fidelity audio. 
if (CurrentPlatform.isIos) { - await rtc.Helper.setStereoPlayoutPreferred( + await rtc.Helper.setiOSStereoPlayoutPreferred( policy.bypassVoiceProcessing, ); } From 75b3dd7de7f8fc02909ee6a3769b43236b6b2f02 Mon Sep 17 00:00:00 2001 From: Brazol Date: Mon, 16 Feb 2026 13:20:30 +0100 Subject: [PATCH 6/9] hifi policy --- .../models/audio_configuration_policy.dart | 79 +++++++++++++++++++ 1 file changed, 79 insertions(+) diff --git a/packages/stream_video/lib/src/models/audio_configuration_policy.dart b/packages/stream_video/lib/src/models/audio_configuration_policy.dart index bee8f9c68..4809b8e6e 100644 --- a/packages/stream_video/lib/src/models/audio_configuration_policy.dart +++ b/packages/stream_video/lib/src/models/audio_configuration_policy.dart @@ -22,6 +22,15 @@ import 'package:stream_webrtc_flutter/stream_webrtc_flutter.dart' as rtc; /// /// This policy treats the call as **media playback**. /// +/// ### HiFi Policy +/// [AudioConfigurationPolicy.hiFi] is designed for **broadcasting high-fidelity +/// audio**, such as: +/// - Live music streaming +/// - Screen sharing with stereo audio +/// +/// This policy bypasses voice processing to enable stereo I/O and +/// treats the session as media on both platforms. 
+/// /// ## Platform-Specific Differences /// /// ### Android @@ -38,6 +47,11 @@ import 'package:stream_webrtc_flutter/stream_webrtc_flutter.dart' as rtc; /// - Always uses speaker/media output path /// - Treated as regular media playback /// +/// **HiFi Policy:** +/// - Same audio settings as Viewer Policy (media volume, no processing) +/// - Enables stereo input and output on the Audio Device Module +/// - Optimized for broadcasting high-fidelity / stereo audio +/// /// ### iOS /// /// **Broadcaster Policy:** @@ -50,6 +64,11 @@ import 'package:stream_webrtc_flutter/stream_webrtc_flutter.dart' as rtc; /// - Always uses speaker by default /// - Optimized for music/media quality /// +/// **HiFi Policy:** +/// - Voice processing **bypassed** for pure, high-fidelity audio +/// - Uses `playAndRecord` category for active microphone use +/// - Always uses speaker by default +/// /// ## Custom Configuration /// - [AudioConfigurationPolicy.custom] - Full control over platform settings sealed class AudioConfigurationPolicy { @@ -59,6 +78,8 @@ sealed class AudioConfigurationPolicy { const factory AudioConfigurationPolicy.viewer() = ViewerAudioPolicy; + const factory AudioConfigurationPolicy.hiFi() = HiFiAudioPolicy; + /// Custom policy allowing full control over platform-specific settings. /// /// Use this when you need specific audio configurations that differ from @@ -180,6 +201,64 @@ class ViewerAudioPolicy extends AudioConfigurationPolicy { bool get bypassVoiceProcessing => true; } +/// Audio policy optimized for broadcasting high-fidelity audio. +/// +/// Use this policy when users need to **send** high-quality audio such as: +/// - Live music streaming or karaoke hosts +/// - Podcast hosts or professional streamers +/// - Screen sharing with stereo audio content +/// +/// This policy bypasses voice processing (echo cancellation, noise suppression) +/// and configures the audio layer for stereo I/O and media-quality output. 
+/// +/// On Android, this enables stereo input and output on the Audio Device Module +/// (ADM), which is required for capturing and sending stereo audio. Screen share +/// audio also benefits from this — it will be captured in stereo when this +/// policy is active. +/// +/// **Requirements for stereo capture on Android:** +/// 1. The call type must have HiFi audio enabled (Stream Dashboard) +/// 2. The audio bitrate profile must be set to `musicHighQuality` +/// 3. This policy (or any policy with `bypassVoiceProcessing: true`) must be set +/// +/// See [AudioConfigurationPolicy] for detailed platform-specific behavior. +class HiFiAudioPolicy extends AudioConfigurationPolicy { + const HiFiAudioPolicy(); + + @override + rtc.AppleAudioConfiguration getAppleConfiguration({ + bool defaultToSpeaker = false, + }) { + return rtc.AppleAudioConfiguration( + appleAudioMode: rtc.AppleAudioMode.default_, + appleAudioCategory: rtc.AppleAudioCategory.playAndRecord, + appleAudioCategoryOptions: const { + rtc.AppleAudioCategoryOption.defaultToSpeaker, + rtc.AppleAudioCategoryOption.mixWithOthers, + rtc.AppleAudioCategoryOption.allowBluetoothA2DP, + rtc.AppleAudioCategoryOption.allowAirPlay, + }, + ); + } + + @override + rtc.AndroidAudioConfiguration getAndroidConfiguration() { + return rtc.AndroidAudioConfiguration( + androidAudioMode: rtc.AndroidAudioMode.normal, + androidAudioStreamType: rtc.AndroidAudioStreamType.music, + androidAudioAttributesUsageType: + rtc.AndroidAudioAttributesUsageType.media, + androidAudioAttributesContentType: + rtc.AndroidAudioAttributesContentType.music, + androidAudioFocusMode: rtc.AndroidAudioFocusMode.gain, + forceHandleAudioRouting: false, + ); + } + + @override + bool get bypassVoiceProcessing => true; +} + /// Custom audio policy with full control over platform settings. 
class CustomAudioPolicy extends AudioConfigurationPolicy { /// At least one of [appleConfiguration] or [androidConfiguration] should be From 913dfded78619722c50a1a2f7e930895cdaa1296 Mon Sep 17 00:00:00 2001 From: Brazol Date: Thu, 19 Feb 2026 12:10:19 +0100 Subject: [PATCH 7/9] local answer sdp munging --- .../lib/src/webrtc/peer_connection.dart | 14 +- .../lib/src/webrtc/rtc_manager.dart | 3 +- .../action/action_mirror_sprop_stereo.dart | 127 ++++++++++++++++++ .../action/sdp_edit_action_factory.dart | 16 ++- .../editor/rule/rule_mirror_sprop_stereo.dart | 19 +++ .../rule/rule_set_opus_stereo_enabled.dart | 22 --- .../sdp/editor/rule/sdp_munging_rule.dart | 7 +- .../lib/src/webrtc/sdp/editor/sdp_editor.dart | 2 - .../webrtc/sdp/editor/sdp_editor_impl.dart | 28 +--- .../stream_video/lib/src/webrtc/sdp/sdp.dart | 51 +++++-- .../src/webrtc/traced_peer_connection.dart | 5 +- 11 files changed, 226 insertions(+), 68 deletions(-) create mode 100644 packages/stream_video/lib/src/webrtc/sdp/editor/action/action_mirror_sprop_stereo.dart create mode 100644 packages/stream_video/lib/src/webrtc/sdp/editor/rule/rule_mirror_sprop_stereo.dart delete mode 100644 packages/stream_video/lib/src/webrtc/sdp/editor/rule/rule_set_opus_stereo_enabled.dart diff --git a/packages/stream_video/lib/src/webrtc/peer_connection.dart b/packages/stream_video/lib/src/webrtc/peer_connection.dart index f1423b9f9..88bb98fa8 100644 --- a/packages/stream_video/lib/src/webrtc/peer_connection.dart +++ b/packages/stream_video/lib/src/webrtc/peer_connection.dart @@ -190,8 +190,13 @@ class StreamPeerConnection extends Disposable { /// Creates an answer and sets it as the local description. /// + /// The [offerSdp] is the remote offer that prompted this answer. It is + /// passed through to the SDP editor so rules (e.g. stereo mirroring) can + /// inspect the offer when processing the local answer. + /// /// The remote description must be set before calling this method. 
- Future> createAnswer([ + Future> createAnswer( + String offerSdp, [ Map mediaConstraints = const {}, ]) async { try { @@ -199,8 +204,13 @@ class StreamPeerConnection extends Disposable { () => '[createLocalAnswer] #$type; mediaConstraints: $mediaConstraints', ); final localAnswer = await pc.createAnswer(mediaConstraints); - final modifiedSdp = sdpEditor.edit(localAnswer.sdp?.let(Sdp.localAnswer)); + final sdp = localAnswer.sdp; + + final modifiedSdp = sdp != null + ? sdpEditor.edit(Sdp.localAnswer(sdp, offerSdp: offerSdp)) + : null; final modifiedAnswer = localAnswer.copyWith(sdp: modifiedSdp); + _logger.v( () => '[createLocalAnswer] #$type; sdp:\n${modifiedAnswer.sdp}', ); diff --git a/packages/stream_video/lib/src/webrtc/rtc_manager.dart b/packages/stream_video/lib/src/webrtc/rtc_manager.dart index 6192b9dcc..70276ce65 100644 --- a/packages/stream_video/lib/src/webrtc/rtc_manager.dart +++ b/packages/stream_video/lib/src/webrtc/rtc_manager.dart @@ -73,6 +73,7 @@ class RtcManager extends Disposable { final StreamVideo _streamVideo; final transceiversManager = TransceiverManager(); + List publishOptions; AudioConstraints _defaultAudioConstraints = const AudioConstraints(); @@ -151,7 +152,7 @@ class RtcManager extends Disposable { final result = await subscriber.setRemoteOffer(offerSdp); if (result.isFailure) return null; - final rtcAnswer = await subscriber.createAnswer(); + final rtcAnswer = await subscriber.createAnswer(offerSdp); return rtcAnswer.getDataOrNull()?.sdp; } diff --git a/packages/stream_video/lib/src/webrtc/sdp/editor/action/action_mirror_sprop_stereo.dart b/packages/stream_video/lib/src/webrtc/sdp/editor/action/action_mirror_sprop_stereo.dart new file mode 100644 index 000000000..75455e30a --- /dev/null +++ b/packages/stream_video/lib/src/webrtc/sdp/editor/action/action_mirror_sprop_stereo.dart @@ -0,0 +1,127 @@ +import '../../../../logger/impl/tagged_logger.dart'; +import '../../attributes/fmtp.dart'; +import '../../attributes/rtpmap.dart'; 
+import '../../codec/sdp_codec.dart'; +import '../../sdp.dart'; +import 'sdp_edit_action.dart'; + +final _logger = taggedLogger(tag: 'SV:MirrorSpropStereo'); + +/// Adds `stereo=1` to the Opus fmtp line in the answer for audio sections +/// whose offer contained `sprop-stereo=1`. +class MirrorSpropStereoAction implements SdpEditAction { + const MirrorSpropStereoAction({ + required this.offerSdp, + required this.rtpmapParser, + required this.fmtpParser, + }); + + final String? offerSdp; + final RtpmapParser rtpmapParser; + final FmtpParser fmtpParser; + + @override + void execute(List sdpLines) { + final offer = offerSdp; + if (offer == null) { + _logger.d(() => '[execute] no offerSdp available, skipping'); + return; + } + + final stereoMids = _findSpropStereoMids(offer); + if (stereoMids.isEmpty) { + _logger.d(() => '[execute] no sprop-stereo=1 mids in offer'); + return; + } + + _logger.d(() => '[execute] sprop-stereo mids from offer: $stereoMids'); + _addStereoToMatchingMids(sdpLines, stereoMids); + } + + Set _findSpropStereoMids(String offerSdp) { + final mids = {}; + final lines = offerSdp.split('\n'); + + var inAudioSection = false; + String? currentMid; + String? 
opusPayloadType; + + for (final rawLine in lines) { + final line = rawLine.trimRight(); + + if (line.startsWith('m=')) { + inAudioSection = line.startsWith('m=audio'); + currentMid = null; + opusPayloadType = null; + continue; + } + + if (!inAudioSection) continue; + + if (line.startsWith('a=mid:')) { + currentMid = line.substring('a=mid:'.length).trim(); + } else if (line.isRtpmap) { + final rtpmap = rtpmapParser.parse(line); + if (rtpmap != null && + rtpmap.encodingName.toUpperCase() == + AudioCodec.opus.alias.toUpperCase()) { + opusPayloadType = rtpmap.payloadType; + } + } else if (opusPayloadType != null && line.isFmtp) { + final fmtp = fmtpParser.parse(line); + if (fmtp != null && + fmtp.payloadType == opusPayloadType && + fmtp.parameters['sprop-stereo'] == '1' && + currentMid != null) { + mids.add(currentMid); + } + } + } + + return mids; + } + + void _addStereoToMatchingMids( + List sdpLines, + Set stereoMids, + ) { + var inAudioSection = false; + String? currentMid; + String? opusPayloadType; + + for (var i = 0; i < sdpLines.length; i++) { + final line = sdpLines[i]; + + if (line.startsWith('m=')) { + inAudioSection = line.startsWith('m=audio'); + currentMid = null; + opusPayloadType = null; + continue; + } + + if (!inAudioSection) continue; + + if (line.startsWith('a=mid:')) { + currentMid = line.substring('a=mid:'.length).trim(); + } else if (line.isRtpmap) { + final rtpmap = rtpmapParser.parse(line); + if (rtpmap != null && + rtpmap.encodingName.toUpperCase() == + AudioCodec.opus.alias.toUpperCase()) { + opusPayloadType = rtpmap.payloadType; + } + } else if (currentMid != null && opusPayloadType != null && line.isFmtp) { + final fmtp = fmtpParser.parse(line); + if (fmtp == null || fmtp.payloadType != opusPayloadType) continue; + if (!stereoMids.contains(currentMid)) continue; + if (fmtp.parameters['stereo'] == '1') continue; + + final modified = fmtp.copyWith( + parameters: {...fmtp.parameters, 'stereo': '1'}, + ); + + sdpLines[i] = modified.toSdpLine(); 
+ } + } + } +} diff --git a/packages/stream_video/lib/src/webrtc/sdp/editor/action/sdp_edit_action_factory.dart b/packages/stream_video/lib/src/webrtc/sdp/editor/action/sdp_edit_action_factory.dart index fb067ea78..4e0d1c53e 100644 --- a/packages/stream_video/lib/src/webrtc/sdp/editor/action/sdp_edit_action_factory.dart +++ b/packages/stream_video/lib/src/webrtc/sdp/editor/action/sdp_edit_action_factory.dart @@ -2,16 +2,17 @@ import 'package:meta/meta.dart'; import '../../attributes/fmtp.dart'; import '../../attributes/rtpmap.dart'; +import '../../sdp.dart'; import '../../specification/media_description.dart'; +import '../rule/rule_mirror_sprop_stereo.dart'; import '../rule/rule_prioritize_codec.dart'; import '../rule/rule_set_opus_dtx_enabled.dart'; import '../rule/rule_set_opus_red_enabled.dart'; -import '../rule/rule_set_opus_stereo_enabled.dart'; import '../rule/sdp_munging_rule.dart'; +import 'action_mirror_sprop_stereo.dart'; import 'action_prioritize_codec.dart'; import 'action_set_opus_dtx_enabled.dart'; import 'action_set_opus_red_enabled.dart'; -import 'action_set_opus_stereo_enabled.dart'; import 'sdp_edit_action.dart'; @internal @@ -20,7 +21,10 @@ class SdpEditActionFactory { final _rtpmapParser = RtpmapParser(); final _fmtpParser = FmtpParser(); - SdpEditAction create(SdpMungingRule rule) { + SdpEditAction create( + SdpMungingRule rule, { + Sdp? sdp, + }) { if (rule is PrioritizeCodecRule) { return PrioritizeCodecAction( codec: rule.codec, @@ -40,9 +44,9 @@ class SdpEditActionFactory { rtpmapParser: _rtpmapParser, fmtpParser: _fmtpParser, ); - } else if (rule is SetOpusStereoEnabledRule) { - return SetOpusStereoEnabledAction( - enabled: rule.enabled, + } else if (rule is MirrorSpropStereoRule) { + return MirrorSpropStereoAction( + offerSdp: sdp is LocalAnswerSdp ? 
sdp.offerSdp : null, rtpmapParser: _rtpmapParser, fmtpParser: _fmtpParser, ); diff --git a/packages/stream_video/lib/src/webrtc/sdp/editor/rule/rule_mirror_sprop_stereo.dart b/packages/stream_video/lib/src/webrtc/sdp/editor/rule/rule_mirror_sprop_stereo.dart new file mode 100644 index 000000000..9d814d93c --- /dev/null +++ b/packages/stream_video/lib/src/webrtc/sdp/editor/rule/rule_mirror_sprop_stereo.dart @@ -0,0 +1,19 @@ +import '../../sdp.dart'; +import 'sdp_munging_rule.dart'; + +/// Adds `stereo=1` to the Opus fmtp line in a local answer when the +/// corresponding offer contained `sprop-stereo=1`. +class MirrorSpropStereoRule extends SdpMungingRule { + const MirrorSpropStereoRule({ + super.platforms, + super.types = const [SdpType.localAnswer], + }); + + @override + String get key => 'mirror-sprop-stereo'; + + @override + String toString() { + return 'MirrorSpropStereoRule{types: $types, platforms: $platforms}'; + } +} diff --git a/packages/stream_video/lib/src/webrtc/sdp/editor/rule/rule_set_opus_stereo_enabled.dart b/packages/stream_video/lib/src/webrtc/sdp/editor/rule/rule_set_opus_stereo_enabled.dart deleted file mode 100644 index 1d359e42a..000000000 --- a/packages/stream_video/lib/src/webrtc/sdp/editor/rule/rule_set_opus_stereo_enabled.dart +++ /dev/null @@ -1,22 +0,0 @@ -import '../../sdp.dart'; -import 'sdp_munging_rule.dart'; - -// Temp SDP munging to add stereo=1 parameter before SFU switch is set for Flutter SDK -class SetOpusStereoEnabledRule extends SdpMungingRule { - const SetOpusStereoEnabledRule({ - required this.enabled, - super.platforms, - super.types = const [SdpType.remoteOffer], - }); - - final bool enabled; - - @override - String get key => 'set-opus-stereo-enabled'; - - @override - String toString() { - return 'SetOpusStereoEnabledRule{enabled: $enabled, ' - 'types: $types, platforms: $platforms}'; - } -} diff --git a/packages/stream_video/lib/src/webrtc/sdp/editor/rule/sdp_munging_rule.dart 
b/packages/stream_video/lib/src/webrtc/sdp/editor/rule/sdp_munging_rule.dart index cdaefcd79..91f5df2ff 100644 --- a/packages/stream_video/lib/src/webrtc/sdp/editor/rule/sdp_munging_rule.dart +++ b/packages/stream_video/lib/src/webrtc/sdp/editor/rule/sdp_munging_rule.dart @@ -4,9 +4,9 @@ import '../../../../platform_detector/platform_detector.dart'; import '../../codec/sdp_codec.dart'; import '../../editor/rule/rule_set_opus_red_enabled.dart'; import '../../sdp.dart'; +import 'rule_mirror_sprop_stereo.dart'; import 'rule_prioritize_codec.dart'; import 'rule_set_opus_dtx_enabled.dart'; -import 'rule_set_opus_stereo_enabled.dart'; abstract class SdpMungingRule with EquatableMixin { const SdpMungingRule({ @@ -32,11 +32,10 @@ abstract class SdpMungingRule with EquatableMixin { List types, }) = SetOpusRedEnabledRule; - const factory SdpMungingRule.setOpusStereoEnabled({ - required bool enabled, + const factory SdpMungingRule.mirrorSpropStereo({ List platforms, List types, - }) = SetOpusStereoEnabledRule; + }) = MirrorSpropStereoRule; String get key; diff --git a/packages/stream_video/lib/src/webrtc/sdp/editor/sdp_editor.dart b/packages/stream_video/lib/src/webrtc/sdp/editor/sdp_editor.dart index d7b8d8f06..d121696bd 100644 --- a/packages/stream_video/lib/src/webrtc/sdp/editor/sdp_editor.dart +++ b/packages/stream_video/lib/src/webrtc/sdp/editor/sdp_editor.dart @@ -8,7 +8,5 @@ abstract class SdpEditor { set opusRedEnabled(bool value); - set opusStereoEnabled(bool value); - String? edit(Sdp? 
sdp); } diff --git a/packages/stream_video/lib/src/webrtc/sdp/editor/sdp_editor_impl.dart b/packages/stream_video/lib/src/webrtc/sdp/editor/sdp_editor_impl.dart index fac0d79fa..77e41ef97 100644 --- a/packages/stream_video/lib/src/webrtc/sdp/editor/sdp_editor_impl.dart +++ b/packages/stream_video/lib/src/webrtc/sdp/editor/sdp_editor_impl.dart @@ -10,7 +10,6 @@ import '../sdp.dart'; import 'action/sdp_edit_action_factory.dart'; import 'rule/rule_set_opus_dtx_enabled.dart'; import 'rule/rule_set_opus_red_enabled.dart'; -import 'rule/rule_set_opus_stereo_enabled.dart'; import 'rule/rule_toggle.dart'; import 'rule/sdp_munging_rule.dart'; import 'sdp_editor.dart'; @@ -23,9 +22,6 @@ class NoOpSdpEditor implements SdpEditor { @override set opusRedEnabled(bool value) {} - @override - set opusStereoEnabled(bool value) {} - @override String? edit(Sdp? sdp) { return sdp?.value; @@ -66,16 +62,6 @@ class SdpEditorImpl implements SdpEditor { } } - @override - set opusStereoEnabled(bool value) { - for (final toggle in internalRules) { - if (toggle.rule is SetOpusStereoEnabledRule) { - toggle.enabled = value; - break; - } - } - } - @override String? edit(Sdp? sdp) { if (sdp == null) { @@ -90,7 +76,7 @@ class SdpEditorImpl implements SdpEditor { _logger.i(() => '[edit] sdp.type: ${sdp.type}'); final lines = sdp.value.split('\r\n'); - applyRules(sdp.type, lines); + applyRules(sdp.type, lines, sdp: sdp); if (policy.mungingEnabled) { policy.munging(sdp.type, lines); @@ -103,8 +89,9 @@ class SdpEditorImpl implements SdpEditor { void applyRules( SdpType sdpType, - List lines, - ) { + List lines, { + Sdp? 
sdp, + }) { for (final toggle in internalRules) { _logger.d(() => '[edit] rule: $toggle'); if (!toggle.enabled) { @@ -120,7 +107,7 @@ class SdpEditorImpl implements SdpEditor { _logger.w(() => '[edit] rejected (mismatched sdpType): $sdpType'); continue; } - _actionFactory.create(rule).execute(lines); + _actionFactory.create(rule, sdp: sdp).execute(lines); } } } @@ -161,9 +148,8 @@ List _createRules() { ), SdpRuleToggle( enabled: true, - rule: const SdpMungingRule.setOpusStereoEnabled( - enabled: true, - types: [SdpType.remoteOffer, SdpType.localAnswer], + rule: const SdpMungingRule.mirrorSpropStereo( + types: [SdpType.localAnswer], ), ), ]; diff --git a/packages/stream_video/lib/src/webrtc/sdp/sdp.dart b/packages/stream_video/lib/src/webrtc/sdp/sdp.dart index 0a89336bd..d8b409d5c 100644 --- a/packages/stream_video/lib/src/webrtc/sdp/sdp.dart +++ b/packages/stream_video/lib/src/webrtc/sdp/sdp.dart @@ -1,16 +1,51 @@ -class Sdp { - const Sdp(this.type, this.value); +sealed class Sdp { + const Sdp(this.value); - const Sdp.localOffer(String sdp) : this(SdpType.localOffer, sdp); + final String value; - const Sdp.localAnswer(String sdp) : this(SdpType.localAnswer, sdp); + SdpType get type; - const Sdp.remoteOffer(String sdp) : this(SdpType.remoteOffer, sdp); + static LocalOfferSdp localOffer(String sdp) => LocalOfferSdp(sdp); - const Sdp.remoteAnswer(String sdp) : this(SdpType.remoteAnswer, sdp); + static LocalAnswerSdp localAnswer( + String sdp, { + String? offerSdp, + }) => + LocalAnswerSdp(sdp, offerSdp: offerSdp); - final SdpType type; - final String value; + static RemoteOfferSdp remoteOffer(String sdp) => RemoteOfferSdp(sdp); + + static RemoteAnswerSdp remoteAnswer(String sdp) => RemoteAnswerSdp(sdp); +} + +class LocalOfferSdp extends Sdp { + const LocalOfferSdp(super.value); + + @override + SdpType get type => SdpType.localOffer; +} + +class LocalAnswerSdp extends Sdp { + const LocalAnswerSdp(super.value, {this.offerSdp}); + + final String? 
offerSdp; + + @override + SdpType get type => SdpType.localAnswer; +} + +class RemoteOfferSdp extends Sdp { + const RemoteOfferSdp(super.value); + + @override + SdpType get type => SdpType.remoteOffer; +} + +class RemoteAnswerSdp extends Sdp { + const RemoteAnswerSdp(super.value); + + @override + SdpType get type => SdpType.remoteAnswer; } enum SdpType { diff --git a/packages/stream_video/lib/src/webrtc/traced_peer_connection.dart b/packages/stream_video/lib/src/webrtc/traced_peer_connection.dart index 69fcc5b81..4e18b9acb 100644 --- a/packages/stream_video/lib/src/webrtc/traced_peer_connection.dart +++ b/packages/stream_video/lib/src/webrtc/traced_peer_connection.dart @@ -397,12 +397,13 @@ class TracedStreamPeerConnection extends StreamPeerConnection { } @override - Future> createAnswer([ + Future> createAnswer( + String offerSdp, [ Map mediaConstraints = const {}, ]) async { tracer.trace('createAnswer', mediaConstraints); - final result = await super.createAnswer(mediaConstraints); + final result = await super.createAnswer(offerSdp, mediaConstraints); if (result.isSuccess) { tracer.trace('createAnswer.success', result.getDataOrNull()?.toMap()); From 3ef2702cdca9e673d85c6bf387b9229b35b0e909 Mon Sep 17 00:00:00 2001 From: Brazol Date: Fri, 20 Feb 2026 13:08:11 +0100 Subject: [PATCH 8/9] tweaks --- melos.yaml | 6 +- packages/stream_video/CHANGELOG.md | 16 +++-- .../stream_video/lib/src/stream_video.dart | 2 - .../model/rtc_audio_bitrate_preset.dart | 6 +- .../action_set_opus_stereo_enabled.dart | 68 ------------------- packages/stream_video/pubspec.yaml | 5 +- packages/stream_video_filters/pubspec.yaml | 5 +- .../stream_video_flutter/example/pubspec.yaml | 5 +- packages/stream_video_flutter/pubspec.yaml | 5 +- .../pubspec.yaml | 5 +- .../pubspec.yaml | 5 +- 11 files changed, 22 insertions(+), 106 deletions(-) delete mode 100644 packages/stream_video/lib/src/webrtc/sdp/editor/action/action_set_opus_stereo_enabled.dart diff --git a/melos.yaml b/melos.yaml index 
a2a14fcae..43c71122f 100644 --- a/melos.yaml +++ b/melos.yaml @@ -22,11 +22,7 @@ command: device_info_plus: ^12.1.0 share_plus: ^11.0.0 stream_chat_flutter: ^9.17.0 - stream_webrtc_flutter: - git: - url: https://github.com/GetStream/webrtc-flutter.git - ref: feat/hifi-stereo-playout - stream_video: ^1.2.4 + stream_webrtc_flutter: ^2.2.5 stream_video_flutter: ^1.2.4 stream_video_noise_cancellation: ^1.2.4 stream_video_push_notification: ^1.2.4 diff --git a/packages/stream_video/CHANGELOG.md b/packages/stream_video/CHANGELOG.md index 1a7a8bb42..6cb7138ab 100644 --- a/packages/stream_video/CHANGELOG.md +++ b/packages/stream_video/CHANGELOG.md @@ -1,10 +1,18 @@ ## Upcoming ### ✅ Added -* Added `audioConfigurationPolicy` to `StreamVideoOptions` - a unified audio configuration for both iOS and Android platforms. Includes predefined policies: - * `AudioConfigurationPolicy.call()` - Optimized for voice/video calls (default) - * `AudioConfigurationPolicy.livestream()` - Optimized for livestream playback - * `AudioConfigurationPolicy.custom()` - Full control over platform-specific settings with optional `basePolicy` fallback +* Added HiFi audio mode for high-fidelity scenarios such as live music, podcasts, and professional streaming. Use `SfuAudioBitrateProfile` to select an audio quality profile before joining a call: + * `SfuAudioBitrateProfile.voiceStandard` – Standard voice (64 kbps, default) + * `SfuAudioBitrateProfile.voiceHighQuality` – High-quality voice (128 kbps) + * `SfuAudioBitrateProfile.musicHighQuality` – HiFi music mode (128 kbps, audio processing disabled) +* Added stereo playout and capture (Android only) support with new `audioConfigurationPolicy` in `StreamVideoOptions`. 
Includes predefined policies: + * `AudioConfigurationPolicy.broadcaster()` – Mono playout with voice processing enabled (default) + * `AudioConfigurationPolicy.viewer()` – Stereo playout with voice processing disabled, ideal for passive listeners + * `AudioConfigurationPolicy.hiFi()` – Stereo capture and playout with voice processing disabled, ideal for hosts streaming high-fidelity audio + * `AudioConfigurationPolicy.custom()` – Full control over platform-specific audio settings with optional `basePolicy` fallback +* Added `RtcMediaDeviceNotifier.reinitializeAudioConfiguration()` to switch audio configuration policy at runtime (must be called before starting a call). + +See the [documentation](https://getstream.io/video/docs/flutter/guides/camera-and-microphone/high-fidelity-audio/) for details. ### ⚠️ Deprecated * Deprecated `androidAudioConfiguration` in `StreamVideoOptions`. Use `audioConfigurationPolicy` instead. diff --git a/packages/stream_video/lib/src/stream_video.dart b/packages/stream_video/lib/src/stream_video.dart index 24213c3e4..14700cfa7 100644 --- a/packages/stream_video/lib/src/stream_video.dart +++ b/packages/stream_video/lib/src/stream_video.dart @@ -1373,7 +1373,6 @@ class StreamVideoOptions { this.latencySettings = const LatencySettings(), this.retryPolicy = const RetryPolicy(), this.defaultCallPreferences, - //TODO: Allow sdp munging for development purposees, remove it before merging this.sdpPolicy = const SdpPolicy(), this.audioProcessor, this.logPriority = Priority.none, @@ -1404,7 +1403,6 @@ class StreamVideoOptions { this.latencySettings = const LatencySettings(), this.retryPolicy = const RetryPolicy(), this.defaultCallPreferences, - //TODO: Allow sdp munging for development purposees, remove it before merging this.sdpPolicy = const SdpPolicy(), this.audioProcessor, this.logPriority = Priority.none, diff --git a/packages/stream_video/lib/src/webrtc/model/rtc_audio_bitrate_preset.dart 
b/packages/stream_video/lib/src/webrtc/model/rtc_audio_bitrate_preset.dart index 8f0427bc4..e6e3f253f 100644 --- a/packages/stream_video/lib/src/webrtc/model/rtc_audio_bitrate_preset.dart +++ b/packages/stream_video/lib/src/webrtc/model/rtc_audio_bitrate_preset.dart @@ -1,9 +1,9 @@ class AudioBitrate { // Voice profiles - static const voiceStandard = 64000; // clear speech, VoIP+ - static const voiceHighQuality = 128000; // podcast / studio-level speech + static const voiceStandard = 64000; // clear speech, VoIP + static const voiceHighQuality = 128000; // podcast, studio-level speech // Music profiles static const musicStandard = 64000; // lightweight mono music - static const musicHighQuality = 128000; // transparent stereo, good default + static const musicHighQuality = 128000; // stereo, hifi } diff --git a/packages/stream_video/lib/src/webrtc/sdp/editor/action/action_set_opus_stereo_enabled.dart b/packages/stream_video/lib/src/webrtc/sdp/editor/action/action_set_opus_stereo_enabled.dart deleted file mode 100644 index 16fc17e99..000000000 --- a/packages/stream_video/lib/src/webrtc/sdp/editor/action/action_set_opus_stereo_enabled.dart +++ /dev/null @@ -1,68 +0,0 @@ -import '../../../../logger/impl/tagged_logger.dart'; -import '../../attributes/fmtp.dart'; -import '../../attributes/rtpmap.dart'; -import '../../codec/sdp_codec.dart'; -import '../../sdp.dart'; -import 'sdp_edit_action.dart'; - -final _logger = taggedLogger(tag: 'SV:SetOpusStereoEnabled'); - -// Temp SDP munging to add stereo=1 parameter before SFU switch is set for Flutter SDK -class SetOpusStereoEnabledAction implements SdpEditAction { - const SetOpusStereoEnabledAction({ - required this.enabled, - required this.rtpmapParser, - required this.fmtpParser, - }); - - final bool enabled; - final RtpmapParser rtpmapParser; - final FmtpParser fmtpParser; - - @override - void execute(List sdpLines) { - String? 
opusPayloadType; - for (var index = 0; index < sdpLines.length; index++) { - final sdpLine = sdpLines[index]; - if (sdpLine.isRtpmap) { - final rtpmap = rtpmapParser.parse(sdpLine); - if (rtpmap == null) continue; - if (rtpmap.encodingName.toUpperCase() == - AudioCodec.opus.alias.toUpperCase()) { - opusPayloadType = rtpmap.payloadType; - } - } else if (opusPayloadType != null && sdpLine.isFmtp) { - final original = fmtpParser.parse(sdpLine); - if (original == null || original.payloadType != opusPayloadType) { - _logger.w( - () => '[setOpusStereoEnabled] rejected (not opus): "$original"', - ); - continue; - } - final modified = original.setStereoEnabled(enabled); - _logger.v(() => '[setOpusStereoEnabled] original: "$original"'); - _logger.v(() => '[setOpusStereoEnabled] modified: "$modified"'); - sdpLines[index] = modified.toSdpLine(); - } - } - } -} - -extension on Fmtp { - Fmtp setStereoEnabled(bool enabled) { - if (enabled) { - return copyWith( - parameters: { - ...parameters, - 'stereo': '1', - }, - ); - } else { - return copyWith( - parameters: { - ...parameters, - }..remove('stereo'), - ); - } - } -} diff --git a/packages/stream_video/pubspec.yaml b/packages/stream_video/pubspec.yaml index be253b9c4..bdb14fa8c 100644 --- a/packages/stream_video/pubspec.yaml +++ b/packages/stream_video/pubspec.yaml @@ -31,10 +31,7 @@ dependencies: rxdart: ^0.28.0 sdp_transform: ^0.3.2 state_notifier: ^1.0.0 - stream_webrtc_flutter: - git: - url: https://github.com/GetStream/webrtc-flutter.git - ref: feat/hifi-stereo-playout + stream_webrtc_flutter: ^2.2.5 synchronized: ^3.1.0 system_info2: ^4.0.0 tart: ^0.6.0 diff --git a/packages/stream_video_filters/pubspec.yaml b/packages/stream_video_filters/pubspec.yaml index 6468b267d..0137a5878 100644 --- a/packages/stream_video_filters/pubspec.yaml +++ b/packages/stream_video_filters/pubspec.yaml @@ -15,10 +15,7 @@ dependencies: sdk: flutter plugin_platform_interface: ^2.0.2 stream_video: ^1.2.4 - stream_webrtc_flutter: - git: - url: 
https://github.com/GetStream/webrtc-flutter.git - ref: feat/hifi-stereo-playout + stream_webrtc_flutter: ^2.2.5 dev_dependencies: flutter_lints: ^6.0.0 diff --git a/packages/stream_video_flutter/example/pubspec.yaml b/packages/stream_video_flutter/example/pubspec.yaml index 2b1645abd..c1f216dd1 100644 --- a/packages/stream_video_flutter/example/pubspec.yaml +++ b/packages/stream_video_flutter/example/pubspec.yaml @@ -31,10 +31,7 @@ dependencies: stream_video: ^1.2.4 stream_video_flutter: ^1.2.4 stream_video_push_notification: ^1.2.4 - stream_webrtc_flutter: - git: - url: https://github.com/GetStream/webrtc-flutter.git - ref: feat/hifi-stereo-playout + stream_webrtc_flutter: ^2.2.5 dependency_overrides: stream_video: diff --git a/packages/stream_video_flutter/pubspec.yaml b/packages/stream_video_flutter/pubspec.yaml index 40ce916b7..2271afaca 100644 --- a/packages/stream_video_flutter/pubspec.yaml +++ b/packages/stream_video_flutter/pubspec.yaml @@ -25,10 +25,7 @@ dependencies: rate_limiter: ^1.0.0 rxdart: ^0.28.0 stream_video: ^1.2.4 - stream_webrtc_flutter: - git: - url: https://github.com/GetStream/webrtc-flutter.git - ref: feat/hifi-stereo-playout + stream_webrtc_flutter: ^2.2.5 visibility_detector: ^0.4.0+2 dev_dependencies: diff --git a/packages/stream_video_noise_cancellation/pubspec.yaml b/packages/stream_video_noise_cancellation/pubspec.yaml index 061e7ed99..acf5b6367 100644 --- a/packages/stream_video_noise_cancellation/pubspec.yaml +++ b/packages/stream_video_noise_cancellation/pubspec.yaml @@ -15,10 +15,7 @@ dependencies: sdk: flutter plugin_platform_interface: ^2.0.2 stream_video: ^1.2.4 - stream_webrtc_flutter: - git: - url: https://github.com/GetStream/webrtc-flutter.git - ref: feat/hifi-stereo-playout + stream_webrtc_flutter: ^2.2.5 dev_dependencies: flutter_lints: ^6.0.0 diff --git a/packages/stream_video_push_notification/pubspec.yaml b/packages/stream_video_push_notification/pubspec.yaml index 1e7573d60..7314dbdc9 100644 --- 
a/packages/stream_video_push_notification/pubspec.yaml +++ b/packages/stream_video_push_notification/pubspec.yaml @@ -23,10 +23,7 @@ dependencies: shared_preferences: ^2.5.3 stream_video: ^1.2.4 stream_video_flutter: ^1.2.4 - stream_webrtc_flutter: - git: - url: https://github.com/GetStream/webrtc-flutter.git - ref: feat/hifi-stereo-playout + stream_webrtc_flutter: ^2.2.5 uuid: ^4.5.1 dev_dependencies: From 19b260975bff31791189fd03f835ec8237b244c2 Mon Sep 17 00:00:00 2001 From: Brazol Date: Fri, 20 Feb 2026 15:15:02 +0100 Subject: [PATCH 9/9] tweaks --- packages/stream_video/CHANGELOG.md | 2 +- packages/stream_video/lib/src/stream_video.dart | 3 +++ .../stream_video/lib/src/webrtc/rtc_manager.dart | 16 ++++++++-------- 3 files changed, 12 insertions(+), 9 deletions(-) diff --git a/packages/stream_video/CHANGELOG.md b/packages/stream_video/CHANGELOG.md index 6cb7138ab..7c649430a 100644 --- a/packages/stream_video/CHANGELOG.md +++ b/packages/stream_video/CHANGELOG.md @@ -5,7 +5,7 @@ * `SfuAudioBitrateProfile.voiceStandard` – Standard voice (64 kbps, default) * `SfuAudioBitrateProfile.voiceHighQuality` – High-quality voice (128 kbps) * `SfuAudioBitrateProfile.musicHighQuality` – HiFi music mode (128 kbps, audio processing disabled) -* Added stereo playout and capture (Android only) support with new `audioConfigurationPolicy` in `StreamVideoOptions`. Includes predefined policies: +* Added stereo playout (iOS and Android) and capture (Android only) support with new `audioConfigurationPolicy` in `StreamVideoOptions`. 
Includes predefined policies: * `AudioConfigurationPolicy.broadcaster()` – Mono playout with voice processing enabled (default) * `AudioConfigurationPolicy.viewer()` – Stereo playout with voice processing disabled, ideal for passive listeners * `AudioConfigurationPolicy.hiFi()` – Stereo capture and playout with voice processing disabled, ideal for hosts streaming high-fidelity audio diff --git a/packages/stream_video/lib/src/stream_video.dart b/packages/stream_video/lib/src/stream_video.dart index 14700cfa7..ef0c65be7 100644 --- a/packages/stream_video/lib/src/stream_video.dart +++ b/packages/stream_video/lib/src/stream_video.dart @@ -197,6 +197,9 @@ class StreamVideo extends Disposable { unawaited(RtcManager.cacheGenericSdp()); } + webrtcInitializationCompleter.complete(); + }) + .onError((_, _) { webrtcInitializationCompleter.complete(); }); } else { diff --git a/packages/stream_video/lib/src/webrtc/rtc_manager.dart b/packages/stream_video/lib/src/webrtc/rtc_manager.dart index 70276ce65..aca368309 100644 --- a/packages/stream_video/lib/src/webrtc/rtc_manager.dart +++ b/packages/stream_video/lib/src/webrtc/rtc_manager.dart @@ -734,8 +734,8 @@ extension PublisherRtcManager on RtcManager { for (final option in publishOptions) { if (option.trackType != track.trackType) continue; - final cashedTransceiver = transceiversManager.get(option)?.transceiver; - if (cashedTransceiver == null) { + final cachedTransceiver = transceiversManager.get(option)?.transceiver; + if (cachedTransceiver == null) { final transceiverResult = await _addTransceiver( track, option, @@ -756,7 +756,7 @@ extension PublisherRtcManager on RtcManager { ); } else { await _updateTransceiver( - cashedTransceiver, + cachedTransceiver, track, track.trackType, trackPublishOptions: RtcTrackPublishOptions( @@ -765,7 +765,7 @@ extension PublisherRtcManager on RtcManager { ); _logger.v( - () => '[publishAudioTrack] cached transceiver: $cashedTransceiver', + () => '[publishAudioTrack] cached transceiver: 
$cachedTransceiver', ); } } @@ -808,8 +808,8 @@ extension PublisherRtcManager on RtcManager { for (final option in publishOptions) { if (option.trackType != track.trackType) continue; - final cashedTransceiver = transceiversManager.get(option)?.transceiver; - if (cashedTransceiver == null) { + final cachedTransceiver = transceiversManager.get(option)?.transceiver; + if (cachedTransceiver == null) { final transceiverResult = await _addTransceiver( track, option, @@ -830,13 +830,13 @@ extension PublisherRtcManager on RtcManager { ); } else { await _updateTransceiver( - cashedTransceiver, + cachedTransceiver, track, track.trackType, ); _logger.v( - () => '[publishVideoTrack] cached transceiver: $cashedTransceiver', + () => '[publishVideoTrack] cached transceiver: $cachedTransceiver', ); } }